From 5d8c659373ae2b169892fc9d99d54bd1b3baf65a Mon Sep 17 00:00:00 2001 From: Sean Quah <8349537+squahtx@users.noreply.github.com> Date: Tue, 30 May 2023 14:37:39 +0100 Subject: [PATCH 001/153] Remove unused `FederationServer.__str__` override (#15690) Signed-off-by: Sean Quah --- changelog.d/15690.misc | 1 + synapse/federation/federation_server.py | 3 --- 2 files changed, 1 insertion(+), 3 deletions(-) create mode 100644 changelog.d/15690.misc diff --git a/changelog.d/15690.misc b/changelog.d/15690.misc new file mode 100644 index 000000000000..c6c259eb7d14 --- /dev/null +++ b/changelog.d/15690.misc @@ -0,0 +1 @@ +Remove some unused code. diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index f4ca70a69806..e17cb840de99 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -1291,9 +1291,6 @@ async def _process_incoming_pdus_in_room_inner( return lock = new_lock - def __str__(self) -> str: - return "" % self.server_name - async def exchange_third_party_invite( self, sender_user_id: str, target_user_id: str, room_id: str, signed: Dict ) -> None: From e2c8458bba5ab20f84c93a6c68e293b2d304cdc0 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 17 Jun 2022 14:48:55 +0200 Subject: [PATCH 002/153] Make the api.auth.Auth a Protocol --- synapse/api/auth/__init__.py | 175 ++++++++++++++ synapse/api/auth/base.py | 273 ++++++++++++++++++++++ synapse/api/{auth.py => auth/internal.py} | 249 +------------------- synapse/server.py | 3 +- tests/api/test_auth.py | 4 +- tests/handlers/test_register.py | 4 +- tests/test_state.py | 4 +- 7 files changed, 464 insertions(+), 248 deletions(-) create mode 100644 synapse/api/auth/__init__.py create mode 100644 synapse/api/auth/base.py rename synapse/api/{auth.py => auth/internal.py} (61%) diff --git a/synapse/api/auth/__init__.py b/synapse/api/auth/__init__.py new file mode 100644 index 000000000000..90cfe39d7623 --- /dev/null +++ b/synapse/api/auth/__init__.py @@ -0,0 +1,175 @@ +# Copyright 2023 The Matrix.org Foundation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Optional, Tuple + +from typing_extensions import Protocol + +from twisted.web.server import Request + +from synapse.appservice import ApplicationService +from synapse.http.site import SynapseRequest +from synapse.types import Requester + +# guests always get this device id. +GUEST_DEVICE_ID = "guest_device" + + +class Auth(Protocol): + """The interface that an auth provider must implement.""" + + async def check_user_in_room( + self, + room_id: str, + requester: Requester, + allow_departed_users: bool = False, + ) -> Tuple[str, Optional[str]]: + """Check if the user is in the room, or was at some point. + Args: + room_id: The room to check. + + user_id: The user to check. + + current_state: Optional map of the current state of the room. + If provided then that map is used to check whether they are a + member of the room. Otherwise the current membership is + loaded from the database. 
+ + allow_departed_users: if True, accept users that were previously + members but have now departed. + + Raises: + AuthError if the user is/was not in the room. + Returns: + The current membership of the user in the room and the + membership event ID of the user. + """ + + async def get_user_by_req( + self, + request: SynapseRequest, + allow_guest: bool = False, + allow_expired: bool = False, + ) -> Requester: + """Get a registered user's ID. + + Args: + request: An HTTP request with an access_token query parameter. + allow_guest: If False, will raise an AuthError if the user making the + request is a guest. + allow_expired: If True, allow the request through even if the account + is expired, or session token lifetime has ended. Note that + /login will deliver access tokens regardless of expiration. + + Returns: + Resolves to the requester + Raises: + InvalidClientCredentialsError if no user by that token exists or the token + is invalid. + AuthError if access is denied for the user in the access token + """ + + async def validate_appservice_can_control_user_id( + self, app_service: ApplicationService, user_id: str + ) -> None: + """Validates that the app service is allowed to control + the given user. + + Args: + app_service: The app service that controls the user + user_id: The author MXID that the app service is controlling + + Raises: + AuthError: If the application service is not allowed to control the user + (user namespace regex does not match, wrong homeserver, etc) + or if the user has not been registered yet. + """ + + async def get_user_by_access_token( + self, + token: str, + allow_expired: bool = False, + ) -> Requester: + """Validate access token and get user_id from it + + Args: + token: The access token to get the user by + allow_expired: If False, raises an InvalidClientTokenError + if the token is expired + + Raises: + InvalidClientTokenError if a user by that token exists, but the token is + expired + InvalidClientCredentialsError if no user by that token exists or the token + is invalid + """ + + async def is_server_admin(self, requester: Requester) -> bool: + """Check if the given user is a local server admin. + + Args: + requester: user to check + + Returns: + True if the user is an admin + """ + + async def check_can_change_room_list( + self, room_id: str, requester: Requester + ) -> bool: + """Determine whether the user is allowed to edit the room's entry in the + published room list. + + Args: + room_id + user + """ + + @staticmethod + def has_access_token(request: Request) -> bool: + """Checks if the request has an access_token. + + Returns: + False if no access_token was given, True otherwise. + """ + + @staticmethod + def get_access_token_from_request(request: Request) -> str: + """Extracts the access_token from the request. + + Args: + request: The http request. + Returns: + The access_token + Raises: + MissingClientTokenError: If there isn't a single access_token in the + request + """ + + async def check_user_in_room_or_world_readable( + self, room_id: str, requester: Requester, allow_departed_users: bool = False + ) -> Tuple[str, Optional[str]]: + """Checks that the user is or was in the room or the room is world + readable. If it isn't then an exception is raised. + + Args: + room_id: room to check + user_id: user to check + allow_departed_users: if True, accept users that were previously + members but have now departed + + Returns: + Resolves to the current membership of the user in the room and the + membership event ID of the user. 
If the user is not in the room and + never has been, then `(Membership.JOIN, None)` is returned. + """ diff --git a/synapse/api/auth/base.py b/synapse/api/auth/base.py new file mode 100644 index 000000000000..240f2b90dee2 --- /dev/null +++ b/synapse/api/auth/base.py @@ -0,0 +1,273 @@ +# Copyright 2023 The Matrix.org Foundation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import logging +from typing import TYPE_CHECKING, Optional, Tuple + +from twisted.web.server import Request + +from synapse import event_auth +from synapse.api.constants import EventTypes, HistoryVisibility, Membership +from synapse.api.errors import ( + AuthError, + Codes, + MissingClientTokenError, + UnstableSpecAuthError, +) +from synapse.appservice import ApplicationService +from synapse.logging.opentracing import trace +from synapse.types import Requester + +if TYPE_CHECKING: + from synapse.server import HomeServer + +logger = logging.getLogger(__name__) + + +class BaseAuth: + """Common base class for all auth implementations.""" + + def __init__(self, hs: "HomeServer"): + self.hs = hs + self.store = hs.get_datastores().main + self._storage_controllers = hs.get_storage_controllers() + + async def check_user_in_room( + self, + room_id: str, + requester: Requester, + allow_departed_users: bool = False, + ) -> Tuple[str, Optional[str]]: + """Check if the user is in the room, or was at some point. + Args: + room_id: The room to check. + + requester: The user making the request, according to the access token. + + current_state: Optional map of the current state of the room. + If provided then that map is used to check whether they are a + member of the room. Otherwise the current membership is + loaded from the database. + + allow_departed_users: if True, accept users that were previously + members but have now departed. + + Raises: + AuthError if the user is/was not in the room. + Returns: + The current membership of the user in the room and the + membership event ID of the user. + """ + + user_id = requester.user.to_string() + ( + membership, + member_event_id, + ) = await self.store.get_local_current_membership_for_user_in_room( + user_id=user_id, + room_id=room_id, + ) + + if membership: + if membership == Membership.JOIN: + return membership, member_event_id + + # XXX this looks totally bogus. Why do we not allow users who have been banned, + # or those who were members previously and have been re-invited? + if allow_departed_users and membership == Membership.LEAVE: + forgot = await self.store.did_forget(user_id, room_id) + if not forgot: + return membership, member_event_id + raise UnstableSpecAuthError( + 403, + "User %s not in room %s" % (user_id, room_id), + errcode=Codes.NOT_JOINED, + ) + + @trace + async def check_user_in_room_or_world_readable( + self, room_id: str, requester: Requester, allow_departed_users: bool = False + ) -> Tuple[str, Optional[str]]: + """Checks that the user is or was in the room or the room is world + readable. If it isn't then an exception is raised. 
+ + Args: + room_id: room to check + user_id: user to check + allow_departed_users: if True, accept users that were previously + members but have now departed + + Returns: + Resolves to the current membership of the user in the room and the + membership event ID of the user. If the user is not in the room and + never has been, then `(Membership.JOIN, None)` is returned. + """ + + try: + # check_user_in_room will return the most recent membership + # event for the user if: + # * The user is a non-guest user, and was ever in the room + # * The user is a guest user, and has joined the room + # else it will throw. + return await self.check_user_in_room( + room_id, requester, allow_departed_users=allow_departed_users + ) + except AuthError: + visibility = await self._storage_controllers.state.get_current_state_event( + room_id, EventTypes.RoomHistoryVisibility, "" + ) + if ( + visibility + and visibility.content.get("history_visibility") + == HistoryVisibility.WORLD_READABLE + ): + return Membership.JOIN, None + raise AuthError( + 403, + "User %r not in room %s, and room previews are disabled" + % (requester.user, room_id), + ) + + async def validate_appservice_can_control_user_id( + self, app_service: ApplicationService, user_id: str + ) -> None: + """Validates that the app service is allowed to control + the given user. + + Args: + app_service: The app service that controls the user + user_id: The author MXID that the app service is controlling + + Raises: + AuthError: If the application service is not allowed to control the user + (user namespace regex does not match, wrong homeserver, etc) + or if the user has not been registered yet. + """ + + # It's ok if the app service is trying to use the sender from their registration + if app_service.sender == user_id: + pass + # Check to make sure the app service is allowed to control the user + elif not app_service.is_interested_in_user(user_id): + raise AuthError( + 403, + "Application service cannot masquerade as this user (%s)." % user_id, + ) + # Check to make sure the user is already registered on the homeserver + elif not (await self.store.get_user_by_id(user_id)): + raise AuthError( + 403, "Application service has not registered this user (%s)" % user_id + ) + + async def is_server_admin(self, requester: Requester) -> bool: + """Check if the given user is a local server admin. + + Args: + requester: user to check + + Returns: + True if the user is an admin + """ + raise NotImplementedError() + + async def check_can_change_room_list( + self, room_id: str, requester: Requester + ) -> bool: + """Determine whether the user is allowed to edit the room's entry in the + published room list. + + Args: + room_id + user + """ + + is_admin = await self.is_server_admin(requester) + if is_admin: + return True + + await self.check_user_in_room(room_id, requester) + + # We currently require the user is a "moderator" in the room. 
We do this + # by checking if they would (theoretically) be able to change the + # m.room.canonical_alias events + + power_level_event = ( + await self._storage_controllers.state.get_current_state_event( + room_id, EventTypes.PowerLevels, "" + ) + ) + + auth_events = {} + if power_level_event: + auth_events[(EventTypes.PowerLevels, "")] = power_level_event + + send_level = event_auth.get_send_level( + EventTypes.CanonicalAlias, "", power_level_event + ) + user_level = event_auth.get_user_power_level( + requester.user.to_string(), auth_events + ) + + return user_level >= send_level + + @staticmethod + def has_access_token(request: Request) -> bool: + """Checks if the request has an access_token. + + Returns: + False if no access_token was given, True otherwise. + """ + # This will always be set by the time Twisted calls us. + assert request.args is not None + + query_params = request.args.get(b"access_token") + auth_headers = request.requestHeaders.getRawHeaders(b"Authorization") + return bool(query_params) or bool(auth_headers) + + @staticmethod + def get_access_token_from_request(request: Request) -> str: + """Extracts the access_token from the request. + + Args: + request: The http request. + Returns: + The access_token + Raises: + MissingClientTokenError: If there isn't a single access_token in the + request + """ + # This will always be set by the time Twisted calls us. + assert request.args is not None + + auth_headers = request.requestHeaders.getRawHeaders(b"Authorization") + query_params = request.args.get(b"access_token") + if auth_headers: + # Try the get the access_token from a "Authorization: Bearer" + # header + if query_params is not None: + raise MissingClientTokenError( + "Mixing Authorization headers and access_token query parameters." + ) + if len(auth_headers) > 1: + raise MissingClientTokenError("Too many Authorization headers.") + parts = auth_headers[0].split(b" ") + if parts[0] == b"Bearer" and len(parts) == 2: + return parts[1].decode("ascii") + else: + raise MissingClientTokenError("Invalid Authorization header.") + else: + # Try to get the access_token from the query params. + if not query_params: + raise MissingClientTokenError() + + return query_params[0].decode("ascii") diff --git a/synapse/api/auth.py b/synapse/api/auth/internal.py similarity index 61% rename from synapse/api/auth.py rename to synapse/api/auth/internal.py index 66e869bc2db2..813d537e537c 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth/internal.py @@ -1,4 +1,4 @@ -# Copyright 2014 - 2016 OpenMarket Ltd +# Copyright 2023 The Matrix.org Foundation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,113 +12,49 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional import pymacaroons from netaddr import IPAddress from twisted.web.server import Request -from synapse import event_auth -from synapse.api.constants import EventTypes, HistoryVisibility, Membership from synapse.api.errors import ( AuthError, Codes, InvalidClientTokenError, MissingClientTokenError, - UnstableSpecAuthError, ) -from synapse.appservice import ApplicationService from synapse.http import get_request_user_agent from synapse.http.site import SynapseRequest -from synapse.logging.opentracing import ( - active_span, - force_tracing, - start_active_span, - trace, -) +from synapse.logging.opentracing import active_span, force_tracing, start_active_span from synapse.types import Requester, create_requester from synapse.util.cancellation import cancellable +from . import GUEST_DEVICE_ID +from .base import BaseAuth + if TYPE_CHECKING: from synapse.server import HomeServer logger = logging.getLogger(__name__) -# guests always get this device id. -GUEST_DEVICE_ID = "guest_device" - - -class Auth: +class InternalAuth(BaseAuth): """ This class contains functions for authenticating users of our client-server API. """ def __init__(self, hs: "HomeServer"): - self.hs = hs + super().__init__(hs) self.clock = hs.get_clock() - self.store = hs.get_datastores().main self._account_validity_handler = hs.get_account_validity_handler() - self._storage_controllers = hs.get_storage_controllers() self._macaroon_generator = hs.get_macaroon_generator() self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips self._track_puppeted_user_ips = hs.config.api.track_puppeted_user_ips self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users - async def check_user_in_room( - self, - room_id: str, - requester: Requester, - allow_departed_users: bool = False, - ) -> Tuple[str, Optional[str]]: - """Check if the user is in the room, or was at some point. - Args: - room_id: The room to check. - - requester: The user making the request, according to the access token. - - current_state: Optional map of the current state of the room. - If provided then that map is used to check whether they are a - member of the room. Otherwise the current membership is - loaded from the database. - - allow_departed_users: if True, accept users that were previously - members but have now departed. - - Raises: - AuthError if the user is/was not in the room. - Returns: - The current membership of the user in the room and the - membership event ID of the user. - """ - - user_id = requester.user.to_string() - ( - membership, - member_event_id, - ) = await self.store.get_local_current_membership_for_user_in_room( - user_id=user_id, - room_id=room_id, - ) - - if membership: - if membership == Membership.JOIN: - return membership, member_event_id - - # XXX this looks totally bogus. Why do we not allow users who have been banned, - # or those who were members previously and have been re-invited? 
- if allow_departed_users and membership == Membership.LEAVE: - forgot = await self.store.did_forget(user_id, room_id) - if not forgot: - return membership, member_event_id - raise UnstableSpecAuthError( - 403, - "User %s not in room %s" % (user_id, room_id), - errcode=Codes.NOT_JOINED, - ) - @cancellable async def get_user_by_req( self, @@ -253,37 +189,6 @@ async def _wrapped_get_user_by_req( except KeyError: raise MissingClientTokenError() - async def validate_appservice_can_control_user_id( - self, app_service: ApplicationService, user_id: str - ) -> None: - """Validates that the app service is allowed to control - the given user. - - Args: - app_service: The app service that controls the user - user_id: The author MXID that the app service is controlling - - Raises: - AuthError: If the application service is not allowed to control the user - (user namespace regex does not match, wrong homeserver, etc) - or if the user has not been registered yet. - """ - - # It's ok if the app service is trying to use the sender from their registration - if app_service.sender == user_id: - pass - # Check to make sure the app service is allowed to control the user - elif not app_service.is_interested_in_user(user_id): - raise AuthError( - 403, - "Application service cannot masquerade as this user (%s)." % user_id, - ) - # Check to make sure the user is already registered on the homeserver - elif not (await self.store.get_user_by_id(user_id)): - raise AuthError( - 403, "Application service has not registered this user (%s)" % user_id - ) - @cancellable async def _get_appservice_user(self, request: Request) -> Optional[Requester]: """ @@ -462,141 +367,3 @@ async def is_server_admin(self, requester: Requester) -> bool: True if the user is an admin """ return await self.store.is_server_admin(requester.user) - - async def check_can_change_room_list( - self, room_id: str, requester: Requester - ) -> bool: - """Determine whether the user is allowed to edit the room's entry in the - published room list. - - Args: - room_id: The room to check. - requester: The user making the request, according to the access token. - """ - - is_admin = await self.is_server_admin(requester) - if is_admin: - return True - - await self.check_user_in_room(room_id, requester) - - # We currently require the user is a "moderator" in the room. We do this - # by checking if they would (theoretically) be able to change the - # m.room.canonical_alias events - - power_level_event = ( - await self._storage_controllers.state.get_current_state_event( - room_id, EventTypes.PowerLevels, "" - ) - ) - - auth_events = {} - if power_level_event: - auth_events[(EventTypes.PowerLevels, "")] = power_level_event - - send_level = event_auth.get_send_level( - EventTypes.CanonicalAlias, "", power_level_event - ) - user_level = event_auth.get_user_power_level( - requester.user.to_string(), auth_events - ) - - return user_level >= send_level - - @staticmethod - def has_access_token(request: Request) -> bool: - """Checks if the request has an access_token. - - Returns: - False if no access_token was given, True otherwise. - """ - # This will always be set by the time Twisted calls us. - assert request.args is not None - - query_params = request.args.get(b"access_token") - auth_headers = request.requestHeaders.getRawHeaders(b"Authorization") - return bool(query_params) or bool(auth_headers) - - @staticmethod - @cancellable - def get_access_token_from_request(request: Request) -> str: - """Extracts the access_token from the request. 
- - Args: - request: The http request. - Returns: - The access_token - Raises: - MissingClientTokenError: If there isn't a single access_token in the - request - """ - # This will always be set by the time Twisted calls us. - assert request.args is not None - - auth_headers = request.requestHeaders.getRawHeaders(b"Authorization") - query_params = request.args.get(b"access_token") - if auth_headers: - # Try the get the access_token from a "Authorization: Bearer" - # header - if query_params is not None: - raise MissingClientTokenError( - "Mixing Authorization headers and access_token query parameters." - ) - if len(auth_headers) > 1: - raise MissingClientTokenError("Too many Authorization headers.") - parts = auth_headers[0].split(b" ") - if parts[0] == b"Bearer" and len(parts) == 2: - return parts[1].decode("ascii") - else: - raise MissingClientTokenError("Invalid Authorization header.") - else: - # Try to get the access_token from the query params. - if not query_params: - raise MissingClientTokenError() - - return query_params[0].decode("ascii") - - @trace - async def check_user_in_room_or_world_readable( - self, room_id: str, requester: Requester, allow_departed_users: bool = False - ) -> Tuple[str, Optional[str]]: - """Checks that the user is or was in the room or the room is world - readable. If it isn't then an exception is raised. - - Args: - room_id: The room to check. - requester: The user making the request, according to the access token. - allow_departed_users: If True, accept users that were previously - members but have now departed. - - Returns: - Resolves to the current membership of the user in the room and the - membership event ID of the user. If the user is not in the room and - never has been, then `(Membership.JOIN, None)` is returned. - """ - - try: - # check_user_in_room will return the most recent membership - # event for the user if: - # * The user is a non-guest user, and was ever in the room - # * The user is a guest user, and has joined the room - # else it will throw. 
- return await self.check_user_in_room( - room_id, requester, allow_departed_users=allow_departed_users - ) - except AuthError: - visibility = await self._storage_controllers.state.get_current_state_event( - room_id, EventTypes.RoomHistoryVisibility, "" - ) - if ( - visibility - and visibility.content.get("history_visibility") - == HistoryVisibility.WORLD_READABLE - ): - return Membership.JOIN, None - raise UnstableSpecAuthError( - 403, - "User %s not in room %s, and room previews are disabled" - % (requester.user, room_id), - errcode=Codes.NOT_JOINED, - ) diff --git a/synapse/server.py b/synapse/server.py index cce5fb66ff02..df88af12a999 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -31,6 +31,7 @@ from twisted.web.resource import Resource from synapse.api.auth import Auth +from synapse.api.auth.internal import InternalAuth from synapse.api.auth_blocking import AuthBlocking from synapse.api.filtering import Filtering from synapse.api.ratelimiting import Ratelimiter, RequestRatelimiter @@ -427,7 +428,7 @@ def get_replication_notifier(self) -> ReplicationNotifier: @cache_in_self def get_auth(self) -> Auth: - return Auth(self) + return InternalAuth(self) @cache_in_self def get_auth_blocking(self) -> AuthBlocking: diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 6e36e73f0d75..3dac52d178ad 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -18,7 +18,7 @@ from twisted.test.proto_helpers import MemoryReactor -from synapse.api.auth import Auth +from synapse.api.auth.internal import InternalAuth from synapse.api.auth_blocking import AuthBlocking from synapse.api.constants import UserTypes from synapse.api.errors import ( @@ -48,7 +48,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: # have been called by the HomeserverTestCase machinery. hs.datastores.main = self.store # type: ignore[union-attr] hs.get_auth_handler().store = self.store - self.auth = Auth(hs) + self.auth = InternalAuth(hs) # AuthBlocking reads from the hs' config on initialization. 
We need to # modify its config instead of the hs' diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 73822b07a599..8d8584609b8c 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -17,7 +17,7 @@ from twisted.test.proto_helpers import MemoryReactor -from synapse.api.auth import Auth +from synapse.api.auth.internal import InternalAuth from synapse.api.constants import UserTypes from synapse.api.errors import ( CodeMessageException, @@ -683,7 +683,7 @@ def test_spam_checker_shadow_ban(self) -> None: request = Mock(args={}) request.args[b"access_token"] = [token.encode("ascii")] request.requestHeaders.getRawHeaders = mock_getRawHeaders() - auth = Auth(self.hs) + auth = InternalAuth(self.hs) requester = self.get_success(auth.get_user_by_req(request)) self.assertTrue(requester.shadow_banned) diff --git a/tests/test_state.py b/tests/test_state.py index ddf59916b172..7a49b8795320 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -28,7 +28,7 @@ from twisted.internet import defer -from synapse.api.auth import Auth +from synapse.api.auth.internal import InternalAuth from synapse.api.constants import EventTypes, Membership from synapse.api.room_versions import RoomVersions from synapse.events import EventBase, make_event_from_dict @@ -240,7 +240,7 @@ def setUp(self) -> None: hs.get_macaroon_generator.return_value = MacaroonGenerator( clock, "tesths", b"verysecret" ) - hs.get_auth.return_value = Auth(hs) + hs.get_auth.return_value = InternalAuth(hs) hs.get_state_resolution_handler = lambda: StateResolutionHandler(hs) hs.get_storage_controllers.return_value = storage_controllers From 765244faeef9e20c573d2c7935f05f76aeca1c28 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 13 Sep 2022 17:54:32 +0200 Subject: [PATCH 003/153] Initial MSC3964 support: delegation of auth to OIDC server --- synapse/api/auth/oauth_delegated.py | 227 ++++++++++++++++++++++++++++ synapse/config/auth.py | 30 +++- synapse/server.py | 4 + 3 files changed, 260 insertions(+), 1 deletion(-) create mode 100644 synapse/api/auth/oauth_delegated.py diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py new file mode 100644 index 000000000000..b3b5c29a949e --- /dev/null +++ b/synapse/api/auth/oauth_delegated.py @@ -0,0 +1,227 @@ +# Copyright 2023 The Matrix.org Foundation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import logging +from typing import TYPE_CHECKING, Any, Dict, List, Optional +from urllib.parse import urlencode + +from authlib.oauth2 import ClientAuth +from authlib.oauth2.auth import encode_client_secret_basic, encode_client_secret_post +from authlib.oauth2.rfc7523 import ClientSecretJWT, PrivateKeyJWT, private_key_jwt_sign +from authlib.oauth2.rfc7662 import IntrospectionToken +from authlib.oidc.discovery import OpenIDProviderMetadata, get_well_known_url + +from twisted.web.client import readBody +from twisted.web.http_headers import Headers + +from synapse.api.auth.base import BaseAuth +from synapse.api.errors import AuthError, StoreError +from synapse.http.site import SynapseRequest +from synapse.logging.context import make_deferred_yieldable +from synapse.types import Requester, UserID, create_requester +from synapse.util import json_decoder +from synapse.util.caches.cached_call import RetryOnExceptionCachedCall + +if TYPE_CHECKING: + from synapse.server import HomeServer + +logger = logging.getLogger(__name__) + + +def scope_to_list(scope: str) -> List[str]: + """Convert a scope string to a list of scope tokens""" + return scope.strip().split(" ") + + +class PrivateKeyJWTWithKid(PrivateKeyJWT): + """An implementation of the private_key_jwt client auth method that includes a kid header. + + This is needed because some providers (Keycloak) require the kid header to figure + out which key to use to verify the signature. + """ + + def sign(self, auth: Any, token_endpoint: str) -> bytes: + return private_key_jwt_sign( + auth.client_secret, + client_id=auth.client_id, + token_endpoint=token_endpoint, + claims=self.claims, + header={"kid": auth.client_secret["kid"]}, + ) + + +class OAuthDelegatedAuth(BaseAuth): + AUTH_METHODS = { + "client_secret_post": encode_client_secret_post, + "client_secret_basic": encode_client_secret_basic, + "client_secret_jwt": ClientSecretJWT(), + "private_key_jwt": PrivateKeyJWTWithKid(), + } + + def __init__(self, hs: "HomeServer"): + super().__init__(hs) + + self._config = hs.config.auth + assert self._config.oauth_delegation_enabled, "OAuth delegation is not enabled" + assert self._config.oauth_delegation_issuer, "No issuer provided" + assert self._config.oauth_delegation_client_id, "No client_id provided" + assert self._config.oauth_delegation_client_secret, "No client_secret provided" + assert ( + self._config.oauth_delegation_client_auth_method + in OAuthDelegatedAuth.AUTH_METHODS + ), "Invalid client_auth_method" + + self._http_client = hs.get_proxied_http_client() + self._hostname = hs.hostname + + self._issuer_metadata = RetryOnExceptionCachedCall(self._load_metadata) + secret = self._config.oauth_delegation_client_secret + self._client_auth = ClientAuth( + self._config.oauth_delegation_client_id, + secret, + OAuthDelegatedAuth.AUTH_METHODS[ + self._config.oauth_delegation_client_auth_method + ], + ) + + async def _load_metadata(self) -> OpenIDProviderMetadata: + if self._config.oauth_delegation_issuer_metadata is not None: + return OpenIDProviderMetadata( + **self._config.oauth_delegation_issuer_metadata + ) + url = get_well_known_url(self._config.oauth_delegation_issuer, external=True) + response = await self._http_client.get_json(url) + metadata = OpenIDProviderMetadata(**response) + # metadata.validate_introspection_endpoint() + return metadata + + async def _introspect_token(self, token: str) -> IntrospectionToken: + metadata = await self._issuer_metadata.get() + introspection_endpoint = metadata.get("introspection_endpoint") + raw_headers: 
Dict[str, str] = { + "Content-Type": "application/x-www-form-urlencoded", + "User-Agent": str(self._http_client.user_agent, "utf-8"), + "Accept": "application/json", + } + + args = {"token": token, "token_type_hint": "access_token"} + body = urlencode(args, True) + + # Fill the body/headers with credentials + uri, raw_headers, body = self._client_auth.prepare( + method="POST", uri=introspection_endpoint, headers=raw_headers, body=body + ) + headers = Headers({k: [v] for (k, v) in raw_headers.items()}) + + # Do the actual request + # We're not using the SimpleHttpClient util methods as we don't want to + # check the HTTP status code and we do the body encoding ourself. + response = await self._http_client.request( + method="POST", + uri=uri, + data=body.encode("utf-8"), + headers=headers, + ) + + resp_body = await make_deferred_yieldable(readBody(response)) + # TODO: Let's not worry about 5xx errors & co. for now and just try + # decoding that as JSON. We should also do some validation of the + # response + resp = json_decoder.decode(resp_body.decode("utf-8")) + return IntrospectionToken(**resp) + + async def get_user_by_req( + self, + request: SynapseRequest, + allow_guest: bool = False, + allow_expired: bool = False, + ) -> Requester: + access_token = self.get_access_token_from_request(request) + return await self.get_user_by_access_token(access_token, allow_expired) + + async def get_user_by_access_token( + self, + token: str, + allow_expired: bool = False, + ) -> Requester: + introspection_result = await self._introspect_token(token) + + logger.info(f"Introspection result: {introspection_result!r}") + + # TODO: introspection verification should be more extensive, especially: + # - verify the scopes + # - verify the audience + if not introspection_result.get("active"): + raise AuthError( + 403, + "Invalid access token", + ) + + # TODO: claim mapping should be configurable + username: Optional[str] = introspection_result.get("username") + if username is None or not isinstance(username, str): + raise AuthError( + 500, + "Invalid username claim in the introspection result", + ) + + # Let's look at the scope + scope: List[str] = scope_to_list(introspection_result.get("scope", "")) + device_id = None + # Find device_id in scope + for tok in scope: + if tok.startswith("urn:matrix:org.matrix.msc2967.client:device:"): + parts = tok.split(":") + if len(parts) == 5: + device_id = parts[4] + + user_id = UserID(username, self._hostname) + user_info = await self.store.get_userinfo_by_id(user_id=user_id.to_string()) + + # If the user does not exist, we should create it on the fly + # TODO: we could use SCIM to provision users ahead of time and listen + # for SCIM SET events if those ever become standard: + # https://datatracker.ietf.org/doc/html/draft-hunt-scim-notify-00 + if not user_info: + await self.store.register_user(user_id=user_id.to_string()) + user_info = await self.store.get_userinfo_by_id(user_id=user_id.to_string()) + if not user_info: + raise AuthError( + 500, + "Could not create user on the fly", + ) + + if device_id: + # Create the device on the fly if it does not exist + try: + await self.store.get_device( + user_id=user_id.to_string(), device_id=device_id + ) + except StoreError: + await self.store.store_device( + user_id=user_id.to_string(), + device_id=device_id, + initial_device_display_name="OIDC-native client", + ) + + # TODO: there is a few things missing in the requester here, which still need + # to be figured out, like: + # - impersonation, with the `authenticated_entity`, 
which is used for + # rate-limiting, MAU limits, etc. + # - shadow-banning, with the `shadow_banned` flag + # - a proper solution for appservices, which still needs to be figured out in + # the context of MSC3861 + return create_requester( + user_id=user_id, + device_id=device_id, + ) diff --git a/synapse/config/auth.py b/synapse/config/auth.py index 35774962c0be..25b5cc60dcf1 100644 --- a/synapse/config/auth.py +++ b/synapse/config/auth.py @@ -14,9 +14,11 @@ # limitations under the License. from typing import Any +from authlib.jose.rfc7517 import JsonWebKey + from synapse.types import JsonDict -from ._base import Config +from ._base import Config, ConfigError class AuthConfig(Config): @@ -53,3 +55,29 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.ui_auth_session_timeout = self.parse_duration( ui_auth.get("session_timeout", 0) ) + + oauth_delegation = config.get("oauth_delegation", {}) + self.oauth_delegation_enabled = oauth_delegation.get("enabled", False) + self.oauth_delegation_issuer = oauth_delegation.get("issuer", "") + self.oauth_delegation_issuer_metadata = oauth_delegation.get("issuer_metadata") + self.oauth_delegation_account = oauth_delegation.get("account", "") + self.oauth_delegation_client_id = oauth_delegation.get("client_id", "") + self.oauth_delegation_client_secret = oauth_delegation.get("client_secret", "") + self.oauth_delegation_client_auth_method = oauth_delegation.get( + "client_auth_method", "client_secret_post" + ) + + self.password_enabled = password_config.get( + "enabled", not self.oauth_delegation_enabled + ) + + if self.oauth_delegation_client_auth_method == "private_key_jwt": + self.oauth_delegation_client_secret = JsonWebKey.import_key( + self.oauth_delegation_client_secret + ) + + # If we are delegating via OAuth then password cannot be supported as well + if self.oauth_delegation_enabled and self.password_enabled: + raise ConfigError( + "Password auth cannot be enabled when OAuth delegation is enabled" + ) diff --git a/synapse/server.py b/synapse/server.py index df88af12a999..1c82500f3024 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -428,6 +428,10 @@ def get_replication_notifier(self) -> ReplicationNotifier: @cache_in_self def get_auth(self) -> Auth: + if self.config.auth.oauth_delegation_enabled: + from synapse.api.auth.oauth_delegated import OAuthDelegatedAuth + + return OAuthDelegatedAuth(self) return InternalAuth(self) @cache_in_self From 8f576aa462684e13b20dc380e759a76e6db821b6 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 16 May 2023 15:36:40 +0200 Subject: [PATCH 004/153] Expose the public keys used for client authentication on an endpoint --- synapse/rest/synapse/client/__init__.py | 6 +++ synapse/rest/synapse/client/jwks.py | 72 +++++++++++++++++++++++++ 2 files changed, 78 insertions(+) create mode 100644 synapse/rest/synapse/client/jwks.py diff --git a/synapse/rest/synapse/client/__init__.py b/synapse/rest/synapse/client/__init__.py index e55924f5979b..dcfd0ad6aac6 100644 --- a/synapse/rest/synapse/client/__init__.py +++ b/synapse/rest/synapse/client/__init__.py @@ -46,6 +46,12 @@ def build_synapse_client_resource_tree(hs: "HomeServer") -> Mapping[str, Resourc "/_synapse/client/unsubscribe": UnsubscribeResource(hs), } + # Expose the JWKS endpoint if OAuth2 delegation is enabled + if hs.config.auth.oauth_delegation_enabled: + from synapse.rest.synapse.client.jwks import JwksResource + + resources["/_synapse/jwks"] = JwksResource(hs) + # provider-specific SSO bits. 
Only load these if they are enabled, since they # rely on optional dependencies. if hs.config.oidc.oidc_enabled: diff --git a/synapse/rest/synapse/client/jwks.py b/synapse/rest/synapse/client/jwks.py new file mode 100644 index 000000000000..818585843eaa --- /dev/null +++ b/synapse/rest/synapse/client/jwks.py @@ -0,0 +1,72 @@ +# Copyright 2022 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import logging +from typing import TYPE_CHECKING, Tuple + +from synapse.http.server import DirectServeJsonResource +from synapse.http.site import SynapseRequest +from synapse.types import JsonDict + +if TYPE_CHECKING: + from synapse.server import HomeServer + +logger = logging.getLogger(__name__) + + +class JwksResource(DirectServeJsonResource): + def __init__(self, hs: "HomeServer"): + from authlib.jose.rfc7517 import Key + + super().__init__(extract_context=True) + + # Parameters that are allowed to be exposed in the public key. + # This is done manually, because authlib's private to public key conversion + # is unreliable depending on the version. Instead, we just serialize the private + # key and only keep the public parameters. + # List from https://www.iana.org/assignments/jose/jose.xhtml#web-key-parameters + public_parameters = { + "kty", + "use", + "key_ops", + "alg", + "kid", + "x5u", + "x5c", + "x5t", + "x5t#S256", + "crv", + "x", + "y", + "n", + "e", + "ext", + } + + secret = hs.config.auth.oauth_delegation_client_secret + + if isinstance(secret, Key): + private_key = secret.as_dict() + public_key = { + k: v for k, v in private_key.items() if k in public_parameters + } + keys = [public_key] + else: + keys = [] + + self.res = { + "keys": keys, + } + + async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + return 200, self.res From e82ec6d00819253d15d22a41ba3b75ad77dce98f Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Thu, 18 Nov 2021 15:21:00 +0100 Subject: [PATCH 005/153] MSC2965: OIDC Provider discovery via well-known document --- synapse/rest/well_known.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py index e2174fdfea03..fd3b17a5ad51 100644 --- a/synapse/rest/well_known.py +++ b/synapse/rest/well_known.py @@ -44,6 +44,15 @@ def get_well_known(self) -> Optional[JsonDict]: "base_url": self._config.registration.default_identity_server } + if self._config.auth.oauth_delegation_enabled: + result["org.matrix.msc2965.authentication"] = { + "issuer": self._config.auth.oauth_delegation_issuer + } + if self._config.auth.oauth_delegation_account != "": + result["org.matrix.msc2965.authentication"][ + "account" + ] = self._config.auth.oauth_delegation_account + if self._config.server.extra_well_known_client_content: for ( key, From c5cf1b421d8e0d765f812880ff41fe5d244a0919 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 17 Jun 2022 16:58:05 +0200 Subject: [PATCH 006/153] Save the scopes in the requester --- synapse/api/auth/oauth_delegated.py | 1 + 
synapse/types/__init__.py | 8 ++++++++ tests/api/test_auth.py | 2 ++ 3 files changed, 11 insertions(+) diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py index b3b5c29a949e..2715127e3285 100644 --- a/synapse/api/auth/oauth_delegated.py +++ b/synapse/api/auth/oauth_delegated.py @@ -224,4 +224,5 @@ async def get_user_by_access_token( return create_requester( user_id=user_id, device_id=device_id, + scope=scope, ) diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index 42baf8ac6bb4..dfc95e8ebb8a 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -131,6 +131,7 @@ class Requester: user: "UserID" access_token_id: Optional[int] is_guest: bool + scope: Set[str] shadow_banned: bool device_id: Optional[str] app_service: Optional["ApplicationService"] @@ -147,6 +148,7 @@ def serialize(self) -> Dict[str, Any]: "user_id": self.user.to_string(), "access_token_id": self.access_token_id, "is_guest": self.is_guest, + "scope": list(self.scope), "shadow_banned": self.shadow_banned, "device_id": self.device_id, "app_server_id": self.app_service.id if self.app_service else None, @@ -175,6 +177,7 @@ def deserialize( user=UserID.from_string(input["user_id"]), access_token_id=input["access_token_id"], is_guest=input["is_guest"], + scope=set(input["scope"]), shadow_banned=input["shadow_banned"], device_id=input["device_id"], app_service=appservice, @@ -186,6 +189,7 @@ def create_requester( user_id: Union[str, "UserID"], access_token_id: Optional[int] = None, is_guest: bool = False, + scope: StrCollection = (), shadow_banned: bool = False, device_id: Optional[str] = None, app_service: Optional["ApplicationService"] = None, @@ -199,6 +203,7 @@ def create_requester( access_token_id: *ID* of the access token used for this request, or None if it came via the appservice API or similar is_guest: True if the user making this request is a guest user + scope: the scope of the access token used for this request, if any shadow_banned: True if the user making this request is shadow-banned. 
device_id: device_id which was set at authentication time app_service: the AS requesting on behalf of the user @@ -215,10 +220,13 @@ def create_requester( if authenticated_entity is None: authenticated_entity = user_id.to_string() + scope = set(scope) + return Requester( user_id, access_token_id, is_guest, + scope, shadow_banned, device_id, app_service, diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 3dac52d178ad..cdb0048122a1 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -426,6 +426,7 @@ def test_blocking_mau__appservice_requester_allowed_when_not_tracking_ips( access_token_id=None, device_id="FOOBAR", is_guest=False, + scope=set(), shadow_banned=False, app_service=appservice, authenticated_entity="@appservice:server", @@ -456,6 +457,7 @@ def test_blocking_mau__appservice_requester_disallowed_when_tracking_ips( access_token_id=None, device_id="FOOBAR", is_guest=False, + scope=set(), shadow_banned=False, app_service=appservice, authenticated_entity="@appservice:server", From 7628dbf4e9b48d9714ccbd0530af579d9c290fed Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Mon, 20 Jun 2022 11:17:48 +0200 Subject: [PATCH 007/153] Handle the Synapse admin scope --- synapse/api/auth/oauth_delegated.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py index 2715127e3285..ff1f395e5864 100644 --- a/synapse/api/auth/oauth_delegated.py +++ b/synapse/api/auth/oauth_delegated.py @@ -140,6 +140,9 @@ async def _introspect_token(self, token: str) -> IntrospectionToken: resp = json_decoder.decode(resp_body.decode("utf-8")) return IntrospectionToken(**resp) + async def is_server_admin(self, requester: Requester) -> bool: + return "urn:synapse:admin:*" in requester.scope + async def get_user_by_req( self, request: SynapseRequest, From f9cd549f6485620381443f2b4b75a1bd0a88d39f Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 13 Sep 2022 16:13:20 +0200 Subject: [PATCH 008/153] Record the `sub` claims as an external_id --- synapse/api/auth/oauth_delegated.py | 59 ++++++++++++++++++----------- 1 file changed, 37 insertions(+), 22 deletions(-) diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py index ff1f395e5864..5565ef0a1a73 100644 --- a/synapse/api/auth/oauth_delegated.py +++ b/synapse/api/auth/oauth_delegated.py @@ -68,6 +68,8 @@ class OAuthDelegatedAuth(BaseAuth): "private_key_jwt": PrivateKeyJWTWithKid(), } + EXTERNAL_ID_PROVIDER = "oauth-delegated" + def __init__(self, hs: "HomeServer"): super().__init__(hs) @@ -170,13 +172,42 @@ async def get_user_by_access_token( "Invalid access token", ) - # TODO: claim mapping should be configurable - username: Optional[str] = introspection_result.get("username") - if username is None or not isinstance(username, str): - raise AuthError( - 500, - "Invalid username claim in the introspection result", + # Match via the sub claim + sub: Optional[str] = introspection_result.get("sub") + if sub is None: + raise AuthError(500, "Invalid sub claim in the introspection result") + + user_id_str = await self.store.get_user_by_external_id( + OAuthDelegatedAuth.EXTERNAL_ID_PROVIDER, sub + ) + if user_id_str is None: + # If we could not find a user via the external_id, it either does not exist, + # or the external_id was never recorded + + # TODO: claim mapping should be configurable + username: Optional[str] = introspection_result.get("username") + if username is None or not isinstance(username, str): + raise AuthError( + 500, + 
"Invalid username claim in the introspection result", + ) + user_id = UserID(username, self._hostname) + + # First try to find a user from the username claim + user_info = await self.store.get_userinfo_by_id(user_id=user_id.to_string()) + if user_info is None: + # If the user does not exist, we should create it on the fly + # TODO: we could use SCIM to provision users ahead of time and listen + # for SCIM SET events if those ever become standard: + # https://datatracker.ietf.org/doc/html/draft-hunt-scim-notify-00 + await self.store.register_user(user_id=user_id.to_string()) + + # And record the sub as external_id + await self.store.record_user_external_id( + OAuthDelegatedAuth.EXTERNAL_ID_PROVIDER, sub, user_id.to_string() ) + else: + user_id = UserID.from_string(user_id_str) # Let's look at the scope scope: List[str] = scope_to_list(introspection_result.get("scope", "")) @@ -188,22 +219,6 @@ async def get_user_by_access_token( if len(parts) == 5: device_id = parts[4] - user_id = UserID(username, self._hostname) - user_info = await self.store.get_userinfo_by_id(user_id=user_id.to_string()) - - # If the user does not exist, we should create it on the fly - # TODO: we could use SCIM to provision users ahead of time and listen - # for SCIM SET events if those ever become standard: - # https://datatracker.ietf.org/doc/html/draft-hunt-scim-notify-00 - if not user_info: - await self.store.register_user(user_id=user_id.to_string()) - user_info = await self.store.get_userinfo_by_id(user_id=user_id.to_string()) - if not user_info: - raise AuthError( - 500, - "Could not create user on the fly", - ) - if device_id: # Create the device on the fly if it does not exist try: From d20669971a5be17776a2991c77f5348662bb3902 Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Tue, 20 Sep 2022 12:54:18 +0100 Subject: [PATCH 009/153] Use `name` claim as display name when registering users on the fly. This makes is so that the `name` claim got when introspecting the token is used as the display name when registering a user on the fly. 
--- synapse/api/auth/oauth_delegated.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py index 5565ef0a1a73..9e01e3fadc82 100644 --- a/synapse/api/auth/oauth_delegated.py +++ b/synapse/api/auth/oauth_delegated.py @@ -200,7 +200,14 @@ async def get_user_by_access_token( # TODO: we could use SCIM to provision users ahead of time and listen # for SCIM SET events if those ever become standard: # https://datatracker.ietf.org/doc/html/draft-hunt-scim-notify-00 - await self.store.register_user(user_id=user_id.to_string()) + + # TODO: claim mapping should be configurable + # If present, use the name claim as the displayname + name: Optional[str] = introspection_result.get("name") + + await self.store.register_user( + user_id=user_id.to_string(), create_profile_with_displayname=name + ) # And record the sub as external_id await self.store.record_user_external_id( From a1374b5c70fc8520930a1777dc131403812d7967 Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Wed, 16 Nov 2022 11:05:05 +0000 Subject: [PATCH 010/153] MSC2967: Check access token scope for use as user and add guest support --- synapse/api/auth/oauth_delegated.py | 30 +++++++++++++++++++---------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py index 9e01e3fadc82..cfa178218cb8 100644 --- a/synapse/api/auth/oauth_delegated.py +++ b/synapse/api/auth/oauth_delegated.py @@ -25,7 +25,7 @@ from twisted.web.http_headers import Headers from synapse.api.auth.base import BaseAuth -from synapse.api.errors import AuthError, StoreError +from synapse.api.errors import AuthError, InvalidClientTokenError, StoreError from synapse.http.site import SynapseRequest from synapse.logging.context import make_deferred_yieldable from synapse.types import Requester, UserID, create_requester @@ -164,18 +164,29 @@ async def get_user_by_access_token( logger.info(f"Introspection result: {introspection_result!r}") # TODO: introspection verification should be more extensive, especially: - # - verify the scopes # - verify the audience if not introspection_result.get("active"): - raise AuthError( - 403, - "Invalid access token", - ) + raise InvalidClientTokenError("Token is not active") + + # Let's look at the scope + scope: List[str] = scope_to_list(introspection_result.get("scope", "")) + + # Determine type of user based on presence of particular scopes + has_admin_scope = "urn:synapse:admin:*" in scope + has_user_scope = "urn:matrix:org.matrix.msc2967.client:api:*" in scope + has_guest_scope = "urn:matrix:org.matrix.msc2967.client:api:guest" in scope + is_user = has_user_scope or has_admin_scope + is_guest = has_guest_scope and not is_user + + if not is_user and not is_guest: + raise InvalidClientTokenError("No scope in token granting user rights") # Match via the sub claim sub: Optional[str] = introspection_result.get("sub") if sub is None: - raise AuthError(500, "Invalid sub claim in the introspection result") + raise InvalidClientTokenError( + "Invalid sub claim in the introspection result" + ) user_id_str = await self.store.get_user_by_external_id( OAuthDelegatedAuth.EXTERNAL_ID_PROVIDER, sub @@ -216,10 +227,8 @@ async def get_user_by_access_token( else: user_id = UserID.from_string(user_id_str) - # Let's look at the scope - scope: List[str] = scope_to_list(introspection_result.get("scope", "")) - device_id = None # Find device_id in scope + device_id = None for tok in scope: 
if tok.startswith("urn:matrix:org.matrix.msc2967.client:device:"): parts = tok.split(":") @@ -250,4 +259,5 @@ async def get_user_by_access_token( user_id=user_id, device_id=device_id, scope=scope, + is_guest=is_guest, ) From 28a9663bdf092541250ae1209f201e57b663dc81 Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Wed, 16 Nov 2022 17:44:13 +0000 Subject: [PATCH 011/153] Initial tests for OAuth delegation --- tests/handlers/test_oauth_delegation.py | 345 ++++++++++++++++++++++++ 1 file changed, 345 insertions(+) create mode 100644 tests/handlers/test_oauth_delegation.py diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py new file mode 100644 index 000000000000..54f48948196a --- /dev/null +++ b/tests/handlers/test_oauth_delegation.py @@ -0,0 +1,345 @@ +# Copyright 2022 Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Any, Dict +from unittest.mock import ANY, Mock +from urllib.parse import parse_qs + +from twisted.test.proto_helpers import MemoryReactor + +from synapse.api.errors import InvalidClientTokenError +from synapse.server import HomeServer +from synapse.types import JsonDict +from synapse.util import Clock + +from tests.test_utils import FakeResponse, get_awaitable_result, simple_async_mock +from tests.unittest import HomeserverTestCase, skip_unless +from tests.utils import mock_getRawHeaders + +try: + import authlib # noqa: F401 + + HAS_AUTHLIB = True +except ImportError: + HAS_AUTHLIB = False + + +# These are a few constants that are used as config parameters in the tests. 
+SERVER_NAME = "test" +ISSUER = "https://issuer/" +CLIENT_ID = "test-client-id" +CLIENT_SECRET = "test-client-secret" +BASE_URL = "https://synapse/" +SCOPES = ["openid"] + +AUTHORIZATION_ENDPOINT = ISSUER + "authorize" +TOKEN_ENDPOINT = ISSUER + "token" +USERINFO_ENDPOINT = ISSUER + "userinfo" +WELL_KNOWN = ISSUER + ".well-known/openid-configuration" +JWKS_URI = ISSUER + ".well-known/jwks.json" +INTROSPECTION_ENDPOINT = ISSUER + "introspect" + +SYNAPSE_ADMIN_SCOPE = "urn:synapse:admin:*" +MATRIX_USER_SCOPE = "urn:matrix:org.matrix.msc2967.client:api:*" +MATRIX_GUEST_SCOPE = "urn:matrix:org.matrix.msc2967.client:api:guest" +DEVICE = "AABBCCDD" +MATRIX_DEVICE_SCOPE = "urn:matrix:org.matrix.msc2967.client:device:" + DEVICE +SUBJECT = "abc-def-ghi" +USERNAME = "test-user" + + +async def get_json(url: str) -> JsonDict: + # Mock get_json calls to handle jwks & oidc discovery endpoints + if url == WELL_KNOWN: + # Minimal discovery document, as defined in OpenID.Discovery + # https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata + return { + "issuer": ISSUER, + "authorization_endpoint": AUTHORIZATION_ENDPOINT, + "token_endpoint": TOKEN_ENDPOINT, + "jwks_uri": JWKS_URI, + "userinfo_endpoint": USERINFO_ENDPOINT, + "introspection_endpoint": INTROSPECTION_ENDPOINT, + "response_types_supported": ["code"], + "subject_types_supported": ["public"], + "id_token_signing_alg_values_supported": ["RS256"], + } + elif url == JWKS_URI: + return {"keys": []} + + return {} + + +@skip_unless(HAS_AUTHLIB, "requires authlib") +class MSC3861OAuthDelegation(HomeserverTestCase): + def default_config(self) -> Dict[str, Any]: + config = super().default_config() + config["public_baseurl"] = BASE_URL + config["oauth_delegation"] = { + "enabled": True, + "issuer": ISSUER, + "client_id": CLIENT_ID, + "client_auth_method": "client_secret_post", + "client_secret": CLIENT_SECRET, + } + return config + + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: + self.http_client = Mock(spec=["get_json"]) + self.http_client.get_json.side_effect = get_json + self.http_client.user_agent = b"Synapse Test" + + hs = self.setup_test_homeserver(proxied_http_client=self.http_client) + + self.auth = hs.get_auth() + + return hs + + def _assertParams(self) -> None: + """Assert that the request parameters are correct.""" + params = parse_qs(self.http_client.request.call_args[1]["data"].decode("utf-8")) + self.assertEqual(params["token"], ["mockAccessToken"]) + self.assertEqual(params["client_id"], [CLIENT_ID]) + self.assertEqual(params["client_secret"], [CLIENT_SECRET]) + + def test_inactive_token(self) -> None: + """The handler should return a 403 where the token is inactive.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={"active": False}, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + self.get_failure(self.auth.get_user_by_req(request), InvalidClientTokenError) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + + def test_active_no_scope(self) -> None: + """The handler should return a 403 where no scope is given.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={"active": True}, + ) + ) + request = Mock(args={}) + 
request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + self.get_failure(self.auth.get_user_by_req(request), InvalidClientTokenError) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + + def test_active_user_no_subject(self) -> None: + """The handler should return a 500 when no subject is present.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={"active": True, "scope": " ".join([MATRIX_USER_SCOPE])}, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + self.get_failure(self.auth.get_user_by_req(request), InvalidClientTokenError) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + + def test_active_no_user_scope(self) -> None: + """The handler should return a 500 when no subject is present.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([MATRIX_DEVICE_SCOPE]), + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + self.get_failure(self.auth.get_user_by_req(request), InvalidClientTokenError) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + + def test_active_admin(self) -> None: + """The handler should return a requester with admin rights.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([SYNAPSE_ADMIN_SCOPE]), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + requester = self.get_success(self.auth.get_user_by_req(request)) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + self.assertEqual(requester.user.to_string(), "@%s:%s" % (USERNAME, SERVER_NAME)) + self.assertEqual(requester.is_guest, False) + self.assertEqual(requester.device_id, None) + self.assertEqual( + get_awaitable_result(self.auth.is_server_admin(requester)), True + ) + + def test_active_admin_highest_privilege(self) -> None: + """The handler should resolve to the most permissive scope.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join( + [SYNAPSE_ADMIN_SCOPE, MATRIX_USER_SCOPE, MATRIX_GUEST_SCOPE] + ), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + requester = self.get_success(self.auth.get_user_by_req(request)) + 
self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + self.assertEqual(requester.user.to_string(), "@%s:%s" % (USERNAME, SERVER_NAME)) + self.assertEqual(requester.is_guest, False) + self.assertEqual(requester.device_id, None) + self.assertEqual( + get_awaitable_result(self.auth.is_server_admin(requester)), True + ) + + def test_active_user(self) -> None: + """The handler should return a requester with normal user rights.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([MATRIX_USER_SCOPE]), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + requester = self.get_success(self.auth.get_user_by_req(request)) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + self.assertEqual(requester.user.to_string(), "@%s:%s" % (USERNAME, SERVER_NAME)) + self.assertEqual(requester.is_guest, False) + self.assertEqual(requester.device_id, None) + self.assertEqual( + get_awaitable_result(self.auth.is_server_admin(requester)), False + ) + + def test_active_user_with_device(self) -> None: + """The handler should return a requester with normal user rights and a device ID.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([MATRIX_USER_SCOPE, MATRIX_DEVICE_SCOPE]), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + requester = self.get_success(self.auth.get_user_by_req(request)) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + self.assertEqual(requester.user.to_string(), "@%s:%s" % (USERNAME, SERVER_NAME)) + self.assertEqual(requester.is_guest, False) + self.assertEqual( + get_awaitable_result(self.auth.is_server_admin(requester)), False + ) + self.assertEqual(requester.device_id, DEVICE) + + def test_active_guest_with_device(self) -> None: + """The handler should return a requester with guest user rights and a device ID.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([MATRIX_GUEST_SCOPE, MATRIX_DEVICE_SCOPE]), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + requester = self.get_success(self.auth.get_user_by_req(request)) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + self.assertEqual(requester.user.to_string(), "@%s:%s" % (USERNAME, SERVER_NAME)) + self.assertEqual(requester.is_guest, True) + self.assertEqual( + 
get_awaitable_result(self.auth.is_server_admin(requester)), False + ) + self.assertEqual(requester.device_id, DEVICE) From 5fe96082d09d1af3dc33b62b6a47a6baca02703c Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Thu, 17 Nov 2022 14:34:11 +0000 Subject: [PATCH 012/153] Actually enforce guest + return www-authenticate header --- synapse/api/auth/oauth_delegated.py | 18 +++++++++-- synapse/api/errors.py | 28 ++++++++++++++-- synapse/http/server.py | 6 ++++ tests/handlers/test_oauth_delegation.py | 43 +++++++++++++++++++++++-- 4 files changed, 87 insertions(+), 8 deletions(-) diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py index cfa178218cb8..9cb6eb7f7944 100644 --- a/synapse/api/auth/oauth_delegated.py +++ b/synapse/api/auth/oauth_delegated.py @@ -25,7 +25,12 @@ from twisted.web.http_headers import Headers from synapse.api.auth.base import BaseAuth -from synapse.api.errors import AuthError, InvalidClientTokenError, StoreError +from synapse.api.errors import ( + AuthError, + InvalidClientTokenError, + OAuthInsufficientScopeError, + StoreError, +) from synapse.http.site import SynapseRequest from synapse.logging.context import make_deferred_yieldable from synapse.types import Requester, UserID, create_requester @@ -152,7 +157,16 @@ async def get_user_by_req( allow_expired: bool = False, ) -> Requester: access_token = self.get_access_token_from_request(request) - return await self.get_user_by_access_token(access_token, allow_expired) + + # TODO: we probably want to assert the allow_guest inside this call so that we don't provision the user if they don't have enough permission: + requester = await self.get_user_by_access_token(access_token, allow_expired) + + if not allow_guest and requester.is_guest: + raise OAuthInsufficientScopeError( + ["urn:matrix:org.matrix.msc2967.client:api:*"] + ) + + return requester async def get_user_by_access_token( self, diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 8c7c94b04568..af894243f8d3 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -119,14 +119,20 @@ class Codes(str, Enum): class CodeMessageException(RuntimeError): - """An exception with integer code and message string attributes. + """An exception with integer code, a message string attributes and optional headers. Attributes: code: HTTP error code msg: string describing the error + headers: optional response headers to send """ - def __init__(self, code: Union[int, HTTPStatus], msg: str): + def __init__( + self, + code: Union[int, HTTPStatus], + msg: str, + headers: Optional[Dict[str, str]] = None, + ): super().__init__("%d: %s" % (code, msg)) # Some calls to this method pass instances of http.HTTPStatus for `code`. @@ -137,6 +143,7 @@ def __init__(self, code: Union[int, HTTPStatus], msg: str): # To eliminate this behaviour, we convert them to their integer equivalents here. self.code = int(code) self.msg = msg + self.headers = headers class RedirectException(CodeMessageException): @@ -182,6 +189,7 @@ def __init__( msg: str, errcode: str = Codes.UNKNOWN, additional_fields: Optional[Dict] = None, + headers: Optional[Dict[str, str]] = None, ): """Constructs a synapse error. @@ -190,7 +198,7 @@ def __init__( msg: The human-readable error message. 
errcode: The matrix error code e.g 'M_FORBIDDEN' """ - super().__init__(code, msg) + super().__init__(code, msg, headers) self.errcode = errcode if additional_fields is None: self._additional_fields: Dict = {} @@ -335,6 +343,20 @@ def __init__( super().__init__(code, msg, errcode, additional_fields) +class OAuthInsufficientScopeError(SynapseError): + """An error raised when the caller does not have sufficient scope to perform the requested action""" + + def __init__( + self, + required_scopes: List[str], + ): + headers = { + "WWW-Authenticate": 'Bearer error="insufficient_scope", scope="%s"' + % (" ".join(required_scopes)) + } + super().__init__(401, "Insufficient scope", Codes.FORBIDDEN, None, headers) + + class UnstableSpecAuthError(AuthError): """An error raised when a new error code is being proposed to replace a previous one. This error will return a "org.matrix.unstable.errcode" property with the new error code, diff --git a/synapse/http/server.py b/synapse/http/server.py index 101dc2e747d0..04768c6a237f 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -111,6 +111,9 @@ def return_json_error( exc: SynapseError = f.value # type: ignore error_code = exc.code error_dict = exc.error_dict(config) + if exc.headers is not None: + for header, value in exc.headers.items(): + request.setHeader(header, value) logger.info("%s SynapseError: %s - %s", request, error_code, exc.msg) elif f.check(CancelledError): error_code = HTTP_STATUS_REQUEST_CANCELLED @@ -172,6 +175,9 @@ def return_html_error( cme: CodeMessageException = f.value # type: ignore code = cme.code msg = cme.msg + if cme.headers is not None: + for header, value in cme.headers.items(): + request.setHeader(header, value) if isinstance(cme, RedirectException): logger.info("%s redirect to %s", request, cme.location) diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index 54f48948196a..bca9db16267b 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -17,7 +17,8 @@ from twisted.test.proto_helpers import MemoryReactor -from synapse.api.errors import InvalidClientTokenError +from synapse.api.errors import InvalidClientTokenError, OAuthInsufficientScopeError +from synapse.rest.client import devices from synapse.server import HomeServer from synapse.types import JsonDict from synapse.util import Clock @@ -82,6 +83,10 @@ async def get_json(url: str) -> JsonDict: @skip_unless(HAS_AUTHLIB, "requires authlib") class MSC3861OAuthDelegation(HomeserverTestCase): + servlets = [ + devices.register_servlets, + ] + def default_config(self) -> Dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL @@ -314,7 +319,37 @@ def test_active_user_with_device(self) -> None: ) self.assertEqual(requester.device_id, DEVICE) - def test_active_guest_with_device(self) -> None: + def test_active_guest_not_allowed(self) -> None: + """The handler should return an insufficient scope error.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([MATRIX_GUEST_SCOPE, MATRIX_DEVICE_SCOPE]), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + error = self.get_failure( + self.auth.get_user_by_req(request), OAuthInsufficientScopeError + ) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + 
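        # The client-visible result is a 401 whose WWW-Authenticate header names
        # the scope that would have been required; that header is asserted below.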
self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + self.assertEqual( + getattr(error.value, "headers", {})["WWW-Authenticate"], + 'Bearer error="insufficient_scope", scope="urn:matrix:org.matrix.msc2967.client:api:*"', + ) + + def test_active_guest_allowed(self) -> None: """The handler should return a requester with guest user rights and a device ID.""" self.http_client.request = simple_async_mock( @@ -331,7 +366,9 @@ def test_active_guest_with_device(self) -> None: request = Mock(args={}) request.args[b"access_token"] = [b"mockAccessToken"] request.requestHeaders.getRawHeaders = mock_getRawHeaders() - requester = self.get_success(self.auth.get_user_by_req(request)) + requester = self.get_success( + self.auth.get_user_by_req(request, allow_guest=True) + ) self.http_client.get_json.assert_called_once_with(WELL_KNOWN) self.http_client.request.assert_called_once_with( method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY From 31691d61511d41286272d779727502e396ce86eb Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Wed, 10 May 2023 16:08:43 +0200 Subject: [PATCH 013/153] Disable account related endpoints when using OAuth delegation --- synapse/handlers/auth.py | 8 +- synapse/rest/client/account.py | 24 ++-- synapse/rest/client/devices.py | 11 +- synapse/rest/client/keys.py | 30 +++- synapse/rest/client/login.py | 3 + synapse/rest/client/logout.py | 3 + synapse/rest/client/register.py | 3 + tests/handlers/test_oauth_delegation.py | 180 +++++++++++++++++++++++- 8 files changed, 243 insertions(+), 19 deletions(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index d001f2fb2f0b..a53984be336f 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -274,6 +274,8 @@ def __init__(self, hs: "HomeServer"): # response. self._extra_attributes: Dict[str, SsoLoginExtraAttributes] = {} + self.oauth_delegation_enabled = hs.config.auth.oauth_delegation_enabled + async def validate_user_via_ui_auth( self, requester: Requester, @@ -322,8 +324,12 @@ async def validate_user_via_ui_auth( LimitExceededError if the ratelimiter's failed request count for this user is too high to proceed - """ + if self.oauth_delegation_enabled: + raise SynapseError( + HTTPStatus.INTERNAL_SERVER_ERROR, "UIA shouldn't be used with MSC3861" + ) + if not requester.access_token_id: raise ValueError("Cannot validate a user without an access token") if can_skip_ui_auth and self._ui_auth_session_timeout: diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py index 3d0c55daa05c..ccd1f7509cde 100644 --- a/synapse/rest/client/account.py +++ b/synapse/rest/client/account.py @@ -27,6 +27,7 @@ from synapse.api.errors import ( Codes, InteractiveAuthIncompleteError, + NotFoundError, SynapseError, ThreepidValidationError, ) @@ -600,6 +601,9 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: # ThreePidBindRestServelet.PostBody with an `alias_generator` to handle # `threePidCreds` versus `three_pid_creds`. 
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + if self.hs.config.auth.oauth_delegation_enabled: + raise NotFoundError(errcode=Codes.UNRECOGNIZED) + if not self.hs.config.registration.enable_3pid_changes: raise SynapseError( 400, "3PID changes are disabled on this server", Codes.FORBIDDEN @@ -890,19 +894,21 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if hs.config.worker.worker_app is None: - EmailPasswordRequestTokenRestServlet(hs).register(http_server) - PasswordRestServlet(hs).register(http_server) - DeactivateAccountRestServlet(hs).register(http_server) - EmailThreepidRequestTokenRestServlet(hs).register(http_server) - MsisdnThreepidRequestTokenRestServlet(hs).register(http_server) - AddThreepidEmailSubmitTokenServlet(hs).register(http_server) - AddThreepidMsisdnSubmitTokenServlet(hs).register(http_server) + if not hs.config.auth.oauth_delegation_enabled: + EmailPasswordRequestTokenRestServlet(hs).register(http_server) + DeactivateAccountRestServlet(hs).register(http_server) + PasswordRestServlet(hs).register(http_server) + EmailThreepidRequestTokenRestServlet(hs).register(http_server) + MsisdnThreepidRequestTokenRestServlet(hs).register(http_server) + AddThreepidEmailSubmitTokenServlet(hs).register(http_server) + AddThreepidMsisdnSubmitTokenServlet(hs).register(http_server) ThreepidRestServlet(hs).register(http_server) if hs.config.worker.worker_app is None: - ThreepidAddRestServlet(hs).register(http_server) ThreepidBindRestServlet(hs).register(http_server) ThreepidUnbindRestServlet(hs).register(http_server) - ThreepidDeleteRestServlet(hs).register(http_server) + if not hs.config.auth.oauth_delegation_enabled: + ThreepidAddRestServlet(hs).register(http_server) + ThreepidDeleteRestServlet(hs).register(http_server) WhoamiRestServlet(hs).register(http_server) if hs.config.worker.worker_app is None and hs.config.experimental.msc3720_enabled: diff --git a/synapse/rest/client/devices.py b/synapse/rest/client/devices.py index e97d0bf475ba..00e9bff43f5c 100644 --- a/synapse/rest/client/devices.py +++ b/synapse/rest/client/devices.py @@ -19,7 +19,7 @@ from pydantic import Extra, StrictStr from synapse.api import errors -from synapse.api.errors import NotFoundError +from synapse.api.errors import NotFoundError, UnrecognizedRequestError from synapse.handlers.device import DeviceHandler from synapse.http.server import HttpServer from synapse.http.servlet import ( @@ -135,6 +135,7 @@ def __init__(self, hs: "HomeServer"): self.device_handler = handler self.auth_handler = hs.get_auth_handler() self._msc3852_enabled = hs.config.experimental.msc3852_enabled + self.oauth_delegation_enabled = hs.config.auth.oauth_delegation_enabled async def on_GET( self, request: SynapseRequest, device_id: str @@ -166,6 +167,9 @@ class DeleteBody(RequestBodyModel): async def on_DELETE( self, request: SynapseRequest, device_id: str ) -> Tuple[int, JsonDict]: + if self.oauth_delegation_enabled: + raise UnrecognizedRequestError(code=404) + requester = await self.auth.get_user_by_req(request) try: @@ -344,7 +348,10 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - if hs.config.worker.worker_app is None: + if ( + hs.config.worker.worker_app is None + and not hs.config.auth.oauth_delegation_enabled + ): DeleteDevicesRestServlet(hs).register(http_server) 
DevicesRestServlet(hs).register(http_server) if hs.config.worker.worker_app is None: diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index 413edd8a4d7b..c3ca83c0c88e 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -17,9 +17,10 @@ import logging import re from collections import Counter +from http import HTTPStatus from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple -from synapse.api.errors import InvalidAPICallError, SynapseError +from synapse.api.errors import Codes, InvalidAPICallError, SynapseError from synapse.http.server import HttpServer from synapse.http.servlet import ( RestServlet, @@ -375,9 +376,29 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: user_id = requester.user.to_string() body = parse_json_object_from_request(request) - if self.hs.config.experimental.msc3967_enabled: - if await self.e2e_keys_handler.is_cross_signing_set_up_for_user(user_id): - # If we already have a master key then cross signing is set up and we require UIA to reset + is_cross_signing_setup = ( + await self.e2e_keys_handler.is_cross_signing_set_up_for_user(user_id) + ) + + # Before MSC3967 we required UIA both when setting up cross signing for the + # first time and when resetting the device signing key. With MSC3967 we only + # require UIA when resetting cross-signing, and not when setting up the first + # time. Because there is no UIA in MSC3861, for now we throw an error if the + # user tries to reset the device signing key when MSC3861 is enabled, but allow + # first-time setup. + if self.hs.config.auth.oauth_delegation_enabled: + # There is no way to reset the device signing key with MSC3861 + if is_cross_signing_setup: + raise SynapseError( + HTTPStatus.NOT_IMPLEMENTED, + "Resetting cross signing keys is not yet supported with MSC3861", + Codes.UNRECOGNIZED, + ) + # But first-time setup is fine + + elif self.hs.config.experimental.msc3967_enabled: + # If we already have a master key then cross signing is set up and we require UIA to reset + if is_cross_signing_setup: await self.auth_handler.validate_user_via_ui_auth( requester, request, @@ -387,6 +408,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: can_skip_ui_auth=False, ) # Otherwise we don't require UIA since we are setting up cross signing for first time + else: # Previous behaviour is to always require UIA but allow it to be skipped await self.auth_handler.validate_user_via_ui_auth( diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index 6ca61ffbd08e..4d0eabcb840e 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -633,6 +633,9 @@ async def on_GET(self, request: SynapseRequest) -> None: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: + if hs.config.auth.oauth_delegation_enabled: + return + LoginRestServlet(hs).register(http_server) if ( hs.config.worker.worker_app is None diff --git a/synapse/rest/client/logout.py b/synapse/rest/client/logout.py index 6d34625ad5d6..b64a6d5961b7 100644 --- a/synapse/rest/client/logout.py +++ b/synapse/rest/client/logout.py @@ -80,5 +80,8 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: + if hs.config.auth.oauth_delegation_enabled: + return + LogoutRestServlet(hs).register(http_server) LogoutAllRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index 
7f84a17e2964..6866988c3809 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -955,6 +955,9 @@ def _calculate_registration_flows( def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: + if hs.config.auth.oauth_delegation_enabled: + return + if hs.config.worker.worker_app is None: EmailRegisterRequestTokenRestServlet(hs).register(http_server) MsisdnRegisterRequestTokenRestServlet(hs).register(http_server) diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index bca9db16267b..ee1bc5ca7ac6 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -11,14 +11,27 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Dict + +from http import HTTPStatus +from typing import Any, Dict, Union from unittest.mock import ANY, Mock from urllib.parse import parse_qs +from signedjson.key import ( + encode_verify_key_base64, + generate_signing_key, + get_verify_key, +) +from signedjson.sign import sign_json + from twisted.test.proto_helpers import MemoryReactor -from synapse.api.errors import InvalidClientTokenError, OAuthInsufficientScopeError -from synapse.rest.client import devices +from synapse.api.errors import ( + Codes, + InvalidClientTokenError, + OAuthInsufficientScopeError, +) +from synapse.rest.client import account, devices, keys, login, logout, register from synapse.server import HomeServer from synapse.types import JsonDict from synapse.util import Clock @@ -57,6 +70,7 @@ MATRIX_DEVICE_SCOPE = "urn:matrix:org.matrix.msc2967.client:device:" + DEVICE SUBJECT = "abc-def-ghi" USERNAME = "test-user" +USER_ID = "@" + USERNAME + ":" + SERVER_NAME async def get_json(url: str) -> JsonDict: @@ -84,7 +98,12 @@ async def get_json(url: str) -> JsonDict: @skip_unless(HAS_AUTHLIB, "requires authlib") class MSC3861OAuthDelegation(HomeserverTestCase): servlets = [ + account.register_servlets, devices.register_servlets, + keys.register_servlets, + register.register_servlets, + login.register_servlets, + logout.register_servlets, ] def default_config(self) -> Dict[str, Any]: @@ -380,3 +399,158 @@ def test_active_guest_allowed(self) -> None: get_awaitable_result(self.auth.is_server_admin(requester)), False ) self.assertEqual(requester.device_id, DEVICE) + + def make_device_keys(self, user_id: str, device_id: str) -> JsonDict: + # We only generate a master key to simplify the test. 
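        # A sketch of the shape this helper returns (sign_json fills in the
        # "signatures" block; key identifiers abbreviated):
        #
        #     {
        #         "master_key": {
        #             "user_id": "@test-user:test",
        #             "usage": ["master"],
        #             "keys": {"ed25519:<base64 verify key>": "<base64 verify key>"},
        #             "signatures": {"@test-user:test": {"ed25519:...": "<signature>"}},
        #         }
        #     }
        #
        # Uploading this once sets up cross-signing; the second upload in
        # test_cross_signing below is expected to hit the new MSC3861 branch in
        # keys.py and fail with 501 NOT_IMPLEMENTED.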
+ master_signing_key = generate_signing_key(device_id) + master_verify_key = encode_verify_key_base64(get_verify_key(master_signing_key)) + + return { + "master_key": sign_json( + { + "user_id": user_id, + "usage": ["master"], + "keys": {"ed25519:" + master_verify_key: master_verify_key}, + }, + user_id, + master_signing_key, + ), + } + + def test_cross_signing(self) -> None: + """Try uploading device keys with OAuth delegation enabled.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([MATRIX_USER_SCOPE, MATRIX_DEVICE_SCOPE]), + "username": USERNAME, + }, + ) + ) + keys_upload_body = self.make_device_keys(USER_ID, DEVICE) + channel = self.make_request( + "POST", + "/_matrix/client/v3/keys/device_signing/upload", + keys_upload_body, + access_token="mockAccessToken", + ) + + self.assertEqual(channel.code, 200, channel.json_body) + + channel = self.make_request( + "POST", + "/_matrix/client/v3/keys/device_signing/upload", + keys_upload_body, + access_token="mockAccessToken", + ) + + self.assertEqual(channel.code, HTTPStatus.NOT_IMPLEMENTED, channel.json_body) + + def expect_unauthorized( + self, method: str, path: str, content: Union[bytes, str, JsonDict] = "" + ) -> None: + channel = self.make_request(method, path, content, shorthand=False) + + self.assertEqual(channel.code, 401, channel.json_body) + + def expect_unrecognized( + self, method: str, path: str, content: Union[bytes, str, JsonDict] = "" + ) -> None: + channel = self.make_request(method, path, content) + + self.assertEqual(channel.code, 404, channel.json_body) + self.assertEqual( + channel.json_body["errcode"], Codes.UNRECOGNIZED, channel.json_body + ) + + def test_uia_endpoints(self) -> None: + """Test that endpoints that were removed in MSC2964 are no longer available.""" + + # This is just an endpoint that should remain visible (but requires auth): + self.expect_unauthorized("GET", "/_matrix/client/v3/devices") + + # This remains usable, but will require a uia scope: + self.expect_unauthorized( + "POST", "/_matrix/client/v3/keys/device_signing/upload" + ) + + def test_3pid_endpoints(self) -> None: + """Test that 3pid account management endpoints that were removed in MSC2964 are no longer available.""" + + # Remains and requires auth: + self.expect_unauthorized("GET", "/_matrix/client/v3/account/3pid") + self.expect_unauthorized( + "POST", + "/_matrix/client/v3/account/3pid/bind", + { + "client_secret": "foo", + "id_access_token": "bar", + "id_server": "foo", + "sid": "bar", + }, + ) + self.expect_unauthorized("POST", "/_matrix/client/v3/account/3pid/unbind", {}) + + # These are gone: + self.expect_unrecognized( + "POST", "/_matrix/client/v3/account/3pid" + ) # deprecated + self.expect_unrecognized("POST", "/_matrix/client/v3/account/3pid/add") + self.expect_unrecognized("POST", "/_matrix/client/v3/account/3pid/delete") + self.expect_unrecognized( + "POST", "/_matrix/client/v3/account/3pid/email/requestToken" + ) + self.expect_unrecognized( + "POST", "/_matrix/client/v3/account/3pid/msisdn/requestToken" + ) + + def test_account_management_endpoints_removed(self) -> None: + """Test that account management endpoints that were removed in MSC2964 are no longer available.""" + self.expect_unrecognized("POST", "/_matrix/client/v3/account/deactivate") + self.expect_unrecognized("POST", "/_matrix/client/v3/account/password") + self.expect_unrecognized( + "POST", "/_matrix/client/v3/account/password/email/requestToken" + 
) + self.expect_unrecognized( + "POST", "/_matrix/client/v3/account/password/msisdn/requestToken" + ) + + def test_registration_endpoints_removed(self) -> None: + """Test that registration endpoints that were removed in MSC2964 are no longer available.""" + self.expect_unrecognized( + "GET", "/_matrix/client/v1/register/m.login.registration_token/validity" + ) + self.expect_unrecognized("POST", "/_matrix/client/v3/register") + self.expect_unrecognized("GET", "/_matrix/client/v3/register") + self.expect_unrecognized("GET", "/_matrix/client/v3/register/available") + self.expect_unrecognized( + "POST", "/_matrix/client/v3/register/email/requestToken" + ) + self.expect_unrecognized( + "POST", "/_matrix/client/v3/register/msisdn/requestToken" + ) + + def test_session_management_endpoints_removed(self) -> None: + """Test that session management endpoints that were removed in MSC2964 are no longer available.""" + self.expect_unrecognized("GET", "/_matrix/client/v3/login") + self.expect_unrecognized("POST", "/_matrix/client/v3/login") + self.expect_unrecognized("GET", "/_matrix/client/v3/login/sso/redirect") + self.expect_unrecognized("POST", "/_matrix/client/v3/logout") + self.expect_unrecognized("POST", "/_matrix/client/v3/logout/all") + self.expect_unrecognized("POST", "/_matrix/client/v3/refresh") + self.expect_unrecognized("GET", "/_matrix/static/client/login") + + def test_device_management_endpoints_removed(self) -> None: + """Test that device management endpoints that were removed in MSC2964 are no longer available.""" + self.expect_unrecognized("POST", "/_matrix/client/v3/delete_devices") + self.expect_unrecognized("DELETE", "/_matrix/client/v3/devices/{DEVICE}") + + def test_openid_endpoints_removed(self) -> None: + """Test that OpenID id_token endpoints that were removed in MSC2964 are no longer available.""" + self.expect_unrecognized( + "POST", "/_matrix/client/v3/user/{USERNAME}/openid/request_token" + ) From 03920bdd4e9390d74762ecd923ddf0d6c75d222e Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Mon, 6 Feb 2023 17:12:42 +0000 Subject: [PATCH 014/153] Test MSC2965 implementation: well-known discovery document --- tests/rest/test_well_known.py | 38 +++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/tests/rest/test_well_known.py b/tests/rest/test_well_known.py index 2091b08d89cf..34333d88df25 100644 --- a/tests/rest/test_well_known.py +++ b/tests/rest/test_well_known.py @@ -17,6 +17,13 @@ from tests import unittest +try: + import authlib # noqa: F401 + + HAS_AUTHLIB = True +except ImportError: + HAS_AUTHLIB = False + class WellKnownTests(unittest.HomeserverTestCase): def create_test_resource(self) -> Resource: @@ -96,3 +103,34 @@ def test_server_well_known_disabled(self) -> None: "GET", "/.well-known/matrix/server", shorthand=False ) self.assertEqual(channel.code, 404) + + @unittest.skip_unless(HAS_AUTHLIB, "requires authlib") + @unittest.override_config( + { + "public_baseurl": "https://homeserver", # this is only required so that client well known is served + "oauth_delegation": { + "enabled": True, + "issuer": "https://issuer", + "account": "https://my-account.issuer", + "client_id": "id", + "client_auth_method": "client_secret_post", + "client_secret": "secret", + }, + } + ) + def test_client_well_known_msc3861_oauth_delegation(self) -> None: + channel = self.make_request( + "GET", "/.well-known/matrix/client", shorthand=False + ) + + self.assertEqual(channel.code, 200) + self.assertEqual( + channel.json_body, + { + "m.homeserver": {"base_url": 
"https://homeserver/"}, + "org.matrix.msc2965.authentication": { + "issuer": "https://issuer", + "account": "https://my-account.issuer", + }, + }, + ) From 249f4a338dde0c1bcde5e14121d8d9fa156f185f Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Tue, 9 May 2023 16:20:04 +0200 Subject: [PATCH 015/153] Refactor config to be an experimental feature Also enforce you can't combine it with incompatible config options --- ...auth_delegated.py => msc3861_delegated.py} | 53 ++--- synapse/config/auth.py | 39 +--- synapse/config/experimental.py | 193 ++++++++++++++++- synapse/handlers/auth.py | 4 +- synapse/module_api/__init__.py | 7 + synapse/rest/client/account.py | 6 +- synapse/rest/client/devices.py | 6 +- synapse/rest/client/keys.py | 2 +- synapse/rest/client/login.py | 2 +- synapse/rest/client/logout.py | 2 +- synapse/rest/client/register.py | 2 +- synapse/rest/synapse/client/__init__.py | 2 +- synapse/rest/synapse/client/jwks.py | 8 +- synapse/rest/well_known.py | 9 +- synapse/server.py | 6 +- tests/config/test_oauth_delegation.py | 202 ++++++++++++++++++ tests/handlers/test_oauth_delegation.py | 15 +- tests/rest/test_well_known.py | 17 +- 18 files changed, 479 insertions(+), 96 deletions(-) rename synapse/api/auth/{oauth_delegated.py => msc3861_delegated.py} (87%) create mode 100644 tests/config/test_oauth_delegation.py diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/msc3861_delegated.py similarity index 87% rename from synapse/api/auth/oauth_delegated.py rename to synapse/api/auth/msc3861_delegated.py index 9cb6eb7f7944..4ca3280bd3c4 100644 --- a/synapse/api/auth/oauth_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -65,7 +65,7 @@ def sign(self, auth: Any, token_endpoint: str) -> bytes: ) -class OAuthDelegatedAuth(BaseAuth): +class MSC3861DelegatedAuth(BaseAuth): AUTH_METHODS = { "client_secret_post": encode_client_secret_post, "client_secret_basic": encode_client_secret_basic, @@ -78,35 +78,38 @@ class OAuthDelegatedAuth(BaseAuth): def __init__(self, hs: "HomeServer"): super().__init__(hs) - self._config = hs.config.auth - assert self._config.oauth_delegation_enabled, "OAuth delegation is not enabled" - assert self._config.oauth_delegation_issuer, "No issuer provided" - assert self._config.oauth_delegation_client_id, "No client_id provided" - assert self._config.oauth_delegation_client_secret, "No client_secret provided" - assert ( - self._config.oauth_delegation_client_auth_method - in OAuthDelegatedAuth.AUTH_METHODS - ), "Invalid client_auth_method" + self._config = hs.config.experimental.msc3861 + auth_method = MSC3861DelegatedAuth.AUTH_METHODS.get( + self._config.client_auth_method.value, None + ) + # Those assertions are already checked when parsing the config + assert self._config.enabled, "OAuth delegation is not enabled" + assert self._config.issuer, "No issuer provided" + assert self._config.client_id, "No client_id provided" + assert auth_method is not None, "Invalid client_auth_method provided" self._http_client = hs.get_proxied_http_client() self._hostname = hs.hostname self._issuer_metadata = RetryOnExceptionCachedCall(self._load_metadata) - secret = self._config.oauth_delegation_client_secret - self._client_auth = ClientAuth( - self._config.oauth_delegation_client_id, - secret, - OAuthDelegatedAuth.AUTH_METHODS[ - self._config.oauth_delegation_client_auth_method - ], - ) - async def _load_metadata(self) -> OpenIDProviderMetadata: - if self._config.oauth_delegation_issuer_metadata is not None: - return OpenIDProviderMetadata( - 
**self._config.oauth_delegation_issuer_metadata + if isinstance(auth_method, PrivateKeyJWTWithKid): + # Use the JWK as the client secret when using the private_key_jwt method + assert self._config.jwk, "No JWK provided" + self._client_auth = ClientAuth( + self._config.client_id, self._config.jwk, auth_method ) - url = get_well_known_url(self._config.oauth_delegation_issuer, external=True) + else: + # Else use the client secret + assert self._config.client_secret, "No client_secret provided" + self._client_auth = ClientAuth( + self._config.client_id, self._config.client_secret, auth_method + ) + + async def _load_metadata(self) -> OpenIDProviderMetadata: + if self._config.issuer_metadata is not None: + return OpenIDProviderMetadata(**self._config.issuer_metadata) + url = get_well_known_url(self._config.issuer, external=True) response = await self._http_client.get_json(url) metadata = OpenIDProviderMetadata(**response) # metadata.validate_introspection_endpoint() @@ -203,7 +206,7 @@ async def get_user_by_access_token( ) user_id_str = await self.store.get_user_by_external_id( - OAuthDelegatedAuth.EXTERNAL_ID_PROVIDER, sub + MSC3861DelegatedAuth.EXTERNAL_ID_PROVIDER, sub ) if user_id_str is None: # If we could not find a user via the external_id, it either does not exist, @@ -236,7 +239,7 @@ async def get_user_by_access_token( # And record the sub as external_id await self.store.record_user_external_id( - OAuthDelegatedAuth.EXTERNAL_ID_PROVIDER, sub, user_id.to_string() + MSC3861DelegatedAuth.EXTERNAL_ID_PROVIDER, sub, user_id.to_string() ) else: user_id = UserID.from_string(user_id_str) diff --git a/synapse/config/auth.py b/synapse/config/auth.py index 25b5cc60dcf1..12e853980e3f 100644 --- a/synapse/config/auth.py +++ b/synapse/config/auth.py @@ -14,11 +14,9 @@ # limitations under the License. from typing import Any -from authlib.jose.rfc7517 import JsonWebKey - from synapse.types import JsonDict -from ._base import Config, ConfigError +from ._base import Config class AuthConfig(Config): @@ -31,7 +29,14 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: if password_config is None: password_config = {} - passwords_enabled = password_config.get("enabled", True) + # The default value of password_config.enabled is True, unless msc3861 is enabled. + msc3861_enabled = ( + config.get("experimental_features", {}) + .get("msc3861", {}) + .get("enabled", False) + ) + passwords_enabled = password_config.get("enabled", not msc3861_enabled) + # 'only_for_reauth' allows users who have previously set a password to use it, # even though passwords would otherwise be disabled. 
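        # For example, password_config = {"enabled": "only_for_reauth"} keeps
        # password reauthentication working for users who already set a password
        # while blocking password login for everyone else; when
        # experimental_features.msc3861 is enabled the default flips to passwords
        # being disabled entirely.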
passwords_for_reauth_only = passwords_enabled == "only_for_reauth" @@ -55,29 +60,3 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.ui_auth_session_timeout = self.parse_duration( ui_auth.get("session_timeout", 0) ) - - oauth_delegation = config.get("oauth_delegation", {}) - self.oauth_delegation_enabled = oauth_delegation.get("enabled", False) - self.oauth_delegation_issuer = oauth_delegation.get("issuer", "") - self.oauth_delegation_issuer_metadata = oauth_delegation.get("issuer_metadata") - self.oauth_delegation_account = oauth_delegation.get("account", "") - self.oauth_delegation_client_id = oauth_delegation.get("client_id", "") - self.oauth_delegation_client_secret = oauth_delegation.get("client_secret", "") - self.oauth_delegation_client_auth_method = oauth_delegation.get( - "client_auth_method", "client_secret_post" - ) - - self.password_enabled = password_config.get( - "enabled", not self.oauth_delegation_enabled - ) - - if self.oauth_delegation_client_auth_method == "private_key_jwt": - self.oauth_delegation_client_secret = JsonWebKey.import_key( - self.oauth_delegation_client_secret - ) - - # If we are delegating via OAuth then password cannot be supported as well - if self.oauth_delegation_enabled and self.password_enabled: - raise ConfigError( - "Password auth cannot be enabled when OAuth delegation is enabled" - ) diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index d769b7f6686e..b9607975f903 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -12,15 +12,196 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Optional +import enum +from typing import TYPE_CHECKING, Any, Optional import attr +import attr.validators from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions from synapse.config import ConfigError -from synapse.config._base import Config +from synapse.config._base import Config, RootConfig from synapse.types import JsonDict +# Determine whether authlib is installed. +try: + import authlib # noqa: F401 + + HAS_AUTHLIB = True +except ImportError: + HAS_AUTHLIB = False + +if TYPE_CHECKING: + # Only import this if we're type checking, as it might not be installed at runtime. + from authlib.jose.rfc7517 import JsonWebKey + + +class ClientAuthMethod(enum.Enum): + """List of supported client auth methods.""" + + CLIENT_SECRET_POST = "client_secret_post" + CLIENT_SECRET_BASIC = "client_secret_basic" + CLIENT_SECRET_JWT = "client_secret_jwt" + PRIVATE_KEY_JWT = "private_key_jwt" + + +def _parse_jwks(jwks: Optional[JsonDict]) -> Optional["JsonWebKey"]: + """A helper function to parse a JWK dict into a JsonWebKey.""" + + if jwks is None: + return None + + from authlib.jose.rfc7517 import JsonWebKey + + return JsonWebKey.import_key(jwks) + + +@attr.s(slots=True, frozen=True) +class MSC3861: + """Configuration for MSC3861: Matrix architecture change to delegate authentication via OIDC""" + + enabled: bool = attr.ib(default=False, validator=attr.validators.instance_of(bool)) + """Whether to enable MSC3861 auth delegation.""" + + @enabled.validator + def _check_enabled(self, attribute: attr.Attribute, value: bool) -> None: + # Only allow enabling MSC3861 if authlib is installed + if value and not HAS_AUTHLIB: + raise ConfigError( + "MSC3861 is enabled but authlib is not installed. " + "Please install authlib to use MSC3861." 
+ ) + + issuer: str = attr.ib(default="", validator=attr.validators.instance_of(str)) + """The URL of the OIDC Provider.""" + + issuer_metadata: Optional[JsonDict] = attr.ib(default=None) + """The issuer metadata to use, otherwise discovered from /.well-known/openid-configuration as per MSC2965.""" + + client_id: str = attr.ib( + default="", + validator=attr.validators.instance_of(str), + ) + """The client ID to use when calling the introspection endpoint.""" + + client_auth_method: ClientAuthMethod = attr.ib( + default=ClientAuthMethod.CLIENT_SECRET_POST, converter=ClientAuthMethod + ) + """The auth method used when calling the introspection endpoint.""" + + client_secret: Optional[str] = attr.ib( + default=None, + validator=attr.validators.optional(attr.validators.instance_of(str)), + ) + """ + The client secret to use when calling the introspection endpoint, + when using any of the client_secret_* client auth methods. + """ + + jwk: Optional["JsonWebKey"] = attr.ib(default=None, converter=_parse_jwks) + """ + The JWKS to use when calling the introspection endpoint, + when using the private_key_jwt client auth method. + """ + + @client_auth_method.validator + def _check_client_auth_method( + self, attribute: attr.Attribute, value: ClientAuthMethod + ) -> None: + # Check that the right client credentials are provided for the client auth method. + if not self.enabled: + return + + if value == ClientAuthMethod.PRIVATE_KEY_JWT and self.jwk is None: + raise ConfigError( + "A JWKS must be provided when using the private_key_jwt client auth method" + ) + + if ( + value + in ( + ClientAuthMethod.CLIENT_SECRET_POST, + ClientAuthMethod.CLIENT_SECRET_BASIC, + ClientAuthMethod.CLIENT_SECRET_JWT, + ) + and self.client_secret is None + ): + raise ConfigError( + f"A client secret must be provided when using the {value} client auth method" + ) + + account_management_url: Optional[str] = attr.ib( + default=None, + validator=attr.validators.optional(attr.validators.instance_of(str)), + ) + """The URL of the My Account page on the OIDC Provider as per MSC2965.""" + + def check_config_conflicts(self, root: RootConfig) -> None: + """Checks for any configuration conflicts with other parts of Synapse. + + Raises: + ConfigError: If there are any configuration conflicts. 
+ """ + + if not self.enabled: + return + + if ( + root.auth.password_enabled_for_reauth + or root.auth.password_enabled_for_login + ): + raise ConfigError( + "Password auth cannot be enabled when OAuth delegation is enabled" + ) + + if root.registration.enable_registration: + raise ConfigError( + "Registration cannot be enabled when OAuth delegation is enabled" + ) + + if ( + root.oidc.oidc_enabled + or root.saml2.saml2_enabled + or root.cas.cas_enabled + or root.jwt.jwt_enabled + ): + raise ConfigError("SSO cannot be enabled when OAuth delegation is enabled") + + if bool(root.authproviders.password_providers): + raise ConfigError( + "Password auth providers cannot be enabled when OAuth delegation is enabled" + ) + + if root.captcha.enable_registration_captcha: + raise ConfigError( + "CAPTCHA cannot be enabled when OAuth delegation is enabled" + ) + + if root.experimental.msc3882_enabled: + raise ConfigError( + "MSC3882 cannot be enabled when OAuth delegation is enabled" + ) + + if root.registration.refresh_token_lifetime: + raise ConfigError( + "refresh_token_lifetime cannot be set when OAuth delegation is enabled" + ) + + if root.registration.nonrefreshable_access_token_lifetime: + raise ConfigError( + "nonrefreshable_access_token_lifetime cannot be set when OAuth delegation is enabled" + ) + + if root.registration.session_lifetime: + raise ConfigError( + "session_lifetime cannot be set when OAuth delegation is enabled" + ) + + if not root.experimental.msc3970_enabled: + raise ConfigError( + "experimental_features.msc3970_enabled must be 'true' when OAuth delegation is enabled" + ) + @attr.s(auto_attribs=True, frozen=True, slots=True) class MSC3866Config: @@ -182,8 +363,14 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: "msc3981_recurse_relations", False ) + # MSC3861: Matrix architecture change to delegate authentication via OIDC + self.msc3861 = MSC3861(**experimental.get("msc3861", {})) + # MSC3970: Scope transaction IDs to devices - self.msc3970_enabled = experimental.get("msc3970_enabled", False) + self.msc3970_enabled = experimental.get("msc3970_enabled", self.msc3861.enabled) + + # Check that none of the other config options conflict with MSC3861 when enabled + self.msc3861.check_config_conflicts(self.root) # MSC4009: E.164 Matrix IDs self.msc4009_e164_mxids = experimental.get("msc4009_e164_mxids", False) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index a53984be336f..4f986d90cbd9 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -274,7 +274,7 @@ def __init__(self, hs: "HomeServer"): # response. 
self._extra_attributes: Dict[str, SsoLoginExtraAttributes] = {} - self.oauth_delegation_enabled = hs.config.auth.oauth_delegation_enabled + self.msc3861_oauth_delegation_enabled = hs.config.experimental.msc3861.enabled async def validate_user_via_ui_auth( self, @@ -325,7 +325,7 @@ async def validate_user_via_ui_auth( LimitExceededError if the ratelimiter's failed request count for this user is too high to proceed """ - if self.oauth_delegation_enabled: + if self.msc3861_oauth_delegation_enabled: raise SynapseError( HTTPStatus.INTERNAL_SERVER_ERROR, "UIA shouldn't be used with MSC3861" ) diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 0e9f366cba62..134bd2e62021 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -38,6 +38,7 @@ from synapse.api import errors from synapse.api.errors import SynapseError +from synapse.config import ConfigError from synapse.events import EventBase from synapse.events.presence_router import ( GET_INTERESTED_USERS_CALLBACK, @@ -252,6 +253,7 @@ def __init__(self, hs: "HomeServer", auth_handler: AuthHandler) -> None: self._device_handler = hs.get_device_handler() self.custom_template_dir = hs.config.server.custom_template_directory self._callbacks = hs.get_module_api_callbacks() + self.msc3861_oauth_delegation_enabled = hs.config.experimental.msc3861.enabled try: app_name = self._hs.config.email.email_app_name @@ -419,6 +421,11 @@ def register_password_auth_provider_callbacks( Added in Synapse v1.46.0. """ + if self.msc3861_oauth_delegation_enabled: + raise ConfigError( + "Cannot use password auth provider callbacks when OAuth delegation is enabled" + ) + return self._password_auth_provider.register_password_auth_provider_callbacks( check_3pid_auth=check_3pid_auth, on_logged_out=on_logged_out, diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py index ccd1f7509cde..679ab9f266c7 100644 --- a/synapse/rest/client/account.py +++ b/synapse/rest/client/account.py @@ -601,7 +601,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: # ThreePidBindRestServelet.PostBody with an `alias_generator` to handle # `threePidCreds` versus `three_pid_creds`. 
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: - if self.hs.config.auth.oauth_delegation_enabled: + if self.hs.config.experimental.msc3861.enabled: raise NotFoundError(errcode=Codes.UNRECOGNIZED) if not self.hs.config.registration.enable_3pid_changes: @@ -894,7 +894,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if hs.config.worker.worker_app is None: - if not hs.config.auth.oauth_delegation_enabled: + if not hs.config.experimental.msc3861.enabled: EmailPasswordRequestTokenRestServlet(hs).register(http_server) DeactivateAccountRestServlet(hs).register(http_server) PasswordRestServlet(hs).register(http_server) @@ -906,7 +906,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if hs.config.worker.worker_app is None: ThreepidBindRestServlet(hs).register(http_server) ThreepidUnbindRestServlet(hs).register(http_server) - if not hs.config.auth.oauth_delegation_enabled: + if not hs.config.experimental.msc3861.enabled: ThreepidAddRestServlet(hs).register(http_server) ThreepidDeleteRestServlet(hs).register(http_server) WhoamiRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/devices.py b/synapse/rest/client/devices.py index 00e9bff43f5c..38dff9703f96 100644 --- a/synapse/rest/client/devices.py +++ b/synapse/rest/client/devices.py @@ -135,7 +135,7 @@ def __init__(self, hs: "HomeServer"): self.device_handler = handler self.auth_handler = hs.get_auth_handler() self._msc3852_enabled = hs.config.experimental.msc3852_enabled - self.oauth_delegation_enabled = hs.config.auth.oauth_delegation_enabled + self._msc3861_oauth_delegation_enabled = hs.config.experimental.msc3861.enabled async def on_GET( self, request: SynapseRequest, device_id: str @@ -167,7 +167,7 @@ class DeleteBody(RequestBodyModel): async def on_DELETE( self, request: SynapseRequest, device_id: str ) -> Tuple[int, JsonDict]: - if self.oauth_delegation_enabled: + if self._msc3861_oauth_delegation_enabled: raise UnrecognizedRequestError(code=404) requester = await self.auth.get_user_by_req(request) @@ -350,7 +350,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if ( hs.config.worker.worker_app is None - and not hs.config.auth.oauth_delegation_enabled + and not hs.config.experimental.msc3861.enabled ): DeleteDevicesRestServlet(hs).register(http_server) DevicesRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index c3ca83c0c88e..70b8be1aa237 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -386,7 +386,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: # time. Because there is no UIA in MSC3861, for now we throw an error if the # user tries to reset the device signing key when MSC3861 is enabled, but allow # first-time setup. 
- if self.hs.config.auth.oauth_delegation_enabled: + if self.hs.config.experimental.msc3861.enabled: # There is no way to reset the device signing key with MSC3861 if is_cross_signing_setup: raise SynapseError( diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index 4d0eabcb840e..d4dc2462b9c0 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -633,7 +633,7 @@ async def on_GET(self, request: SynapseRequest) -> None: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - if hs.config.auth.oauth_delegation_enabled: + if hs.config.experimental.msc3861.enabled: return LoginRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/logout.py b/synapse/rest/client/logout.py index b64a6d5961b7..94ad90942f39 100644 --- a/synapse/rest/client/logout.py +++ b/synapse/rest/client/logout.py @@ -80,7 +80,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - if hs.config.auth.oauth_delegation_enabled: + if hs.config.experimental.msc3861.enabled: return LogoutRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index 6866988c3809..f8fb0e1dee46 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -955,7 +955,7 @@ def _calculate_registration_flows( def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - if hs.config.auth.oauth_delegation_enabled: + if hs.config.experimental.msc3861.enabled: return if hs.config.worker.worker_app is None: diff --git a/synapse/rest/synapse/client/__init__.py b/synapse/rest/synapse/client/__init__.py index dcfd0ad6aac6..57335fb913fc 100644 --- a/synapse/rest/synapse/client/__init__.py +++ b/synapse/rest/synapse/client/__init__.py @@ -47,7 +47,7 @@ def build_synapse_client_resource_tree(hs: "HomeServer") -> Mapping[str, Resourc } # Expose the JWKS endpoint if OAuth2 delegation is enabled - if hs.config.auth.oauth_delegation_enabled: + if hs.config.experimental.msc3861.enabled: from synapse.rest.synapse.client.jwks import JwksResource resources["/_synapse/jwks"] = JwksResource(hs) diff --git a/synapse/rest/synapse/client/jwks.py b/synapse/rest/synapse/client/jwks.py index 818585843eaa..7c0a1223fb82 100644 --- a/synapse/rest/synapse/client/jwks.py +++ b/synapse/rest/synapse/client/jwks.py @@ -26,8 +26,6 @@ class JwksResource(DirectServeJsonResource): def __init__(self, hs: "HomeServer"): - from authlib.jose.rfc7517 import Key - super().__init__(extract_context=True) # Parameters that are allowed to be exposed in the public key. 
@@ -53,10 +51,10 @@ def __init__(self, hs: "HomeServer"): "ext", } - secret = hs.config.auth.oauth_delegation_client_secret + key = hs.config.experimental.msc3861.jwk - if isinstance(secret, Key): - private_key = secret.as_dict() + if key is not None: + private_key = key.as_dict() public_key = { k: v for k, v in private_key.items() if k in public_parameters } diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py index fd3b17a5ad51..b8b4b5379b82 100644 --- a/synapse/rest/well_known.py +++ b/synapse/rest/well_known.py @@ -44,14 +44,15 @@ def get_well_known(self) -> Optional[JsonDict]: "base_url": self._config.registration.default_identity_server } - if self._config.auth.oauth_delegation_enabled: + # We use the MSC3861 values as they are used by multiple MSCs + if self._config.experimental.msc3861.enabled: result["org.matrix.msc2965.authentication"] = { - "issuer": self._config.auth.oauth_delegation_issuer + "issuer": self._config.experimental.msc3861.issuer } - if self._config.auth.oauth_delegation_account != "": + if self._config.experimental.msc3861.account_management_url is not None: result["org.matrix.msc2965.authentication"][ "account" - ] = self._config.auth.oauth_delegation_account + ] = self._config.experimental.msc3861.account_management_url if self._config.server.extra_well_known_client_content: for ( diff --git a/synapse/server.py b/synapse/server.py index 1c82500f3024..0f36ef69cb91 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -428,10 +428,10 @@ def get_replication_notifier(self) -> ReplicationNotifier: @cache_in_self def get_auth(self) -> Auth: - if self.config.auth.oauth_delegation_enabled: - from synapse.api.auth.oauth_delegated import OAuthDelegatedAuth + if self.config.experimental.msc3861.enabled: + from synapse.api.auth.msc3861_delegated import MSC3861DelegatedAuth - return OAuthDelegatedAuth(self) + return MSC3861DelegatedAuth(self) return InternalAuth(self) @cache_in_self diff --git a/tests/config/test_oauth_delegation.py b/tests/config/test_oauth_delegation.py new file mode 100644 index 000000000000..c5fc6d6ebb0b --- /dev/null +++ b/tests/config/test_oauth_delegation.py @@ -0,0 +1,202 @@ +# Copyright 2023 Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any, Dict +from unittest.mock import Mock + +from synapse.config import ConfigError +from synapse.module_api import ModuleApi +from synapse.types import JsonDict + +from tests.server import get_clock +from tests.unittest import HomeserverTestCase, override_config, skip_unless + +try: + import authlib # noqa: F401 + + HAS_AUTHLIB = True +except ImportError: + HAS_AUTHLIB = False + + +# These are a few constants that are used as config parameters in the tests. 
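
As an illustration of the well_known.py change above, a homeserver with MSC3861 enabled would serve a client well-known document along these lines; the issuer, account management URL and base_url below are placeholder values, and the exact payload depends on configuration:

# Sketch of the client well-known payload after the change to
# synapse/rest/well_known.py above (placeholder URLs, shown as a Python dict).
expected_well_known = {
    "m.homeserver": {"base_url": "https://synapse.example.com/"},
    "org.matrix.msc2965.authentication": {
        # Advertised only when the MSC3861 experimental feature is enabled.
        "issuer": "https://issuer/",
        "account": "https://my-account.issuer",
    },
}
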
+SERVER_NAME = "test" +ISSUER = "https://issuer/" +CLIENT_ID = "test-client-id" +CLIENT_SECRET = "test-client-secret" +BASE_URL = "https://synapse/" + + +class CustomAuthModule: + """A module which registers a password auth provider.""" + + @staticmethod + def parse_config(config: JsonDict) -> None: + pass + + def __init__(self, config: None, api: ModuleApi): + api.register_password_auth_provider_callbacks( + auth_checkers={("m.login.password", ("password",)): Mock()}, + ) + + +@skip_unless(HAS_AUTHLIB, "requires authlib") +class MSC3861OAuthDelegation(HomeserverTestCase): + """Test that the Homeserver fails to initialize if the config is invalid.""" + + def setUp(self) -> None: + self.reactor, self.clock = get_clock() + self._hs_args = {"clock": self.clock, "reactor": self.reactor} + + def default_config(self) -> Dict[str, Any]: + config = super().default_config() + config["public_baseurl"] = BASE_URL + if "experimental_features" not in config: + config["experimental_features"] = {} + config["experimental_features"]["msc3861"] = { + "enabled": True, + "issuer": ISSUER, + "client_id": CLIENT_ID, + "client_auth_method": "client_secret_post", + "client_secret": CLIENT_SECRET, + } + return config + + def test_registration_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "password_config": { + "enabled": True, + }, + } + ) + def test_password_config_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "oidc_providers": [ + { + "idp_id": "microsoft", + "idp_name": "Microsoft", + "issuer": "https://login.microsoftonline.com//v2.0", + "client_id": "", + "client_secret": "", + "scopes": ["openid", "profile"], + "authorization_endpoint": "https://login.microsoftonline.com//oauth2/v2.0/authorize", + "token_endpoint": "https://login.microsoftonline.com//oauth2/v2.0/token", + "userinfo_endpoint": "https://graph.microsoft.com/oidc/userinfo", + } + ], + } + ) + def test_oidc_sso_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "cas_config": { + "enabled": True, + "server_url": "https://cas-server.com", + "displayname_attribute": "name", + "required_attributes": {"userGroup": "staff", "department": "None"}, + }, + } + ) + def test_cas_sso_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "modules": [ + { + "module": f"{__name__}.{CustomAuthModule.__qualname__}", + "config": {}, + } + ], + } + ) + def test_auth_providers_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "jwt_config": { + "enabled": True, + "secret": "my-secret-token", + "algorithm": "HS256", + }, + } + ) + def test_jwt_auth_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "experimental_features": { + "msc3882_enabled": True, + }, + } + ) + def test_msc3882_auth_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "recaptcha_public_key": "test", + 
"recaptcha_private_key": "test", + "enable_registration_captcha": True, + } + ) + def test_captcha_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "refresh_token_lifetime": "24h", + "refreshable_access_token_lifetime": "10m", + "nonrefreshable_access_token_lifetime": "24h", + } + ) + def test_refreshable_tokens_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "session_lifetime": "24h", + } + ) + def test_session_lifetime_cannot_be_set(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index ee1bc5ca7ac6..081fef51ecd8 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -109,12 +109,15 @@ class MSC3861OAuthDelegation(HomeserverTestCase): def default_config(self) -> Dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL - config["oauth_delegation"] = { - "enabled": True, - "issuer": ISSUER, - "client_id": CLIENT_ID, - "client_auth_method": "client_secret_post", - "client_secret": CLIENT_SECRET, + config["disable_registration"] = True + config["experimental_features"] = { + "msc3861": { + "enabled": True, + "issuer": ISSUER, + "client_id": CLIENT_ID, + "client_auth_method": "client_secret_post", + "client_secret": CLIENT_SECRET, + } } return config diff --git a/tests/rest/test_well_known.py b/tests/rest/test_well_known.py index 34333d88df25..377243a1706d 100644 --- a/tests/rest/test_well_known.py +++ b/tests/rest/test_well_known.py @@ -108,14 +108,17 @@ def test_server_well_known_disabled(self) -> None: @unittest.override_config( { "public_baseurl": "https://homeserver", # this is only required so that client well known is served - "oauth_delegation": { - "enabled": True, - "issuer": "https://issuer", - "account": "https://my-account.issuer", - "client_id": "id", - "client_auth_method": "client_secret_post", - "client_secret": "secret", + "experimental_features": { + "msc3861": { + "enabled": True, + "issuer": "https://issuer", + "account_management_url": "https://my-account.issuer", + "client_id": "id", + "client_auth_method": "client_secret_post", + "client_secret": "secret", + }, }, + "disable_registration": True, } ) def test_client_well_known_msc3861_oauth_delegation(self) -> None: From bad1f2cd3558d908b579b6c191bcd7bebecd32be Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Tue, 7 Feb 2023 12:55:54 +0000 Subject: [PATCH 016/153] Tests for JWKS endpoint --- tests/config/test_oauth_delegation.py | 117 +++++++++++++++++++++++--- tests/rest/admin/test_jwks.py | 106 +++++++++++++++++++++++ 2 files changed, 212 insertions(+), 11 deletions(-) create mode 100644 tests/rest/admin/test_jwks.py diff --git a/tests/config/test_oauth_delegation.py b/tests/config/test_oauth_delegation.py index c5fc6d6ebb0b..6d294e0144f6 100644 --- a/tests/config/test_oauth_delegation.py +++ b/tests/config/test_oauth_delegation.py @@ -51,6 +51,34 @@ def __init__(self, config: None, api: ModuleApi): ) +def _dict_merge(merge_dict: dict, into_dict: dict) -> None: + """Do a deep merge of two dicts + + Recursively merges `merge_dict` into `into_dict`: + * For keys where both `merge_dict` and `into_dict` have a dict value, the values + are recursively merged + * For all other keys, the 
values in `into_dict` (if any) are overwritten with + the value from `merge_dict`. + + Args: + merge_dict: dict to merge + into_dict: target dict to be modified + """ + for k, v in merge_dict.items(): + if k not in into_dict: + into_dict[k] = v + continue + + current_val = into_dict[k] + + if isinstance(v, dict) and isinstance(current_val, dict): + _dict_merge(v, current_val) + continue + + # otherwise we just overwrite + into_dict[k] = v + + @skip_unless(HAS_AUTHLIB, "requires authlib") class MSC3861OAuthDelegation(HomeserverTestCase): """Test that the Homeserver fails to initialize if the config is invalid.""" @@ -60,18 +88,85 @@ def setUp(self) -> None: self._hs_args = {"clock": self.clock, "reactor": self.reactor} def default_config(self) -> Dict[str, Any]: - config = super().default_config() - config["public_baseurl"] = BASE_URL - if "experimental_features" not in config: - config["experimental_features"] = {} - config["experimental_features"]["msc3861"] = { - "enabled": True, - "issuer": ISSUER, - "client_id": CLIENT_ID, - "client_auth_method": "client_secret_post", - "client_secret": CLIENT_SECRET, + default_extra_config = { + "public_baseurl": BASE_URL, + "experimental_features": { + "msc3861": { + "enabled": True, + "issuer": ISSUER, + "client_id": CLIENT_ID, + "client_auth_method": "client_secret_post", + "client_secret": CLIENT_SECRET, + } + }, + } + _dict_merge( + {} if self._extra_config is None else self._extra_config, + default_extra_config, + ) + self._extra_config = default_extra_config + return super().default_config() + + @override_config( + { + "enable_registration": False, } - return config + ) + def test_client_secret_post_works(self) -> None: + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "experimental_features": { + "msc3861": { + "client_auth_method": "invalid", + } + }, + } + ) + def test_invalid_client_auth_method(self) -> None: + with self.assertRaises(ValueError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "experimental_features": { + "msc3861": { + "client_auth_method": "private_key_jwt", + } + }, + } + ) + def test_invalid_private_key_jwt(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "experimental_features": { + "msc3861": { + "client_auth_method": "private_key_jwt", + "jwk": { + "p": "-frVdP_tZ-J_nIR6HNMDq1N7aunwm51nAqNnhqIyuA8ikx7LlQED1tt2LD3YEvYyW8nxE2V95HlCRZXQPMiRJBFOsbmYkzl2t-MpavTaObB_fct_JqcRtdXddg4-_ihdjRDwUOreq_dpWh6MIKsC3UyekfkHmeEJg5YpOTL15j8", + "kty": "RSA", + "q": "oFw-Enr_YozQB1ab-kawn4jY3yHi8B1nSmYT0s8oTCflrmps5BFJfCkHL5ij3iY15z0o2m0N-jjB1oSJ98O4RayEEYNQlHnTNTl0kRIWzpoqblHUIxVcahIpP_xTovBJzwi8XXoLGqHOOMA-r40LSyVgP2Ut8D9qBwV6_UfT0LU", + "d": "WFkDPYo4b4LIS64D_QtQfGGuAObPvc3HFfp9VZXyq3SJR58XZRHE0jqtlEMNHhOTgbMYS3w8nxPQ_qVzY-5hs4fIanwvB64mAoOGl0qMHO65DTD_WsGFwzYClJPBVniavkLE2Hmpu8IGe6lGliN8vREC6_4t69liY-XcN_ECboVtC2behKkLOEASOIMuS7YcKAhTJFJwkl1dqDlliEn5A4u4xy7nuWQz3juB1OFdKlwGA5dfhDNglhoLIwNnkLsUPPFO-WB5ZNEW35xxHOToxj4bShvDuanVA6mJPtTKjz0XibjB36bj_nF_j7EtbE2PdGJ2KevAVgElR4lqS4ISgQ", + "e": "AQAB", + "kid": "test", + "qi": "cPfNk8l8W5exVNNea4d7QZZ8Qr8LgHghypYAxz8PQh1fNa8Ya1SNUDVzC2iHHhszxxA0vB9C7jGze8dBrvnzWYF1XvQcqNIVVgHhD57R1Nm3dj2NoHIKe0Cu4bCUtP8xnZQUN4KX7y4IIcgRcBWG1hT6DEYZ4BxqicnBXXNXAUI", + "dp": 
"dKlMHvslV1sMBQaKWpNb3gPq0B13TZhqr3-E2_8sPlvJ3fD8P4CmwwnOn50JDuhY3h9jY5L06sBwXjspYISVv8hX-ndMLkEeF3lrJeA5S70D8rgakfZcPIkffm3tlf1Ok3v5OzoxSv3-67Df4osMniyYwDUBCB5Oq1tTx77xpU8", + "dq": "S4ooU1xNYYcjl9FcuJEEMqKsRrAXzzSKq6laPTwIp5dDwt2vXeAm1a4eDHXC-6rUSZGt5PbqVqzV4s-cjnJMI8YYkIdjNg4NSE1Ac_YpeDl3M3Colb5CQlU7yUB7xY2bt0NOOFp9UJZYJrOo09mFMGjy5eorsbitoZEbVqS3SuE", + "n": "nJbYKqFwnURKimaviyDFrNLD3gaKR1JW343Qem25VeZxoMq1665RHVoO8n1oBm4ClZdjIiZiVdpyqzD5-Ow12YQgQEf1ZHP3CCcOQQhU57Rh5XvScTe5IxYVkEW32IW2mp_CJ6WfjYpfeL4azarVk8H3Vr59d1rSrKTVVinVdZer9YLQyC_rWAQNtHafPBMrf6RYiNGV9EiYn72wFIXlLlBYQ9Fx7bfe1PaL6qrQSsZP3_rSpuvVdLh1lqGeCLR0pyclA9uo5m2tMyCXuuGQLbA_QJm5xEc7zd-WFdux2eXF045oxnSZ_kgQt-pdN7AxGWOVvwoTf9am6mSkEdv6iw", + }, + } + }, + } + ) + def test_private_key_jwt_works(self) -> None: + self.setup_test_homeserver() def test_registration_cannot_be_enabled(self) -> None: with self.assertRaises(ConfigError): diff --git a/tests/rest/admin/test_jwks.py b/tests/rest/admin/test_jwks.py new file mode 100644 index 000000000000..a9a6191c7346 --- /dev/null +++ b/tests/rest/admin/test_jwks.py @@ -0,0 +1,106 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Dict + +from twisted.web.resource import Resource + +from synapse.rest.synapse.client import build_synapse_client_resource_tree + +from tests.unittest import HomeserverTestCase, override_config, skip_unless + +try: + import authlib # noqa: F401 + + HAS_AUTHLIB = True +except ImportError: + HAS_AUTHLIB = False + + +@skip_unless(HAS_AUTHLIB, "requires authlib") +class JWKSTestCase(HomeserverTestCase): + """Test /_synapse/jwks JWKS data.""" + + def create_resource_dict(self) -> Dict[str, Resource]: + d = super().create_resource_dict() + d.update(build_synapse_client_resource_tree(self.hs)) + return d + + def test_empty_jwks(self) -> None: + """Test that the JWKS endpoint is not present by default.""" + channel = self.make_request("GET", "/_synapse/jwks") + self.assertEqual(404, channel.code, channel.result) + + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc3861": { + "enabled": True, + "issuer": "https://issuer/", + "client_id": "test-client-id", + "client_auth_method": "client_secret_post", + "client_secret": "secret", + }, + }, + } + ) + def test_empty_jwks_for_msc3861_client_secret_post(self) -> None: + """Test that the JWKS endpoint is empty when plain auth is used.""" + channel = self.make_request("GET", "/_synapse/jwks") + self.assertEqual(200, channel.code, channel.result) + self.assertEqual({"keys": []}, channel.json_body) + + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc3861": { + "enabled": True, + "issuer": "https://issuer/", + "client_id": "test-client-id", + "client_auth_method": "private_key_jwt", + "jwk": { + "p": "-frVdP_tZ-J_nIR6HNMDq1N7aunwm51nAqNnhqIyuA8ikx7LlQED1tt2LD3YEvYyW8nxE2V95HlCRZXQPMiRJBFOsbmYkzl2t-MpavTaObB_fct_JqcRtdXddg4-_ihdjRDwUOreq_dpWh6MIKsC3UyekfkHmeEJg5YpOTL15j8", + "kty": 
"RSA", + "q": "oFw-Enr_YozQB1ab-kawn4jY3yHi8B1nSmYT0s8oTCflrmps5BFJfCkHL5ij3iY15z0o2m0N-jjB1oSJ98O4RayEEYNQlHnTNTl0kRIWzpoqblHUIxVcahIpP_xTovBJzwi8XXoLGqHOOMA-r40LSyVgP2Ut8D9qBwV6_UfT0LU", + "d": "WFkDPYo4b4LIS64D_QtQfGGuAObPvc3HFfp9VZXyq3SJR58XZRHE0jqtlEMNHhOTgbMYS3w8nxPQ_qVzY-5hs4fIanwvB64mAoOGl0qMHO65DTD_WsGFwzYClJPBVniavkLE2Hmpu8IGe6lGliN8vREC6_4t69liY-XcN_ECboVtC2behKkLOEASOIMuS7YcKAhTJFJwkl1dqDlliEn5A4u4xy7nuWQz3juB1OFdKlwGA5dfhDNglhoLIwNnkLsUPPFO-WB5ZNEW35xxHOToxj4bShvDuanVA6mJPtTKjz0XibjB36bj_nF_j7EtbE2PdGJ2KevAVgElR4lqS4ISgQ", + "e": "AQAB", + "kid": "test", + "qi": "cPfNk8l8W5exVNNea4d7QZZ8Qr8LgHghypYAxz8PQh1fNa8Ya1SNUDVzC2iHHhszxxA0vB9C7jGze8dBrvnzWYF1XvQcqNIVVgHhD57R1Nm3dj2NoHIKe0Cu4bCUtP8xnZQUN4KX7y4IIcgRcBWG1hT6DEYZ4BxqicnBXXNXAUI", + "dp": "dKlMHvslV1sMBQaKWpNb3gPq0B13TZhqr3-E2_8sPlvJ3fD8P4CmwwnOn50JDuhY3h9jY5L06sBwXjspYISVv8hX-ndMLkEeF3lrJeA5S70D8rgakfZcPIkffm3tlf1Ok3v5OzoxSv3-67Df4osMniyYwDUBCB5Oq1tTx77xpU8", + "dq": "S4ooU1xNYYcjl9FcuJEEMqKsRrAXzzSKq6laPTwIp5dDwt2vXeAm1a4eDHXC-6rUSZGt5PbqVqzV4s-cjnJMI8YYkIdjNg4NSE1Ac_YpeDl3M3Colb5CQlU7yUB7xY2bt0NOOFp9UJZYJrOo09mFMGjy5eorsbitoZEbVqS3SuE", + "n": "nJbYKqFwnURKimaviyDFrNLD3gaKR1JW343Qem25VeZxoMq1665RHVoO8n1oBm4ClZdjIiZiVdpyqzD5-Ow12YQgQEf1ZHP3CCcOQQhU57Rh5XvScTe5IxYVkEW32IW2mp_CJ6WfjYpfeL4azarVk8H3Vr59d1rSrKTVVinVdZer9YLQyC_rWAQNtHafPBMrf6RYiNGV9EiYn72wFIXlLlBYQ9Fx7bfe1PaL6qrQSsZP3_rSpuvVdLh1lqGeCLR0pyclA9uo5m2tMyCXuuGQLbA_QJm5xEc7zd-WFdux2eXF045oxnSZ_kgQt-pdN7AxGWOVvwoTf9am6mSkEdv6iw", + }, + }, + }, + } + ) + def test_key_returned_for_msc3861_client_secret_post(self) -> None: + """Test that the JWKS includes public part of JWK for private_key_jwt auth is used.""" + channel = self.make_request("GET", "/_synapse/jwks") + self.assertEqual(200, channel.code, channel.result) + self.assertEqual( + { + "keys": [ + { + "kty": "RSA", + "e": "AQAB", + "kid": "test", + "n": "nJbYKqFwnURKimaviyDFrNLD3gaKR1JW343Qem25VeZxoMq1665RHVoO8n1oBm4ClZdjIiZiVdpyqzD5-Ow12YQgQEf1ZHP3CCcOQQhU57Rh5XvScTe5IxYVkEW32IW2mp_CJ6WfjYpfeL4azarVk8H3Vr59d1rSrKTVVinVdZer9YLQyC_rWAQNtHafPBMrf6RYiNGV9EiYn72wFIXlLlBYQ9Fx7bfe1PaL6qrQSsZP3_rSpuvVdLh1lqGeCLR0pyclA9uo5m2tMyCXuuGQLbA_QJm5xEc7zd-WFdux2eXF045oxnSZ_kgQt-pdN7AxGWOVvwoTf9am6mSkEdv6iw", + } + ] + }, + channel.json_body, + ) From c008b44b4f7bb3604be77709c62e6ec78389f8ed Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 4 Apr 2023 18:11:17 +0200 Subject: [PATCH 017/153] Add an admin token for MAS -> Synapse calls --- synapse/api/auth/msc3861_delegated.py | 15 +++++++++++++++ synapse/config/experimental.py | 9 +++++++++ 2 files changed, 24 insertions(+) diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index 4ca3280bd3c4..a84b7730b340 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -90,6 +90,7 @@ def __init__(self, hs: "HomeServer"): self._http_client = hs.get_proxied_http_client() self._hostname = hs.hostname + self._admin_token = self._config.admin_token self._issuer_metadata = RetryOnExceptionCachedCall(self._load_metadata) @@ -176,6 +177,20 @@ async def get_user_by_access_token( token: str, allow_expired: bool = False, ) -> Requester: + if self._admin_token is not None and token == self._admin_token: + # XXX: This is a temporary solution so that the admin API can be called by + # the OIDC provider. This will be removed once we have OIDC client + # credentials grant support in matrix-authentication-service. + logging.info("Admin toked used") + # XXX: that user doesn't exist and won't be provisioned. 
+ # This is mostly fine for admin calls, but we should also think about doing + # requesters without a user_id. + admin_user = UserID("__oidc_admin", self._hostname) + return create_requester( + user_id=admin_user, + scope=["urn:synapse:admin:*"], + ) + introspection_result = await self._introspect_token(token) logger.info(f"Introspection result: {introspection_result!r}") diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index b9607975f903..d4dff22b0bae 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -136,6 +136,15 @@ def _check_client_auth_method( ) """The URL of the My Account page on the OIDC Provider as per MSC2965.""" + admin_token: Optional[str] = attr.ib( + default=None, + validator=attr.validators.optional(attr.validators.instance_of(str)), + ) + """ + A token that should be considered as an admin token. + This is used by the OIDC provider, to make admin calls to Synapse. + """ + def check_config_conflicts(self, root: RootConfig) -> None: """Checks for any configuration conflicts with other parts of Synapse. From 4d0231b3648d5d70a8e0f4d99a0c040f12f15669 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 16 May 2023 10:52:37 +0200 Subject: [PATCH 018/153] Make AS tokens work & allow ASes to /register --- synapse/api/auth/base.py | 80 +++++++++++++++++++++++- synapse/api/auth/internal.py | 82 +------------------------ synapse/api/auth/msc3861_delegated.py | 9 ++- synapse/rest/client/register.py | 69 +++++++++++++++++++++ tests/handlers/test_oauth_delegation.py | 4 +- 5 files changed, 159 insertions(+), 85 deletions(-) diff --git a/synapse/api/auth/base.py b/synapse/api/auth/base.py index 240f2b90dee2..9321d6f18637 100644 --- a/synapse/api/auth/base.py +++ b/synapse/api/auth/base.py @@ -14,6 +14,8 @@ import logging from typing import TYPE_CHECKING, Optional, Tuple +from netaddr import IPAddress + from twisted.web.server import Request from synapse import event_auth @@ -26,7 +28,8 @@ ) from synapse.appservice import ApplicationService from synapse.logging.opentracing import trace -from synapse.types import Requester +from synapse.types import Requester, create_requester +from synapse.util.cancellation import cancellable if TYPE_CHECKING: from synapse.server import HomeServer @@ -271,3 +274,78 @@ def get_access_token_from_request(request: Request) -> str: raise MissingClientTokenError() return query_params[0].decode("ascii") + + @cancellable + async def get_appservice_user( + self, request: Request, access_token: str + ) -> Optional[Requester]: + """ + Given a request, reads the request parameters to determine: + - whether it's an application service that's making this request + - what user the application service should be treated as controlling + (the user_id URI parameter allows an application service to masquerade + any applicable user in its namespace) + - what device the application service should be treated as controlling + (the device_id[^1] URI parameter allows an application service to masquerade + as any device that exists for the relevant user) + + [^1] Unstable and provided by MSC3202. + Must use `org.matrix.msc3202.device_id` in place of `device_id` for now. 
+ + Returns: + the application service `Requester` of that request + + Postconditions: + - The `app_service` field in the returned `Requester` is set + - The `user_id` field in the returned `Requester` is either the application + service sender or the controlled user set by the `user_id` URI parameter + - The returned application service is permitted to control the returned user ID. + - The returned device ID, if present, has been checked to be a valid device ID + for the returned user ID. + """ + DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id" + + app_service = self.store.get_app_service_by_token(access_token) + if app_service is None: + return None + + if app_service.ip_range_whitelist: + ip_address = IPAddress(request.getClientAddress().host) + if ip_address not in app_service.ip_range_whitelist: + return None + + # This will always be set by the time Twisted calls us. + assert request.args is not None + + if b"user_id" in request.args: + effective_user_id = request.args[b"user_id"][0].decode("utf8") + await self.validate_appservice_can_control_user_id( + app_service, effective_user_id + ) + else: + effective_user_id = app_service.sender + + effective_device_id: Optional[str] = None + + if ( + self.hs.config.experimental.msc3202_device_masquerading_enabled + and DEVICE_ID_ARG_NAME in request.args + ): + effective_device_id = request.args[DEVICE_ID_ARG_NAME][0].decode("utf8") + # We only just set this so it can't be None! + assert effective_device_id is not None + device_opt = await self.store.get_device( + effective_user_id, effective_device_id + ) + if device_opt is None: + # For now, use 400 M_EXCLUSIVE if the device doesn't exist. + # This is an open thread of discussion on MSC3202 as of 2021-12-09. + raise AuthError( + 400, + f"Application service trying to use a device that doesn't exist ('{effective_device_id}' for {effective_user_id})", + Codes.EXCLUSIVE, + ) + + return create_requester( + effective_user_id, app_service=app_service, device_id=effective_device_id + ) diff --git a/synapse/api/auth/internal.py b/synapse/api/auth/internal.py index 813d537e537c..e2ae198b196e 100644 --- a/synapse/api/auth/internal.py +++ b/synapse/api/auth/internal.py @@ -12,12 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
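
The masquerading rules applied by `get_appservice_user` above can be summarised with a small stand-alone sketch; the helper name `resolve_appservice_identity` is made up for illustration, and the real method additionally checks the appservice namespace, its IP whitelist and that the masqueraded device exists:

from typing import List, Mapping, Optional, Tuple

DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"


def resolve_appservice_identity(
    args: Mapping[bytes, List[bytes]], as_sender: str
) -> Tuple[str, Optional[str]]:
    # The user_id query parameter lets the appservice act as any user in its
    # namespace; otherwise the request is attributed to the appservice sender.
    if b"user_id" in args:
        user_id = args[b"user_id"][0].decode("utf8")
    else:
        user_id = as_sender

    # MSC3202: the unstable device_id parameter picks the masqueraded device.
    device_id: Optional[str] = None
    if DEVICE_ID_ARG_NAME in args:
        device_id = args[DEVICE_ID_ARG_NAME][0].decode("utf8")

    return user_id, device_id


# e.g. ?user_id=@alice:example.org&org.matrix.msc3202.device_id=AABBCCDD
assert resolve_appservice_identity(
    {b"user_id": [b"@alice:example.org"], DEVICE_ID_ARG_NAME: [b"AABBCCDD"]},
    as_sender="@bridge:example.org",
) == ("@alice:example.org", "AABBCCDD")
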
import logging -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING import pymacaroons -from netaddr import IPAddress - -from twisted.web.server import Request from synapse.api.errors import ( AuthError, @@ -122,7 +119,7 @@ async def _wrapped_get_user_by_req( access_token = self.get_access_token_from_request(request) # First check if it could be a request from an appservice - requester = await self._get_appservice_user(request) + requester = await self.get_appservice_user(request, access_token) if not requester: # If not, it should be from a regular user requester = await self.get_user_by_access_token( @@ -189,81 +186,6 @@ async def _wrapped_get_user_by_req( except KeyError: raise MissingClientTokenError() - @cancellable - async def _get_appservice_user(self, request: Request) -> Optional[Requester]: - """ - Given a request, reads the request parameters to determine: - - whether it's an application service that's making this request - - what user the application service should be treated as controlling - (the user_id URI parameter allows an application service to masquerade - any applicable user in its namespace) - - what device the application service should be treated as controlling - (the device_id[^1] URI parameter allows an application service to masquerade - as any device that exists for the relevant user) - - [^1] Unstable and provided by MSC3202. - Must use `org.matrix.msc3202.device_id` in place of `device_id` for now. - - Returns: - the application service `Requester` of that request - - Postconditions: - - The `app_service` field in the returned `Requester` is set - - The `user_id` field in the returned `Requester` is either the application - service sender or the controlled user set by the `user_id` URI parameter - - The returned application service is permitted to control the returned user ID. - - The returned device ID, if present, has been checked to be a valid device ID - for the returned user ID. - """ - DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id" - - app_service = self.store.get_app_service_by_token( - self.get_access_token_from_request(request) - ) - if app_service is None: - return None - - if app_service.ip_range_whitelist: - ip_address = IPAddress(request.getClientAddress().host) - if ip_address not in app_service.ip_range_whitelist: - return None - - # This will always be set by the time Twisted calls us. - assert request.args is not None - - if b"user_id" in request.args: - effective_user_id = request.args[b"user_id"][0].decode("utf8") - await self.validate_appservice_can_control_user_id( - app_service, effective_user_id - ) - else: - effective_user_id = app_service.sender - - effective_device_id: Optional[str] = None - - if ( - self.hs.config.experimental.msc3202_device_masquerading_enabled - and DEVICE_ID_ARG_NAME in request.args - ): - effective_device_id = request.args[DEVICE_ID_ARG_NAME][0].decode("utf8") - # We only just set this so it can't be None! - assert effective_device_id is not None - device_opt = await self.store.get_device( - effective_user_id, effective_device_id - ) - if device_opt is None: - # For now, use 400 M_EXCLUSIVE if the device doesn't exist. - # This is an open thread of discussion on MSC3202 as of 2021-12-09. 
- raise AuthError( - 400, - f"Application service trying to use a device that doesn't exist ('{effective_device_id}' for {effective_user_id})", - Codes.EXCLUSIVE, - ) - - return create_requester( - effective_user_id, app_service=app_service, device_id=effective_device_id - ) - async def get_user_by_access_token( self, token: str, diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index a84b7730b340..b84dce2563a7 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -162,14 +162,19 @@ async def get_user_by_req( ) -> Requester: access_token = self.get_access_token_from_request(request) - # TODO: we probably want to assert the allow_guest inside this call so that we don't provision the user if they don't have enough permission: - requester = await self.get_user_by_access_token(access_token, allow_expired) + requester = await self.get_appservice_user(request, access_token) + if not requester: + # TODO: we probably want to assert the allow_guest inside this call + # so that we don't provision the user if they don't have enough permission: + requester = await self.get_user_by_access_token(access_token, allow_expired) if not allow_guest and requester.is_guest: raise OAuthInsufficientScopeError( ["urn:matrix:org.matrix.msc2967.client:api:*"] ) + request.requester = requester + return requester async def get_user_by_access_token( diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index f8fb0e1dee46..d59669f0b6da 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -869,6 +869,74 @@ async def _do_guest_registration( return 200, result +class RegisterAppServiceOnlyRestServlet(RestServlet): + """An alternative registration API endpoint that only allows ASes to register + + This replaces the regular /register endpoint if MSC3861. There are two notable + differences with the regular /register endpoint: + - It only allows the `m.login.application_service` login type + - It does not create a device or access token for the just-registered user + + Note that the exact behaviour of this endpoint is not yet finalised. It should be + just good enough to make most ASes work. + """ + + PATTERNS = client_patterns("/register$") + CATEGORY = "Registration/login requests" + + def __init__(self, hs: "HomeServer"): + super().__init__() + + self.auth = hs.get_auth() + self.registration_handler = hs.get_registration_handler() + self.ratelimiter = hs.get_registration_ratelimiter() + + @interactive_auth_handler + async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + body = parse_json_object_from_request(request) + + client_addr = request.getClientAddress().host + + await self.ratelimiter.ratelimit(None, client_addr, update=False) + + kind = parse_string(request, "kind", default="user") + + if kind == "guest": + raise SynapseError(403, "Guest access is disabled") + elif kind != "user": + raise UnrecognizedRequestError( + f"Do not understand membership kind: {kind}", + ) + + # Pull out the provided username and do basic sanity checks early since + # the auth layer will store these in sessions. + desired_username = body.get("username") + if not isinstance(desired_username, str) or len(desired_username) > 512: + raise SynapseError(400, "Invalid username") + + # Allow only ASes to use this API. 
+ if body.get("type") != APP_SERVICE_REGISTRATION_TYPE: + raise SynapseError(403, "Non-application service registration type") + + if not self.auth.has_access_token(request): + raise SynapseError( + 400, + "Appservice token must be provided when using a type of m.login.application_service", + ) + + # XXX we should check that desired_username is valid. Currently + # we give appservices carte blanche for any insanity in mxids, + # because the IRC bridges rely on being able to register stupid + # IDs. + + as_token = self.auth.get_access_token_from_request(request) + + user_id = await self.registration_handler.appservice_register( + desired_username, as_token + ) + return 200, {"user_id": user_id} + + def _calculate_registration_flows( config: HomeServerConfig, auth_handler: AuthHandler ) -> List[List[str]]: @@ -956,6 +1024,7 @@ def _calculate_registration_flows( def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if hs.config.experimental.msc3861.enabled: + RegisterAppServiceOnlyRestServlet(hs).register(http_server) return if hs.config.worker.worker_app is None: diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index 081fef51ecd8..e53020a58a2a 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -527,8 +527,8 @@ def test_registration_endpoints_removed(self) -> None: self.expect_unrecognized( "GET", "/_matrix/client/v1/register/m.login.registration_token/validity" ) - self.expect_unrecognized("POST", "/_matrix/client/v3/register") - self.expect_unrecognized("GET", "/_matrix/client/v3/register") + # This is still available for AS registrations + # self.expect_unrecognized("POST", "/_matrix/client/v3/register") self.expect_unrecognized("GET", "/_matrix/client/v3/register/available") self.expect_unrecognized( "POST", "/_matrix/client/v3/register/email/requestToken" From e343125b3880bfc55223735a784eb1894db5e9be Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Wed, 10 May 2023 18:05:06 +0200 Subject: [PATCH 019/153] Disable incompatible Admin API endpoints --- synapse/rest/admin/__init__.py | 21 +++++++++++++-------- synapse/rest/admin/users.py | 8 ++++++++ tests/handlers/test_oauth_delegation.py | 19 +++++++++++++++++++ 3 files changed, 40 insertions(+), 8 deletions(-) diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index c729364839c0..fe8177ed4dcc 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -257,9 +257,11 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: DeleteRoomStatusByRoomIdRestServlet(hs).register(http_server) JoinRoomAliasServlet(hs).register(http_server) VersionServlet(hs).register(http_server) - UserAdminServlet(hs).register(http_server) + if not hs.config.experimental.msc3861.enabled: + UserAdminServlet(hs).register(http_server) UserMembershipRestServlet(hs).register(http_server) - UserTokenRestServlet(hs).register(http_server) + if not hs.config.experimental.msc3861.enabled: + UserTokenRestServlet(hs).register(http_server) UserRestServletV2(hs).register(http_server) UsersRestServletV2(hs).register(http_server) UserMediaStatisticsRestServlet(hs).register(http_server) @@ -274,9 +276,10 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: RoomEventContextServlet(hs).register(http_server) RateLimitRestServlet(hs).register(http_server) UsernameAvailableRestServlet(hs).register(http_server) - ListRegistrationTokensRestServlet(hs).register(http_server) - 
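
For reference, a call to the appservice-only /register endpoint above would look roughly like the sketch below; the server name, username and token are made-up values, and the response shape follows the handler's `{"user_id": ...}` return:

# Hypothetical request against a homeserver with MSC3861 enabled. The
# appservice authenticates with its as_token; no device or access token is
# created for the registered user.
import requests

resp = requests.post(
    "https://synapse.example.com/_matrix/client/v3/register",
    headers={"Authorization": "Bearer <appservice as_token>"},
    json={"type": "m.login.application_service", "username": "_irc_alice"},
)
assert resp.status_code == 200
assert resp.json() == {"user_id": "@_irc_alice:example.com"}
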
NewRegistrationTokenRestServlet(hs).register(http_server) - RegistrationTokenRestServlet(hs).register(http_server) + if not hs.config.experimental.msc3861.enabled: + ListRegistrationTokensRestServlet(hs).register(http_server) + NewRegistrationTokenRestServlet(hs).register(http_server) + RegistrationTokenRestServlet(hs).register(http_server) DestinationMembershipRestServlet(hs).register(http_server) DestinationResetConnectionRestServlet(hs).register(http_server) DestinationRestServlet(hs).register(http_server) @@ -306,10 +309,12 @@ def register_servlets_for_client_rest_resource( # The following resources can only be run on the main process. if hs.config.worker.worker_app is None: DeactivateAccountRestServlet(hs).register(http_server) - ResetPasswordRestServlet(hs).register(http_server) + if not hs.config.experimental.msc3861.enabled: + ResetPasswordRestServlet(hs).register(http_server) SearchUsersRestServlet(hs).register(http_server) - UserRegisterServlet(hs).register(http_server) - AccountValidityRenewServlet(hs).register(http_server) + if not hs.config.experimental.msc3861.enabled: + UserRegisterServlet(hs).register(http_server) + AccountValidityRenewServlet(hs).register(http_server) # Load the media repo ones if we're using them. Otherwise load the servlets which # don't need a media repo (typically readonly admin APIs). diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index 932333ae5715..407fe9c8043a 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -71,6 +71,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.admin_handler = hs.get_admin_handler() self._msc3866_enabled = hs.config.experimental.msc3866.enabled + self._msc3861_enabled = hs.config.experimental.msc3861.enabled async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) @@ -94,7 +95,14 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: user_id = parse_string(request, "user_id") name = parse_string(request, "name") + guests = parse_boolean(request, "guests", default=True) + if self._msc3861_enabled and guests: + raise SynapseError( + HTTPStatus.BAD_REQUEST, + "The guests parameter is not supported when MSC3861 is enabled.", + errcode=Codes.INVALID_PARAM, + ) deactivated = parse_boolean(request, "deactivated", default=False) # If support for MSC3866 is not enabled, apply no filtering based on the diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index e53020a58a2a..b79c43a42472 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -31,6 +31,7 @@ InvalidClientTokenError, OAuthInsufficientScopeError, ) +from synapse.rest import admin from synapse.rest.client import account, devices, keys, login, logout, register from synapse.server import HomeServer from synapse.types import JsonDict @@ -104,6 +105,7 @@ class MSC3861OAuthDelegation(HomeserverTestCase): register.register_servlets, login.register_servlets, logout.register_servlets, + admin.register_servlets, ] def default_config(self) -> Dict[str, Any]: @@ -557,3 +559,20 @@ def test_openid_endpoints_removed(self) -> None: self.expect_unrecognized( "POST", "/_matrix/client/v3/user/{USERNAME}/openid/request_token" ) + + def test_admin_api_endpoints_removed(self) -> None: + """Test that admin API endpoints that were removed in MSC2964 are no longer available.""" + self.expect_unrecognized("GET", 
"/_synapse/admin/v1/registration_tokens") + self.expect_unrecognized("POST", "/_synapse/admin/v1/registration_tokens/new") + self.expect_unrecognized("GET", "/_synapse/admin/v1/registration_tokens/abcd") + self.expect_unrecognized("PUT", "/_synapse/admin/v1/registration_tokens/abcd") + self.expect_unrecognized( + "DELETE", "/_synapse/admin/v1/registration_tokens/abcd" + ) + self.expect_unrecognized("POST", "/_synapse/admin/v1/reset_password/foo") + self.expect_unrecognized("POST", "/_synapse/admin/v1/users/foo/login") + self.expect_unrecognized("GET", "/_synapse/admin/v1/register") + self.expect_unrecognized("POST", "/_synapse/admin/v1/register") + self.expect_unrecognized("GET", "/_synapse/admin/v1/users/foo/admin") + self.expect_unrecognized("PUT", "/_synapse/admin/v1/users/foo/admin") + self.expect_unrecognized("POST", "/_synapse/admin/v1/account_validity/validity") From ec9379d7e298c24f3530cf48ee34c30aa038feb2 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 12 May 2023 15:22:46 +0200 Subject: [PATCH 020/153] Newsfile. --- changelog.d/15582.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/15582.feature diff --git a/changelog.d/15582.feature b/changelog.d/15582.feature new file mode 100644 index 000000000000..00959500a54e --- /dev/null +++ b/changelog.d/15582.feature @@ -0,0 +1 @@ +Experimental [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) support: delegate auth to an OIDC provider. From 14a5be9c4d69b5669792f2cdc658c266847a8c4a Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Mon, 22 May 2023 15:48:57 +0200 Subject: [PATCH 021/153] Handle errors when introspecting tokens This returns a proper 503 when the introspection endpoint is not working for some reason, which should avoid logging out clients in those cases. 
--- synapse/api/auth/msc3861_delegated.py | 42 ++++++++++++++++++++++--- tests/handlers/test_oauth_delegation.py | 35 +++++++++++++++++++++ tests/test_utils/__init__.py | 4 +-- 3 files changed, 74 insertions(+), 7 deletions(-) diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index b84dce2563a7..82c66691dae1 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -27,9 +27,11 @@ from synapse.api.auth.base import BaseAuth from synapse.api.errors import ( AuthError, + HttpResponseException, InvalidClientTokenError, OAuthInsufficientScopeError, StoreError, + SynapseError, ) from synapse.http.site import SynapseRequest from synapse.logging.context import make_deferred_yieldable @@ -117,6 +119,21 @@ async def _load_metadata(self) -> OpenIDProviderMetadata: return metadata async def _introspect_token(self, token: str) -> IntrospectionToken: + """ + Send a token to the introspection endpoint and returns the introspection response + + Parameters: + token: The token to introspect + + Raises: + HttpResponseException: If the introspection endpoint returns a non-2xx response + ValueError: If the introspection endpoint returns an invalid JSON response + JSONDecodeError: If the introspection endpoint returns a non-JSON response + Exception: If the HTTP request fails + + Returns: + The introspection response + """ metadata = await self._issuer_metadata.get() introspection_endpoint = metadata.get("introspection_endpoint") raw_headers: Dict[str, str] = { @@ -136,7 +153,7 @@ async def _introspect_token(self, token: str) -> IntrospectionToken: # Do the actual request # We're not using the SimpleHttpClient util methods as we don't want to - # check the HTTP status code and we do the body encoding ourself. + # check the HTTP status code, and we do the body encoding ourselves. response = await self._http_client.request( method="POST", uri=uri, @@ -145,10 +162,21 @@ async def _introspect_token(self, token: str) -> IntrospectionToken: ) resp_body = await make_deferred_yieldable(readBody(response)) - # TODO: Let's not worry about 5xx errors & co. for now and just try - # decoding that as JSON. We should also do some validation of the - # response + + if response.code < 200 or response.code >= 300: + raise HttpResponseException( + response.code, + response.phrase.decode("ascii", errors="replace"), + resp_body, + ) + resp = json_decoder.decode(resp_body.decode("utf-8")) + + if not isinstance(resp, dict): + raise ValueError( + "The introspection endpoint returned an invalid JSON response." 
+ ) + return IntrospectionToken(**resp) async def is_server_admin(self, requester: Requester) -> bool: @@ -196,7 +224,11 @@ async def get_user_by_access_token( scope=["urn:synapse:admin:*"], ) - introspection_result = await self._introspect_token(token) + try: + introspection_result = await self._introspect_token(token) + except Exception: + logger.exception("Failed to introspect token") + raise SynapseError(503, "Unable to introspect the access token") logger.info(f"Introspection result: {introspection_result!r}") diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index b79c43a42472..16ce2c069d60 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -30,6 +30,7 @@ Codes, InvalidClientTokenError, OAuthInsufficientScopeError, + SynapseError, ) from synapse.rest import admin from synapse.rest.client import account, devices, keys, login, logout, register @@ -405,6 +406,40 @@ def test_active_guest_allowed(self) -> None: ) self.assertEqual(requester.device_id, DEVICE) + def test_unavailable_introspection_endpoint(self) -> None: + """The handler should return an internal server error.""" + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + + # The introspection endpoint is returning an error. + self.http_client.request = simple_async_mock( + return_value=FakeResponse(code=500, body=b"Internal Server Error") + ) + error = self.get_failure(self.auth.get_user_by_req(request), SynapseError) + self.assertEqual(error.value.code, 503) + + # The introspection endpoint request fails. + self.http_client.request = simple_async_mock(raises=Exception()) + error = self.get_failure(self.auth.get_user_by_req(request), SynapseError) + self.assertEqual(error.value.code, 503) + + # The introspection endpoint does not return a JSON object. + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, payload=["this is an array", "not an object"] + ) + ) + error = self.get_failure(self.auth.get_user_by_req(request), SynapseError) + self.assertEqual(error.value.code, 503) + + # The introspection endpoint does not return valid JSON. + self.http_client.request = simple_async_mock( + return_value=FakeResponse(code=200, body=b"this is not valid JSON") + ) + error = self.get_failure(self.auth.get_user_by_req(request), SynapseError) + self.assertEqual(error.value.code, 503) + def make_device_keys(self, user_id: str, device_id: str) -> JsonDict: # We only generate a master key to simplify the test. 
master_signing_key = generate_signing_key(device_id) diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py index e5dae670a70e..c8cc841d9540 100644 --- a/tests/test_utils/__init__.py +++ b/tests/test_utils/__init__.py @@ -33,7 +33,7 @@ from twisted.web.http_headers import Headers from twisted.web.iweb import IResponse -from synapse.types import JsonDict +from synapse.types import JsonSerializable if TYPE_CHECKING: from sys import UnraisableHookArgs @@ -145,7 +145,7 @@ def deliverBody(self, protocol: IProtocol) -> None: protocol.connectionLost(Failure(ResponseDone())) @classmethod - def json(cls, *, code: int = 200, payload: JsonDict) -> "FakeResponse": + def json(cls, *, code: int = 200, payload: JsonSerializable) -> "FakeResponse": headers = Headers({"Content-Type": ["application/json"]}) body = json.dumps(payload).encode("utf-8") return cls(code=code, body=body, headers=headers) From 98afc57d59df118a13f894fc66f206bc7409e14a Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Mon, 22 May 2023 17:17:49 +0200 Subject: [PATCH 022/153] Make OIDC scope constants --- synapse/api/auth/msc3861_delegated.py | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index 82c66691dae1..5b0e678c0f59 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -44,6 +44,15 @@ logger = logging.getLogger(__name__) +# Scope as defined by MSC2967 +# https://github.com/matrix-org/matrix-spec-proposals/pull/2967 +SCOPE_MATRIX_API = "urn:matrix:org.matrix.msc2967.client:api:*" +SCOPE_MATRIX_GUEST = "urn:matrix:org.matrix.msc2967.client:api:guest" +SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:org.matrix.msc2967.client:device:" + +# Scope which allows access to the Synapse admin API +SCOPE_SYNAPSE_ADMIN = "urn:synapse:admin:*" + def scope_to_list(scope: str) -> List[str]: """Convert a scope string to a list of scope tokens""" @@ -197,9 +206,7 @@ async def get_user_by_req( requester = await self.get_user_by_access_token(access_token, allow_expired) if not allow_guest and requester.is_guest: - raise OAuthInsufficientScopeError( - ["urn:matrix:org.matrix.msc2967.client:api:*"] - ) + raise OAuthInsufficientScopeError([SCOPE_MATRIX_API]) request.requester = requester @@ -241,9 +248,9 @@ async def get_user_by_access_token( scope: List[str] = scope_to_list(introspection_result.get("scope", "")) # Determine type of user based on presence of particular scopes - has_admin_scope = "urn:synapse:admin:*" in scope - has_user_scope = "urn:matrix:org.matrix.msc2967.client:api:*" in scope - has_guest_scope = "urn:matrix:org.matrix.msc2967.client:api:guest" in scope + has_admin_scope = SCOPE_SYNAPSE_ADMIN in scope + has_user_scope = SCOPE_MATRIX_API in scope + has_guest_scope = SCOPE_MATRIX_GUEST in scope is_user = has_user_scope or has_admin_scope is_guest = has_guest_scope and not is_user @@ -299,10 +306,8 @@ async def get_user_by_access_token( # Find device_id in scope device_id = None for tok in scope: - if tok.startswith("urn:matrix:org.matrix.msc2967.client:device:"): - parts = tok.split(":") - if len(parts) == 5: - device_id = parts[4] + if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX): + device_id = tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :] if device_id: # Create the device on the fly if it does not exist From f739bde962daa9bc425c8343f35993ae889dbc67 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 23 May 2023 16:59:53 +0200 Subject: [PATCH 023/153] Reject 
tokens with multiple device scopes --- synapse/api/auth/msc3861_delegated.py | 30 ++++++++++++++++++++----- tests/handlers/test_oauth_delegation.py | 29 +++++++++++++++++++++++- 2 files changed, 52 insertions(+), 7 deletions(-) diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index 5b0e678c0f59..e4b16c0b5c87 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -303,13 +303,31 @@ async def get_user_by_access_token( else: user_id = UserID.from_string(user_id_str) - # Find device_id in scope - device_id = None - for tok in scope: - if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX): - device_id = tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :] + # Find device_ids in scope + # We only allow a single device_id in the scope, so we find them all in the + # scope list, and raise if there are more than one. The OIDC server should be + # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope. + device_ids = [ + tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :] + for tok in scope + if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX) + ] + + if len(device_ids) > 1: + raise AuthError( + 500, + "Multiple device IDs in scope", + ) + + device_id = device_ids[0] if device_ids else None + if device_id is not None: + # Sanity check the device_id + if len(device_id) > 255 or len(device_id) < 1: + raise AuthError( + 500, + "Invalid device ID in scope", + ) - if device_id: # Create the device on the fly if it does not exist try: await self.store.get_device( diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index 16ce2c069d60..064153551263 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -27,6 +27,7 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.api.errors import ( + AuthError, Codes, InvalidClientTokenError, OAuthInsufficientScopeError, @@ -68,8 +69,9 @@ SYNAPSE_ADMIN_SCOPE = "urn:synapse:admin:*" MATRIX_USER_SCOPE = "urn:matrix:org.matrix.msc2967.client:api:*" MATRIX_GUEST_SCOPE = "urn:matrix:org.matrix.msc2967.client:api:guest" +MATRIX_DEVICE_SCOPE_PREFIX = "urn:matrix:org.matrix.msc2967.client:device:" DEVICE = "AABBCCDD" -MATRIX_DEVICE_SCOPE = "urn:matrix:org.matrix.msc2967.client:device:" + DEVICE +MATRIX_DEVICE_SCOPE = MATRIX_DEVICE_SCOPE_PREFIX + DEVICE SUBJECT = "abc-def-ghi" USERNAME = "test-user" USER_ID = "@" + USERNAME + ":" + SERVER_NAME @@ -344,6 +346,31 @@ def test_active_user_with_device(self) -> None: ) self.assertEqual(requester.device_id, DEVICE) + def test_multiple_devices(self) -> None: + """The handler should raise an error if multiple devices are found in the scope.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join( + [ + MATRIX_USER_SCOPE, + f"{MATRIX_DEVICE_SCOPE_PREFIX}AABBCC", + f"{MATRIX_DEVICE_SCOPE_PREFIX}DDEEFF", + ] + ), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + self.get_failure(self.auth.get_user_by_req(request), AuthError) + def test_active_guest_not_allowed(self) -> None: """The handler should return an insufficient scope error.""" From 32a2f050042531ad4673b42789e833e9cd307740 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 26 May 2023 14:50:19 +0200 Subject: [PATCH 024/153] Make the config tests spawn the homeserver 
only when needed --- synapse/config/experimental.py | 40 ++- tests/config/test_oauth_delegation.py | 348 ++++++++++++-------------- 2 files changed, 182 insertions(+), 206 deletions(-) diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index d4dff22b0bae..1d189b2e26cf 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -69,7 +69,8 @@ def _check_enabled(self, attribute: attr.Attribute, value: bool) -> None: if value and not HAS_AUTHLIB: raise ConfigError( "MSC3861 is enabled but authlib is not installed. " - "Please install authlib to use MSC3861." + "Please install authlib to use MSC3861.", + ("experimental", "msc3861", "enabled"), ) issuer: str = attr.ib(default="", validator=attr.validators.instance_of(str)) @@ -114,7 +115,8 @@ def _check_client_auth_method( if value == ClientAuthMethod.PRIVATE_KEY_JWT and self.jwk is None: raise ConfigError( - "A JWKS must be provided when using the private_key_jwt client auth method" + "A JWKS must be provided when using the private_key_jwt client auth method", + ("experimental", "msc3861", "client_auth_method"), ) if ( @@ -127,7 +129,8 @@ def _check_client_auth_method( and self.client_secret is None ): raise ConfigError( - f"A client secret must be provided when using the {value} client auth method" + f"A client secret must be provided when using the {value} client auth method", + ("experimental", "msc3861", "client_auth_method"), ) account_management_url: Optional[str] = attr.ib( @@ -160,12 +163,14 @@ def check_config_conflicts(self, root: RootConfig) -> None: or root.auth.password_enabled_for_login ): raise ConfigError( - "Password auth cannot be enabled when OAuth delegation is enabled" + "Password auth cannot be enabled when OAuth delegation is enabled", + ("password_config", "enabled"), ) if root.registration.enable_registration: raise ConfigError( - "Registration cannot be enabled when OAuth delegation is enabled" + "Registration cannot be enabled when OAuth delegation is enabled", + ("enable_registration",), ) if ( @@ -183,32 +188,38 @@ def check_config_conflicts(self, root: RootConfig) -> None: if root.captcha.enable_registration_captcha: raise ConfigError( - "CAPTCHA cannot be enabled when OAuth delegation is enabled" + "CAPTCHA cannot be enabled when OAuth delegation is enabled", + ("captcha", "enable_registration_captcha"), ) if root.experimental.msc3882_enabled: raise ConfigError( - "MSC3882 cannot be enabled when OAuth delegation is enabled" + "MSC3882 cannot be enabled when OAuth delegation is enabled", + ("experimental_features", "msc3882_enabled"), ) if root.registration.refresh_token_lifetime: raise ConfigError( - "refresh_token_lifetime cannot be set when OAuth delegation is enabled" + "refresh_token_lifetime cannot be set when OAuth delegation is enabled", + ("refresh_token_lifetime",), ) if root.registration.nonrefreshable_access_token_lifetime: raise ConfigError( - "nonrefreshable_access_token_lifetime cannot be set when OAuth delegation is enabled" + "nonrefreshable_access_token_lifetime cannot be set when OAuth delegation is enabled", + ("nonrefreshable_access_token_lifetime",), ) if root.registration.session_lifetime: raise ConfigError( - "session_lifetime cannot be set when OAuth delegation is enabled" + "session_lifetime cannot be set when OAuth delegation is enabled", + ("session_lifetime",), ) if not root.experimental.msc3970_enabled: raise ConfigError( - "experimental_features.msc3970_enabled must be 'true' when OAuth delegation is enabled" + 
"experimental_features.msc3970_enabled must be 'true' when OAuth delegation is enabled", + ("experimental_features", "msc3970_enabled"), ) @@ -373,7 +384,12 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: ) # MSC3861: Matrix architecture change to delegate authentication via OIDC - self.msc3861 = MSC3861(**experimental.get("msc3861", {})) + try: + self.msc3861 = MSC3861(**experimental.get("msc3861", {})) + except ValueError as exc: + raise ConfigError( + "Invalid MSC3861 configuration", ("experimental", "msc3861") + ) from exc # MSC3970: Scope transaction IDs to devices self.msc3970_enabled = experimental.get("msc3970_enabled", self.msc3861.enabled) diff --git a/tests/config/test_oauth_delegation.py b/tests/config/test_oauth_delegation.py index 6d294e0144f6..2ead721b00fc 100644 --- a/tests/config/test_oauth_delegation.py +++ b/tests/config/test_oauth_delegation.py @@ -12,15 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Dict from unittest.mock import Mock from synapse.config import ConfigError +from synapse.config.homeserver import HomeServerConfig from synapse.module_api import ModuleApi from synapse.types import JsonDict -from tests.server import get_clock -from tests.unittest import HomeserverTestCase, override_config, skip_unless +from tests.server import get_clock, setup_test_homeserver +from tests.unittest import TestCase, skip_unless +from tests.utils import default_config try: import authlib # noqa: F401 @@ -51,45 +52,15 @@ def __init__(self, config: None, api: ModuleApi): ) -def _dict_merge(merge_dict: dict, into_dict: dict) -> None: - """Do a deep merge of two dicts - - Recursively merges `merge_dict` into `into_dict`: - * For keys where both `merge_dict` and `into_dict` have a dict value, the values - are recursively merged - * For all other keys, the values in `into_dict` (if any) are overwritten with - the value from `merge_dict`. 
- - Args: - merge_dict: dict to merge - into_dict: target dict to be modified - """ - for k, v in merge_dict.items(): - if k not in into_dict: - into_dict[k] = v - continue - - current_val = into_dict[k] - - if isinstance(v, dict) and isinstance(current_val, dict): - _dict_merge(v, current_val) - continue - - # otherwise we just overwrite - into_dict[k] = v - - @skip_unless(HAS_AUTHLIB, "requires authlib") -class MSC3861OAuthDelegation(HomeserverTestCase): +class MSC3861OAuthDelegation(TestCase): """Test that the Homeserver fails to initialize if the config is invalid.""" def setUp(self) -> None: - self.reactor, self.clock = get_clock() - self._hs_args = {"clock": self.clock, "reactor": self.reactor} - - def default_config(self) -> Dict[str, Any]: - default_extra_config = { + self.config_dict: JsonDict = { + **default_config("test"), "public_baseurl": BASE_URL, + "enable_registration": False, "experimental_features": { "msc3861": { "enabled": True, @@ -100,198 +71,187 @@ def default_config(self) -> Dict[str, Any]: } }, } - _dict_merge( - {} if self._extra_config is None else self._extra_config, - default_extra_config, - ) - self._extra_config = default_extra_config - return super().default_config() - @override_config( - { - "enable_registration": False, - } - ) + def parse_config(self) -> HomeServerConfig: + config = HomeServerConfig() + config.parse_config_dict(self.config_dict, "", "") + return config + def test_client_secret_post_works(self) -> None: - self.setup_test_homeserver() + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="client_secret_post", + client_secret=CLIENT_SECRET, + ) + + self.parse_config() + + def test_client_secret_post_requires_client_secret(self) -> None: + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="client_secret_post", + client_secret=None, + ) + + with self.assertRaises(ConfigError): + self.parse_config() + + def test_client_secret_basic_works(self) -> None: + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="client_secret_basic", + client_secret=CLIENT_SECRET, + ) + + self.parse_config() + + def test_client_secret_basic_requires_client_secret(self) -> None: + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="client_secret_basic", + client_secret=None, + ) + + with self.assertRaises(ConfigError): + self.parse_config() + + def test_client_secret_jwt_works(self) -> None: + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="client_secret_jwt", + client_secret=CLIENT_SECRET, + ) + + self.parse_config() + + def test_client_secret_jwt_requires_client_secret(self) -> None: + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="client_secret_jwt", + client_secret=None, + ) + + with self.assertRaises(ConfigError): + self.parse_config() - @override_config( - { - "enable_registration": False, - "experimental_features": { - "msc3861": { - "client_auth_method": "invalid", - } - }, - } - ) def test_invalid_client_auth_method(self) -> None: - with self.assertRaises(ValueError): - self.setup_test_homeserver() + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="invalid", + ) - @override_config( - { - "enable_registration": False, - "experimental_features": { - "msc3861": { - "client_auth_method": "private_key_jwt", - } - }, - } - ) - def test_invalid_private_key_jwt(self) -> None: with self.assertRaises(ConfigError): - self.setup_test_homeserver() 
+ self.parse_config() + + def test_private_key_jwt_requires_jwk(self) -> None: + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="private_key_jwt", + ) + + with self.assertRaises(ConfigError): + self.parse_config() - @override_config( - { - "enable_registration": False, - "experimental_features": { - "msc3861": { - "client_auth_method": "private_key_jwt", - "jwk": { - "p": "-frVdP_tZ-J_nIR6HNMDq1N7aunwm51nAqNnhqIyuA8ikx7LlQED1tt2LD3YEvYyW8nxE2V95HlCRZXQPMiRJBFOsbmYkzl2t-MpavTaObB_fct_JqcRtdXddg4-_ihdjRDwUOreq_dpWh6MIKsC3UyekfkHmeEJg5YpOTL15j8", - "kty": "RSA", - "q": "oFw-Enr_YozQB1ab-kawn4jY3yHi8B1nSmYT0s8oTCflrmps5BFJfCkHL5ij3iY15z0o2m0N-jjB1oSJ98O4RayEEYNQlHnTNTl0kRIWzpoqblHUIxVcahIpP_xTovBJzwi8XXoLGqHOOMA-r40LSyVgP2Ut8D9qBwV6_UfT0LU", - "d": "WFkDPYo4b4LIS64D_QtQfGGuAObPvc3HFfp9VZXyq3SJR58XZRHE0jqtlEMNHhOTgbMYS3w8nxPQ_qVzY-5hs4fIanwvB64mAoOGl0qMHO65DTD_WsGFwzYClJPBVniavkLE2Hmpu8IGe6lGliN8vREC6_4t69liY-XcN_ECboVtC2behKkLOEASOIMuS7YcKAhTJFJwkl1dqDlliEn5A4u4xy7nuWQz3juB1OFdKlwGA5dfhDNglhoLIwNnkLsUPPFO-WB5ZNEW35xxHOToxj4bShvDuanVA6mJPtTKjz0XibjB36bj_nF_j7EtbE2PdGJ2KevAVgElR4lqS4ISgQ", - "e": "AQAB", - "kid": "test", - "qi": "cPfNk8l8W5exVNNea4d7QZZ8Qr8LgHghypYAxz8PQh1fNa8Ya1SNUDVzC2iHHhszxxA0vB9C7jGze8dBrvnzWYF1XvQcqNIVVgHhD57R1Nm3dj2NoHIKe0Cu4bCUtP8xnZQUN4KX7y4IIcgRcBWG1hT6DEYZ4BxqicnBXXNXAUI", - "dp": "dKlMHvslV1sMBQaKWpNb3gPq0B13TZhqr3-E2_8sPlvJ3fD8P4CmwwnOn50JDuhY3h9jY5L06sBwXjspYISVv8hX-ndMLkEeF3lrJeA5S70D8rgakfZcPIkffm3tlf1Ok3v5OzoxSv3-67Df4osMniyYwDUBCB5Oq1tTx77xpU8", - "dq": "S4ooU1xNYYcjl9FcuJEEMqKsRrAXzzSKq6laPTwIp5dDwt2vXeAm1a4eDHXC-6rUSZGt5PbqVqzV4s-cjnJMI8YYkIdjNg4NSE1Ac_YpeDl3M3Colb5CQlU7yUB7xY2bt0NOOFp9UJZYJrOo09mFMGjy5eorsbitoZEbVqS3SuE", - "n": "nJbYKqFwnURKimaviyDFrNLD3gaKR1JW343Qem25VeZxoMq1665RHVoO8n1oBm4ClZdjIiZiVdpyqzD5-Ow12YQgQEf1ZHP3CCcOQQhU57Rh5XvScTe5IxYVkEW32IW2mp_CJ6WfjYpfeL4azarVk8H3Vr59d1rSrKTVVinVdZer9YLQyC_rWAQNtHafPBMrf6RYiNGV9EiYn72wFIXlLlBYQ9Fx7bfe1PaL6qrQSsZP3_rSpuvVdLh1lqGeCLR0pyclA9uo5m2tMyCXuuGQLbA_QJm5xEc7zd-WFdux2eXF045oxnSZ_kgQt-pdN7AxGWOVvwoTf9am6mSkEdv6iw", - }, - } - }, - } - ) def test_private_key_jwt_works(self) -> None: - self.setup_test_homeserver() + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="private_key_jwt", + jwk={ + "p": "-frVdP_tZ-J_nIR6HNMDq1N7aunwm51nAqNnhqIyuA8ikx7LlQED1tt2LD3YEvYyW8nxE2V95HlCRZXQPMiRJBFOsbmYkzl2t-MpavTaObB_fct_JqcRtdXddg4-_ihdjRDwUOreq_dpWh6MIKsC3UyekfkHmeEJg5YpOTL15j8", + "kty": "RSA", + "q": "oFw-Enr_YozQB1ab-kawn4jY3yHi8B1nSmYT0s8oTCflrmps5BFJfCkHL5ij3iY15z0o2m0N-jjB1oSJ98O4RayEEYNQlHnTNTl0kRIWzpoqblHUIxVcahIpP_xTovBJzwi8XXoLGqHOOMA-r40LSyVgP2Ut8D9qBwV6_UfT0LU", + "d": "WFkDPYo4b4LIS64D_QtQfGGuAObPvc3HFfp9VZXyq3SJR58XZRHE0jqtlEMNHhOTgbMYS3w8nxPQ_qVzY-5hs4fIanwvB64mAoOGl0qMHO65DTD_WsGFwzYClJPBVniavkLE2Hmpu8IGe6lGliN8vREC6_4t69liY-XcN_ECboVtC2behKkLOEASOIMuS7YcKAhTJFJwkl1dqDlliEn5A4u4xy7nuWQz3juB1OFdKlwGA5dfhDNglhoLIwNnkLsUPPFO-WB5ZNEW35xxHOToxj4bShvDuanVA6mJPtTKjz0XibjB36bj_nF_j7EtbE2PdGJ2KevAVgElR4lqS4ISgQ", + "e": "AQAB", + "kid": "test", + "qi": "cPfNk8l8W5exVNNea4d7QZZ8Qr8LgHghypYAxz8PQh1fNa8Ya1SNUDVzC2iHHhszxxA0vB9C7jGze8dBrvnzWYF1XvQcqNIVVgHhD57R1Nm3dj2NoHIKe0Cu4bCUtP8xnZQUN4KX7y4IIcgRcBWG1hT6DEYZ4BxqicnBXXNXAUI", + "dp": "dKlMHvslV1sMBQaKWpNb3gPq0B13TZhqr3-E2_8sPlvJ3fD8P4CmwwnOn50JDuhY3h9jY5L06sBwXjspYISVv8hX-ndMLkEeF3lrJeA5S70D8rgakfZcPIkffm3tlf1Ok3v5OzoxSv3-67Df4osMniyYwDUBCB5Oq1tTx77xpU8", + "dq": 
"S4ooU1xNYYcjl9FcuJEEMqKsRrAXzzSKq6laPTwIp5dDwt2vXeAm1a4eDHXC-6rUSZGt5PbqVqzV4s-cjnJMI8YYkIdjNg4NSE1Ac_YpeDl3M3Colb5CQlU7yUB7xY2bt0NOOFp9UJZYJrOo09mFMGjy5eorsbitoZEbVqS3SuE", + "n": "nJbYKqFwnURKimaviyDFrNLD3gaKR1JW343Qem25VeZxoMq1665RHVoO8n1oBm4ClZdjIiZiVdpyqzD5-Ow12YQgQEf1ZHP3CCcOQQhU57Rh5XvScTe5IxYVkEW32IW2mp_CJ6WfjYpfeL4azarVk8H3Vr59d1rSrKTVVinVdZer9YLQyC_rWAQNtHafPBMrf6RYiNGV9EiYn72wFIXlLlBYQ9Fx7bfe1PaL6qrQSsZP3_rSpuvVdLh1lqGeCLR0pyclA9uo5m2tMyCXuuGQLbA_QJm5xEc7zd-WFdux2eXF045oxnSZ_kgQt-pdN7AxGWOVvwoTf9am6mSkEdv6iw", + }, + ) + self.parse_config() def test_registration_cannot_be_enabled(self) -> None: + self.config_dict["enable_registration"] = True with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "password_config": { - "enabled": True, - }, - } - ) def test_password_config_cannot_be_enabled(self) -> None: + self.config_dict["password_config"] = {"enabled": True} with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "oidc_providers": [ - { - "idp_id": "microsoft", - "idp_name": "Microsoft", - "issuer": "https://login.microsoftonline.com//v2.0", - "client_id": "", - "client_secret": "", - "scopes": ["openid", "profile"], - "authorization_endpoint": "https://login.microsoftonline.com//oauth2/v2.0/authorize", - "token_endpoint": "https://login.microsoftonline.com//oauth2/v2.0/token", - "userinfo_endpoint": "https://graph.microsoft.com/oidc/userinfo", - } - ], - } - ) def test_oidc_sso_cannot_be_enabled(self) -> None: + self.config_dict["oidc_providers"] = [ + { + "idp_id": "microsoft", + "idp_name": "Microsoft", + "issuer": "https://login.microsoftonline.com//v2.0", + "client_id": "", + "client_secret": "", + "scopes": ["openid", "profile"], + "authorization_endpoint": "https://login.microsoftonline.com//oauth2/v2.0/authorize", + "token_endpoint": "https://login.microsoftonline.com//oauth2/v2.0/token", + "userinfo_endpoint": "https://graph.microsoft.com/oidc/userinfo", + } + ] + with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "cas_config": { - "enabled": True, - "server_url": "https://cas-server.com", - "displayname_attribute": "name", - "required_attributes": {"userGroup": "staff", "department": "None"}, - }, - } - ) def test_cas_sso_cannot_be_enabled(self) -> None: + self.config_dict["cas_config"] = { + "enabled": True, + "server_url": "https://cas-server.com", + "displayname_attribute": "name", + "required_attributes": {"userGroup": "staff", "department": "None"}, + } + with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "modules": [ - { - "module": f"{__name__}.{CustomAuthModule.__qualname__}", - "config": {}, - } - ], - } - ) def test_auth_providers_cannot_be_enabled(self) -> None: + self.config_dict["modules"] = [ + { + "module": f"{__name__}.{CustomAuthModule.__qualname__}", + "config": {}, + } + ] + + # This requires actually setting up an HS, as the module will be run on setup, + # which should raise as the module tries to register an auth provider + config = self.parse_config() + reactor, clock = get_clock() with self.assertRaises(ConfigError): - self.setup_test_homeserver() + setup_test_homeserver( + self.addCleanup, reactor=reactor, clock=clock, config=config + ) - @override_config( - { - 
"enable_registration": False, - "jwt_config": { - "enabled": True, - "secret": "my-secret-token", - "algorithm": "HS256", - }, - } - ) def test_jwt_auth_cannot_be_enabled(self) -> None: + self.config_dict["jwt_config"] = { + "enabled": True, + "secret": "my-secret-token", + "algorithm": "HS256", + } + with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "experimental_features": { - "msc3882_enabled": True, - }, - } - ) def test_msc3882_auth_cannot_be_enabled(self) -> None: + self.config_dict["experimental_features"]["msc3882_enabled"] = True with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "recaptcha_public_key": "test", - "recaptcha_private_key": "test", - "enable_registration_captcha": True, - } - ) def test_captcha_cannot_be_enabled(self) -> None: + self.config_dict.update( + enable_registration_captcha=True, + recaptcha_public_key="test", + recaptcha_private_key="test", + ) with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "refresh_token_lifetime": "24h", - "refreshable_access_token_lifetime": "10m", - "nonrefreshable_access_token_lifetime": "24h", - } - ) def test_refreshable_tokens_cannot_be_enabled(self) -> None: + self.config_dict.update( + refresh_token_lifetime="24h", + refreshable_access_token_lifetime="10m", + nonrefreshable_access_token_lifetime="24h", + ) with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "session_lifetime": "24h", - } - ) def test_session_lifetime_cannot_be_set(self) -> None: + self.config_dict["session_lifetime"] = "24h" with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() From ceb3dd77db0d3ce992d40175c3f53f6b6ddfa168 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 26 May 2023 15:16:34 +0200 Subject: [PATCH 025/153] Enforce that an admin token also has the basic Matrix API scope --- synapse/api/auth/msc3861_delegated.py | 7 ++----- tests/handlers/test_oauth_delegation.py | 26 ++++++++++++++++++++++++- 2 files changed, 27 insertions(+), 6 deletions(-) diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index e4b16c0b5c87..31c1de0119a2 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -248,13 +248,10 @@ async def get_user_by_access_token( scope: List[str] = scope_to_list(introspection_result.get("scope", "")) # Determine type of user based on presence of particular scopes - has_admin_scope = SCOPE_SYNAPSE_ADMIN in scope has_user_scope = SCOPE_MATRIX_API in scope has_guest_scope = SCOPE_MATRIX_GUEST in scope - is_user = has_user_scope or has_admin_scope - is_guest = has_guest_scope and not is_user - if not is_user and not is_guest: + if not has_user_scope and not has_guest_scope: raise InvalidClientTokenError("No scope in token granting user rights") # Match via the sub claim @@ -351,5 +348,5 @@ async def get_user_by_access_token( user_id=user_id, device_id=device_id, scope=scope, - is_guest=is_guest, + is_guest=(has_guest_scope and not has_user_scope), ) diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index 064153551263..6309d7b36e8a 100644 --- a/tests/handlers/test_oauth_delegation.py +++ 
b/tests/handlers/test_oauth_delegation.py @@ -224,6 +224,30 @@ def test_active_no_user_scope(self) -> None: ) self._assertParams() + def test_active_admin_not_user(self) -> None: + """The handler should raise when the scope has admin right but not user.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([SYNAPSE_ADMIN_SCOPE]), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + self.get_failure(self.auth.get_user_by_req(request), InvalidClientTokenError) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + def test_active_admin(self) -> None: """The handler should return a requester with admin rights.""" @@ -233,7 +257,7 @@ def test_active_admin(self) -> None: payload={ "active": True, "sub": SUBJECT, - "scope": " ".join([SYNAPSE_ADMIN_SCOPE]), + "scope": " ".join([SYNAPSE_ADMIN_SCOPE, MATRIX_USER_SCOPE]), "username": USERNAME, }, ) From c01343de43b86eb4a6c055547369d07c198a435f Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 31 May 2023 07:18:29 -0400 Subject: [PATCH 026/153] Add stricter mypy options (#15694) Enable warn_unused_configs, strict_concatenate, disallow_subclassing_any, and disallow_incomplete_defs. --- changelog.d/15694.misc | 1 + mypy.ini | 23 ++++++++++++++++++++--- synapse/api/auth/msc3861_delegated.py | 2 +- synapse/federation/federation_server.py | 4 ++-- synapse/handlers/oidc.py | 2 +- synapse/handlers/pagination.py | 4 ++-- synapse/http/server.py | 14 +++++++------- synapse/util/__init__.py | 4 ++-- synapse/util/async_helpers.py | 2 +- synapse/util/caches/lrucache.py | 6 ++---- tests/server.py | 2 +- 11 files changed, 40 insertions(+), 24 deletions(-) create mode 100644 changelog.d/15694.misc diff --git a/changelog.d/15694.misc b/changelog.d/15694.misc new file mode 100644 index 000000000000..93ceaeafc9b9 --- /dev/null +++ b/changelog.d/15694.misc @@ -0,0 +1 @@ +Improve type hints. diff --git a/mypy.ini b/mypy.ini index a7ec66196d41..56cd1d560ea8 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2,17 +2,29 @@ namespace_packages = True plugins = pydantic.mypy, mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py follow_imports = normal -check_untyped_defs = True show_error_codes = True show_traceback = True mypy_path = stubs warn_unreachable = True -warn_unused_ignores = True local_partial_types = True no_implicit_optional = True + +# Strict checks, see mypy --help +warn_unused_configs = True +# disallow_any_generics = True +disallow_subclassing_any = True +# disallow_untyped_calls = True disallow_untyped_defs = True -strict_equality = True +disallow_incomplete_defs = True +# check_untyped_defs = True +# disallow_untyped_decorators = True warn_redundant_casts = True +warn_unused_ignores = True +# warn_return_any = True +# no_implicit_reexport = True +strict_equality = True +strict_concatenate = True + # Run mypy type checking with the minimum supported Python version to catch new usage # that isn't backwards-compatible (types, overloads, etc). 
python_version = 3.8 @@ -31,6 +43,7 @@ warn_unused_ignores = False [mypy-synapse.util.caches.treecache] disallow_untyped_defs = False +disallow_incomplete_defs = False ;; Dependencies without annotations ;; Before ignoring a module, check to see if type stubs are available. @@ -40,6 +53,7 @@ disallow_untyped_defs = False ;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s ;; `[tool.poetry.dev-dependencies]` list. +# https://github.com/lepture/authlib/issues/460 [mypy-authlib.*] ignore_missing_imports = True @@ -49,9 +63,11 @@ ignore_missing_imports = True [mypy-lxml] ignore_missing_imports = True +# https://github.com/msgpack/msgpack-python/issues/448 [mypy-msgpack] ignore_missing_imports = True +# https://github.com/wolever/parameterized/issues/143 [mypy-parameterized.*] ignore_missing_imports = True @@ -73,6 +89,7 @@ ignore_missing_imports = True [mypy-srvlookup.*] ignore_missing_imports = True +# https://github.com/twisted/treq/pull/366 [mypy-treq.*] ignore_missing_imports = True diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index 31c1de0119a2..bd4fc9c0ee3d 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -59,7 +59,7 @@ def scope_to_list(scope: str) -> List[str]: return scope.strip().split(" ") -class PrivateKeyJWTWithKid(PrivateKeyJWT): +class PrivateKeyJWTWithKid(PrivateKeyJWT): # type: ignore[misc] """An implementation of the private_key_jwt client auth method that includes a kid header. This is needed because some providers (Keycloak) require the kid header to figure diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index e17cb840de99..149351dda025 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -515,7 +515,7 @@ async def process_pdu(pdu: EventBase) -> JsonDict: logger.error( "Failed to handle PDU %s", event_id, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore + exc_info=(f.type, f.value, f.getTracebackObject()), ) return {"error": str(e)} @@ -1247,7 +1247,7 @@ async def _process_incoming_pdus_in_room_inner( logger.error( "Failed to handle PDU %s", event.event_id, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore + exc_info=(f.type, f.value, f.getTracebackObject()), ) received_ts = await self.store.remove_received_event_from_staging( diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py index e7e0b5e049b3..24b68e03012d 100644 --- a/synapse/handlers/oidc.py +++ b/synapse/handlers/oidc.py @@ -1354,7 +1354,7 @@ async def handle_backchannel_logout( finish_request(request) -class LogoutToken(JWTClaims): +class LogoutToken(JWTClaims): # type: ignore[misc] """ Holds and verify claims of a logout token, as per https://openid.net/specs/openid-connect-backchannel-1_0.html#LogoutToken diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index 63b35c8d621f..d5257acb7da3 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -360,7 +360,7 @@ async def _purge_history( except Exception: f = Failure() logger.error( - "[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject()) # type: ignore + "[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject()) ) self._purges_by_id[purge_id].status = PurgeStatus.STATUS_FAILED self._purges_by_id[purge_id].error = f.getErrorMessage() @@ -689,7 +689,7 @@ async def _shutdown_and_purge_room( f = Failure() logger.error( "failed", - 
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore + exc_info=(f.type, f.value, f.getTracebackObject()), ) self._delete_by_id[delete_id].status = DeleteStatus.STATUS_FAILED self._delete_by_id[delete_id].error = f.getErrorMessage() diff --git a/synapse/http/server.py b/synapse/http/server.py index 04768c6a237f..933172c87327 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -108,7 +108,7 @@ def return_json_error( if f.check(SynapseError): # mypy doesn't understand that f.check asserts the type. - exc: SynapseError = f.value # type: ignore + exc: SynapseError = f.value error_code = exc.code error_dict = exc.error_dict(config) if exc.headers is not None: @@ -124,7 +124,7 @@ def return_json_error( "Got cancellation before client disconnection from %r: %r", request.request_metrics.name, request, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] + exc_info=(f.type, f.value, f.getTracebackObject()), ) else: error_code = 500 @@ -134,7 +134,7 @@ def return_json_error( "Failed handle request via %r: %r", request.request_metrics.name, request, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] + exc_info=(f.type, f.value, f.getTracebackObject()), ) # Only respond with an error response if we haven't already started writing, @@ -172,7 +172,7 @@ def return_html_error( """ if f.check(CodeMessageException): # mypy doesn't understand that f.check asserts the type. - cme: CodeMessageException = f.value # type: ignore + cme: CodeMessageException = f.value code = cme.code msg = cme.msg if cme.headers is not None: @@ -189,7 +189,7 @@ def return_html_error( logger.error( "Failed handle request %r", request, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] + exc_info=(f.type, f.value, f.getTracebackObject()), ) elif f.check(CancelledError): code = HTTP_STATUS_REQUEST_CANCELLED @@ -199,7 +199,7 @@ def return_html_error( logger.error( "Got cancellation before client disconnection when handling request %r", request, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] + exc_info=(f.type, f.value, f.getTracebackObject()), ) else: code = HTTPStatus.INTERNAL_SERVER_ERROR @@ -208,7 +208,7 @@ def return_html_error( logger.error( "Failed handle request %r", request, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] + exc_info=(f.type, f.value, f.getTracebackObject()), ) if isinstance(error_template, str): diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index 9ddd26ccaa2d..7ea0c4c36bcc 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -76,7 +76,7 @@ def unwrapFirstError(failure: Failure) -> Failure: # the subFailure's value, which will do a better job of preserving stacktraces. 
# (actually, you probably want to use yieldable_gather_results anyway) failure.trap(defer.FirstError) - return failure.value.subFailure # type: ignore[union-attr] # Issue in Twisted's annotations + return failure.value.subFailure P = ParamSpec("P") @@ -178,7 +178,7 @@ def log_failure( """ logger.error( - msg, exc_info=(failure.type, failure.value, failure.getTracebackObject()) # type: ignore[arg-type] + msg, exc_info=(failure.type, failure.value, failure.getTracebackObject()) ) if not consumeErrors: diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index 01e3cd46f650..4041e49e71dc 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -138,7 +138,7 @@ def errback(f: Failure) -> Optional[Failure]: for observer in observers: # This is a little bit of magic to correctly propagate stack # traces when we `await` on one of the observer deferreds. - f.value.__failure__ = f # type: ignore[union-attr] + f.value.__failure__ = f try: observer.errback(f) except Exception as e: diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index 452d5d04c1c0..ed0da17227d3 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -93,10 +93,8 @@ def _get_size_of(val: Any, *, recurse: bool = True) -> int: # a general type var, distinct from either KT or VT T = TypeVar("T") -P = TypeVar("P") - -class _TimedListNode(ListNode[P]): +class _TimedListNode(ListNode[T]): """A `ListNode` that tracks last access time.""" __slots__ = ["last_access_ts_secs"] @@ -821,7 +819,7 @@ class AsyncLruCache(Generic[KT, VT]): utilize external cache systems that require await behaviour to be created. """ - def __init__(self, *args, **kwargs): # type: ignore + def __init__(self, *args: Any, **kwargs: Any): self._lru_cache: LruCache[KT, VT] = LruCache(*args, **kwargs) async def get( diff --git a/tests/server.py b/tests/server.py index 7296f0a55281..a12c3e3b9a09 100644 --- a/tests/server.py +++ b/tests/server.py @@ -642,7 +642,7 @@ def runInteraction( pool.runWithConnection = runWithConnection # type: ignore[assignment] pool.runInteraction = runInteraction # type: ignore[assignment] # Replace the thread pool with a threadless 'thread' pool - pool.threadpool = ThreadPool(clock._reactor) # type: ignore[assignment] + pool.threadpool = ThreadPool(clock._reactor) pool.running = True # We've just changed the Databases to run DB transactions on the same From daf3a679089770e00d1b70d8ed2f91ab108b73e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gabriel=20F=C3=A9ron?= Date: Wed, 31 May 2023 15:18:37 +0200 Subject: [PATCH 027/153] Add get_canonical_room_alias to module API (#15450) Co-authored-by: Boxdot --- changelog.d/15450.feature | 1 + synapse/module_api/__init__.py | 27 +++++++++++++++++++++++++++ synapse/storage/controllers/state.py | 2 +- 3 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15450.feature diff --git a/changelog.d/15450.feature b/changelog.d/15450.feature new file mode 100644 index 000000000000..2102381143ba --- /dev/null +++ b/changelog.d/15450.feature @@ -0,0 +1 @@ +Support resolving a room's [canonical alias](https://spec.matrix.org/v1.7/client-server-api/#mroomcanonical_alias) via the module API. 
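As a usage illustration of the feature described above, here is a minimal sketch of how a third-party module could call the new helper. The module class, callback name, and fallback behaviour are invented for illustration; only the `get_canonical_room_alias(room_id: RoomID) -> Optional[RoomAlias]` signature is taken from the `ModuleApi` change in this patch.

from typing import Optional

from synapse.module_api import ModuleApi
from synapse.types import RoomAlias, RoomID


class ExampleModule:
    """Hypothetical module demonstrating the new canonical-alias helper."""

    def __init__(self, config: None, api: ModuleApi):
        self._api = api

    async def describe_room(self, room_id: str) -> str:
        # Prefer the canonical alias when the room has one, otherwise fall
        # back to the opaque room ID.
        alias: Optional[RoomAlias] = await self._api.get_canonical_room_alias(
            RoomID.from_string(room_id)
        )
        return alias.to_string() if alias is not None else room_id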
\ No newline at end of file diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 134bd2e62021..a8d6224a4528 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -122,6 +122,7 @@ JsonMapping, Requester, RoomAlias, + RoomID, StateMap, UserID, UserInfo, @@ -1570,6 +1571,32 @@ async def get_monthly_active_users_by_service( start_timestamp, end_timestamp ) + async def get_canonical_room_alias(self, room_id: RoomID) -> Optional[RoomAlias]: + """ + Retrieve the given room's current canonical alias. + + A room may declare an alias as "canonical", meaning that it is the + preferred alias to use when referring to the room. This function + retrieves that alias from the room's state. + + Added in Synapse v1.86.0. + + Args: + room_id: The Room ID to find the alias of. + + Returns: + None if the room ID does not exist, or if the room exists but has no canonical alias. + Otherwise, the parsed room alias. + """ + room_alias_str = ( + await self._storage_controllers.state.get_canonical_alias_for_room( + room_id.to_string() + ) + ) + if room_alias_str: + return RoomAlias.from_string(room_alias_str) + return None + async def lookup_room_alias(self, room_alias: str) -> Tuple[str, List[str]]: """ Get the room ID associated with a room alias. diff --git a/synapse/storage/controllers/state.py b/synapse/storage/controllers/state.py index 7089b0a1d85d..233df7cce24e 100644 --- a/synapse/storage/controllers/state.py +++ b/synapse/storage/controllers/state.py @@ -485,7 +485,7 @@ async def get_canonical_alias_for_room(self, room_id: str) -> Optional[str]: if not event: return None - return event.content.get("canonical_alias") + return event.content.get("alias") @trace @tag_args From 874378c0523bb82314434f1f0f2c5e1462a34a5b Mon Sep 17 00:00:00 2001 From: Jason Little Date: Wed, 31 May 2023 10:13:31 -0500 Subject: [PATCH 028/153] Docker fully qualified image names (#15689) * Fully qualified docker image names for the main Dockerfile and Complement related. * Fully qualified docker image names for Dockerfiles associated with building Debian release artifacts. This one is harder and is separate from the other commit in case it wasn't correct or was unwanted. I decided to do the expansion on the docker images in the Dockerfile itself, instead of the various source places that build which distribution that is selected, as it would have been more invasive with the scripts breaking up the string for tagging and such. This one is untested. 
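The approach described above can be sketched as follows. This is an illustrative, hypothetical invocation (the command-line flags and tag naming are assumptions, not the actual build script): the packaging script keeps passing a short distribution name through the `distro` build argument, and the `FROM docker.io/library/${distro}` line in the Dockerfile is what turns it into a fully qualified image name.

import subprocess


def build_dh_virtualenv_image(dist: str) -> None:
    # dist is a short name such as "debian:bullseye"; the expansion to
    # docker.io/library/debian:bullseye happens inside Dockerfile-dhvirtualenv,
    # not in this script.
    subprocess.check_call(
        [
            "docker",
            "build",
            "--build-arg",
            f"distro={dist}",
            "-f",
            "docker/Dockerfile-dhvirtualenv",
            "-t",
            f"dh-venv-builder:{dist.replace(':', '-')}",
            ".",
        ]
    )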
* Changelog * Update docker/Dockerfile-workers * Update docker/complement/Dockerfile --------- Co-authored-by: reivilibre --- .github/workflows/release-artifacts.yml | 1 + changelog.d/15689.misc | 1 + docker/Dockerfile | 6 +++--- docker/Dockerfile-dhvirtualenv | 4 ++-- docker/Dockerfile-workers | 4 ++-- docker/complement/Dockerfile | 5 +++-- docker/editable.Dockerfile | 2 +- scripts-dev/build_debian_packages.py | 2 ++ 8 files changed, 15 insertions(+), 10 deletions(-) create mode 100644 changelog.d/15689.misc diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index ebd7d298a9e9..09812004017f 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -34,6 +34,7 @@ jobs: - id: set-distros run: | # if we're running from a tag, get the full list of distros; otherwise just use debian:sid + # NOTE: inside the actual Dockerfile-dhvirtualenv, the image name is expanded into its full image path dists='["debian:sid"]' if [[ $GITHUB_REF == refs/tags/* ]]; then dists=$(scripts-dev/build_debian_packages.py --show-dists-json) diff --git a/changelog.d/15689.misc b/changelog.d/15689.misc new file mode 100644 index 000000000000..4262cc951507 --- /dev/null +++ b/changelog.d/15689.misc @@ -0,0 +1 @@ +Add fully qualified docker image names to Dockerfiles. diff --git a/docker/Dockerfile b/docker/Dockerfile index 6107dced43f9..12cff84131b7 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -27,7 +27,7 @@ ARG PYTHON_VERSION=3.11 ### # We hardcode the use of Debian bullseye here because this could change upstream # and other Dockerfiles used for testing are expecting bullseye. -FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as requirements +FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye as requirements # RUN --mount is specific to buildkit and is documented at # https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount. @@ -87,7 +87,7 @@ RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \ ### ### Stage 1: builder ### -FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as builder +FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye as builder # install the OS build deps RUN \ @@ -158,7 +158,7 @@ RUN --mount=type=cache,target=/synapse/target,sharing=locked \ ### Stage 2: runtime ### -FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye +FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse' LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md' diff --git a/docker/Dockerfile-dhvirtualenv b/docker/Dockerfile-dhvirtualenv index 2013732422ce..861129ebc225 100644 --- a/docker/Dockerfile-dhvirtualenv +++ b/docker/Dockerfile-dhvirtualenv @@ -24,7 +24,7 @@ ARG distro="" # https://launchpad.net/~jyrki-pulliainen/+archive/ubuntu/dh-virtualenv, but # it's not obviously easier to use that than to build our own.) 
-FROM ${distro} as builder +FROM docker.io/library/${distro} as builder RUN apt-get update -qq -o Acquire::Languages=none RUN env DEBIAN_FRONTEND=noninteractive apt-get install \ @@ -55,7 +55,7 @@ RUN cd /dh-virtualenv && DEB_BUILD_OPTIONS=nodoc dpkg-buildpackage -us -uc -b ### ### Stage 1 ### -FROM ${distro} +FROM docker.io/library/${distro} # Get the distro we want to pull from as a dynamic build variable # (We need to define it in each build stage) diff --git a/docker/Dockerfile-workers b/docker/Dockerfile-workers index faf7f2cef8a5..adb9a725e33f 100644 --- a/docker/Dockerfile-workers +++ b/docker/Dockerfile-workers @@ -7,7 +7,7 @@ ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION # target image. For repeated rebuilds, this is much faster than apt installing # each time. -FROM debian:bullseye-slim AS deps_base +FROM docker.io/library/debian:bullseye-slim AS deps_base RUN \ --mount=type=cache,target=/var/cache/apt,sharing=locked \ --mount=type=cache,target=/var/lib/apt,sharing=locked \ @@ -21,7 +21,7 @@ FROM debian:bullseye-slim AS deps_base # which makes it much easier to copy (but we need to make sure we use an image # based on the same debian version as the synapse image, to make sure we get # the expected version of libc. -FROM redis:6-bullseye AS redis_base +FROM docker.io/library/redis:6-bullseye AS redis_base # now build the final image, based on the the regular Synapse docker image FROM $FROM diff --git a/docker/complement/Dockerfile b/docker/complement/Dockerfile index be1aa1c55e09..5103068a49fc 100644 --- a/docker/complement/Dockerfile +++ b/docker/complement/Dockerfile @@ -7,6 +7,7 @@ # https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse ARG SYNAPSE_VERSION=latest +# This is an intermediate image, to be built locally (not pulled from a registry). ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION FROM $FROM @@ -19,8 +20,8 @@ FROM $FROM # the same debian version as Synapse's docker image (so the versions of the # shared libraries match). RUN adduser --system --uid 999 postgres --home /var/lib/postgresql - COPY --from=postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql - COPY --from=postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql + COPY --from=docker.io/library/postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql + COPY --from=docker.io/library/postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql ENV PATH="${PATH}:/usr/lib/postgresql/13/bin" ENV PGDATA=/var/lib/postgresql/data diff --git a/docker/editable.Dockerfile b/docker/editable.Dockerfile index 0e8cf2e712ff..c53ce1c718e8 100644 --- a/docker/editable.Dockerfile +++ b/docker/editable.Dockerfile @@ -10,7 +10,7 @@ ARG PYTHON_VERSION=3.9 ### # We hardcode the use of Debian bullseye here because this could change upstream # and other Dockerfiles used for testing are expecting bullseye. 
-FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye +FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye # Install Rust and other dependencies (stolen from normal Dockerfile) # install the OS build deps diff --git a/scripts-dev/build_debian_packages.py b/scripts-dev/build_debian_packages.py index ede766501100..4c9f134ddd83 100755 --- a/scripts-dev/build_debian_packages.py +++ b/scripts-dev/build_debian_packages.py @@ -20,6 +20,8 @@ from types import FrameType from typing import Collection, Optional, Sequence, Set +# These are expanded inside the dockerfile to be a fully qualified image name. +# e.g. docker.io/library/debian:bullseye DISTS = ( "debian:buster", # oldstable: EOL 2022-08 "debian:bullseye", From 6f18812bb044a2959fdc9881c328578adb7b33f2 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 31 May 2023 13:06:57 -0400 Subject: [PATCH 029/153] Add stubs package for lxml. (#15697) The stubs have some issues so this has some generous cast and ignores in it, but it is better than not having stubs. Note that confusing that Element is a function which creates _Element instances (and similarly for Comment). --- changelog.d/15697.misc | 1 + mypy.ini | 3 -- poetry.lock | 25 +++++++-- pyproject.toml | 1 + synapse/media/oembed.py | 32 ++++++----- synapse/media/preview_html.py | 79 ++++++++++++++++++++-------- tests/media/test_html_preview.py | 18 ++++++- tests/media/test_oembed.py | 2 +- tests/media/test_url_previewer.py | 2 +- tests/rest/media/test_url_preview.py | 2 +- 10 files changed, 117 insertions(+), 48 deletions(-) create mode 100644 changelog.d/15697.misc diff --git a/changelog.d/15697.misc b/changelog.d/15697.misc new file mode 100644 index 000000000000..93ceaeafc9b9 --- /dev/null +++ b/changelog.d/15697.misc @@ -0,0 +1 @@ +Improve type hints. diff --git a/mypy.ini b/mypy.ini index 56cd1d560ea8..1038b7d8c7a4 100644 --- a/mypy.ini +++ b/mypy.ini @@ -60,9 +60,6 @@ ignore_missing_imports = True [mypy-ijson.*] ignore_missing_imports = True -[mypy-lxml] -ignore_missing_imports = True - # https://github.com/msgpack/msgpack-python/issues/448 [mypy-msgpack] ignore_missing_imports = True diff --git a/poetry.lock b/poetry.lock index 0879e64cf176..d8964f5719de 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. 
[[package]] name = "alabaster" @@ -1215,6 +1215,21 @@ html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=0.29.7)"] +[[package]] +name = "lxml-stubs" +version = "0.4.0" +description = "Type annotations for the lxml package" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "lxml-stubs-0.4.0.tar.gz", hash = "sha256:184877b42127256abc2b932ba8bd0ab5ea80bd0b0fee618d16daa40e0b71abee"}, + {file = "lxml_stubs-0.4.0-py3-none-any.whl", hash = "sha256:3b381e9e82397c64ea3cc4d6f79d1255d015f7b114806d4826218805c10ec003"}, +] + +[package.extras] +test = ["coverage[toml] (==5.2)", "pytest (>=6.0.0)", "pytest-mypy-plugins (==1.9.3)"] + [[package]] name = "markdown-it-py" version = "2.2.0" @@ -3409,22 +3424,22 @@ docs = ["Sphinx", "repoze.sphinx.autointerface"] test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"] [extras] -all = ["matrix-synapse-ldap3", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pysaml2", "authlib", "lxml", "sentry-sdk", "jaeger-client", "opentracing", "txredisapi", "hiredis", "Pympler", "pyicu"] +all = ["Pympler", "authlib", "hiredis", "jaeger-client", "lxml", "matrix-synapse-ldap3", "opentracing", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pyicu", "pysaml2", "sentry-sdk", "txredisapi"] cache-memory = ["Pympler"] jwt = ["authlib"] matrix-synapse-ldap3 = ["matrix-synapse-ldap3"] oidc = ["authlib"] opentracing = ["jaeger-client", "opentracing"] postgres = ["psycopg2", "psycopg2cffi", "psycopg2cffi-compat"] -redis = ["txredisapi", "hiredis"] +redis = ["hiredis", "txredisapi"] saml2 = ["pysaml2"] sentry = ["sentry-sdk"] systemd = ["systemd-python"] -test = ["parameterized", "idna"] +test = ["idna", "parameterized"] url-preview = ["lxml"] user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.7.1" -content-hash = "ef3a16dd66177f7141239e1a2d3e07cc14c08f1e4e0c5127184d022bc062da52" +content-hash = "7ad11e62a675e09444cf33ca2de3216fc4efc5874a2575e54d95d577a52439d3" diff --git a/pyproject.toml b/pyproject.toml index 7227bc752316..4476f57ca74f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -314,6 +314,7 @@ black = ">=22.3.0" ruff = "0.0.265" # Typechecking +lxml-stubs = ">=0.4.0" mypy = "*" mypy-zope = "*" types-bleach = ">=4.1.0" diff --git a/synapse/media/oembed.py b/synapse/media/oembed.py index c0eaf04be544..5ad9eec80b97 100644 --- a/synapse/media/oembed.py +++ b/synapse/media/oembed.py @@ -14,7 +14,7 @@ import html import logging import urllib.parse -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, List, Optional, cast import attr @@ -98,7 +98,7 @@ def get_oembed_url(self, url: str) -> Optional[str]: # No match. return None - def autodiscover_from_html(self, tree: "etree.Element") -> Optional[str]: + def autodiscover_from_html(self, tree: "etree._Element") -> Optional[str]: """ Search an HTML document for oEmbed autodiscovery information. @@ -109,18 +109,22 @@ def autodiscover_from_html(self, tree: "etree.Element") -> Optional[str]: The URL to use for oEmbed information, or None if no URL was found. """ # Search for link elements with the proper rel and type attributes. - for tag in tree.xpath( - "//link[@rel='alternate'][@type='application/json+oembed']" + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. 
+ for tag in cast( + List["etree._Element"], + tree.xpath("//link[@rel='alternate'][@type='application/json+oembed']"), ): if "href" in tag.attrib: - return tag.attrib["href"] + return cast(str, tag.attrib["href"]) # Some providers (e.g. Flickr) use alternative instead of alternate. - for tag in tree.xpath( - "//link[@rel='alternative'][@type='application/json+oembed']" + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + for tag in cast( + List["etree._Element"], + tree.xpath("//link[@rel='alternative'][@type='application/json+oembed']"), ): if "href" in tag.attrib: - return tag.attrib["href"] + return cast(str, tag.attrib["href"]) return None @@ -212,11 +216,12 @@ def parse_oembed_response(self, url: str, raw_body: bytes) -> OEmbedResult: return OEmbedResult(open_graph_response, author_name, cache_age) -def _fetch_urls(tree: "etree.Element", tag_name: str) -> List[str]: +def _fetch_urls(tree: "etree._Element", tag_name: str) -> List[str]: results = [] - for tag in tree.xpath("//*/" + tag_name): + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + for tag in cast(List["etree._Element"], tree.xpath("//*/" + tag_name)): if "src" in tag.attrib: - results.append(tag.attrib["src"]) + results.append(cast(str, tag.attrib["src"])) return results @@ -244,11 +249,12 @@ def calc_description_and_urls(open_graph_response: JsonDict, html_body: str) -> parser = etree.HTMLParser(recover=True, encoding="utf-8") # Attempt to parse the body. If this fails, log and return no metadata. - tree = etree.fromstring(html_body, parser) + # TODO Develop of lxml-stubs has this correct. + tree = etree.fromstring(html_body, parser) # type: ignore[arg-type] # The data was successfully parsed, but no tree was found. if tree is None: - return + return # type: ignore[unreachable] # Attempt to find interesting URLs (images, videos, embeds). if "og:image" not in open_graph_response: diff --git a/synapse/media/preview_html.py b/synapse/media/preview_html.py index 516d0434f095..1bc7ccb7f3b5 100644 --- a/synapse/media/preview_html.py +++ b/synapse/media/preview_html.py @@ -24,6 +24,7 @@ Optional, Set, Union, + cast, ) if TYPE_CHECKING: @@ -115,7 +116,7 @@ def _get_html_media_encodings( def decode_body( body: bytes, uri: str, content_type: Optional[str] = None -) -> Optional["etree.Element"]: +) -> Optional["etree._Element"]: """ This uses lxml to parse the HTML document. @@ -152,11 +153,12 @@ def decode_body( # Attempt to parse the body. Returns None if the body was successfully # parsed, but no tree was found. - return etree.fromstring(body, parser) + # TODO Develop of lxml-stubs has this correct. + return etree.fromstring(body, parser) # type: ignore[arg-type] def _get_meta_tags( - tree: "etree.Element", + tree: "etree._Element", property: str, prefix: str, property_mapper: Optional[Callable[[str], Optional[str]]] = None, @@ -175,9 +177,15 @@ def _get_meta_tags( Returns: A map of tag name to value. """ + # This actually returns Dict[str, str], but the caller sets this as a variable + # which is Dict[str, Optional[str]]. results: Dict[str, Optional[str]] = {} - for tag in tree.xpath( - f"//*/meta[starts-with(@{property}, '{prefix}:')][@content][not(@content='')]" + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. 
+ for tag in cast( + List["etree._Element"], + tree.xpath( + f"//*/meta[starts-with(@{property}, '{prefix}:')][@content][not(@content='')]" + ), ): # if we've got more than 50 tags, someone is taking the piss if len(results) >= 50: @@ -187,14 +195,15 @@ def _get_meta_tags( ) return {} - key = tag.attrib[property] + key = cast(str, tag.attrib[property]) if property_mapper: - key = property_mapper(key) + new_key = property_mapper(key) # None is a special value used to ignore a value. - if key is None: + if new_key is None: continue + key = new_key - results[key] = tag.attrib["content"] + results[key] = cast(str, tag.attrib["content"]) return results @@ -219,7 +228,7 @@ def _map_twitter_to_open_graph(key: str) -> Optional[str]: return "og" + key[7:] -def parse_html_to_open_graph(tree: "etree.Element") -> Dict[str, Optional[str]]: +def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]]: """ Parse the HTML document into an Open Graph response. @@ -276,24 +285,36 @@ def parse_html_to_open_graph(tree: "etree.Element") -> Dict[str, Optional[str]]: if "og:title" not in og: # Attempt to find a title from the title tag, or the biggest header on the page. - title = tree.xpath("((//title)[1] | (//h1)[1] | (//h2)[1] | (//h3)[1])/text()") + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + title = cast( + List["etree._ElementUnicodeResult"], + tree.xpath("((//title)[1] | (//h1)[1] | (//h2)[1] | (//h3)[1])/text()"), + ) if title: og["og:title"] = title[0].strip() else: og["og:title"] = None if "og:image" not in og: - meta_image = tree.xpath( - "//*/meta[translate(@itemprop, 'IMAGE', 'image')='image'][not(@content='')]/@content[1]" + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + meta_image = cast( + List["etree._ElementUnicodeResult"], + tree.xpath( + "//*/meta[translate(@itemprop, 'IMAGE', 'image')='image'][not(@content='')]/@content[1]" + ), ) # If a meta image is found, use it. if meta_image: og["og:image"] = meta_image[0] else: # Try to find images which are larger than 10px by 10px. + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. # # TODO: consider inlined CSS styles as well as width & height attribs - images = tree.xpath("//img[@src][number(@width)>10][number(@height)>10]") + images = cast( + List["etree._Element"], + tree.xpath("//img[@src][number(@width)>10][number(@height)>10]"), + ) images = sorted( images, key=lambda i: ( @@ -302,20 +323,29 @@ def parse_html_to_open_graph(tree: "etree.Element") -> Dict[str, Optional[str]]: ) # If no images were found, try to find *any* images. if not images: - images = tree.xpath("//img[@src][1]") + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + images = cast(List["etree._Element"], tree.xpath("//img[@src][1]")) if images: - og["og:image"] = images[0].attrib["src"] + og["og:image"] = cast(str, images[0].attrib["src"]) # Finally, fallback to the favicon if nothing else. else: - favicons = tree.xpath("//link[@href][contains(@rel, 'icon')]/@href[1]") + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + favicons = cast( + List["etree._ElementUnicodeResult"], + tree.xpath("//link[@href][contains(@rel, 'icon')]/@href[1]"), + ) if favicons: og["og:image"] = favicons[0] if "og:description" not in og: # Check the first meta description tag for content. 
- meta_description = tree.xpath( - "//*/meta[translate(@name, 'DESCRIPTION', 'description')='description'][not(@content='')]/@content[1]" + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + meta_description = cast( + List["etree._ElementUnicodeResult"], + tree.xpath( + "//*/meta[translate(@name, 'DESCRIPTION', 'description')='description'][not(@content='')]/@content[1]" + ), ) # If a meta description is found with content, use it. if meta_description: @@ -332,7 +362,7 @@ def parse_html_to_open_graph(tree: "etree.Element") -> Dict[str, Optional[str]]: return og -def parse_html_description(tree: "etree.Element") -> Optional[str]: +def parse_html_description(tree: "etree._Element") -> Optional[str]: """ Calculate a text description based on an HTML document. @@ -368,6 +398,9 @@ def parse_html_description(tree: "etree.Element") -> Optional[str]: "canvas", "img", "picture", + # etree.Comment is a function which creates an etree._Comment element. + # The "tag" attribute of an etree._Comment instance is confusingly the + # etree.Comment function instead of a string. etree.Comment, } @@ -381,8 +414,8 @@ def parse_html_description(tree: "etree.Element") -> Optional[str]: def _iterate_over_text( - tree: Optional["etree.Element"], - tags_to_ignore: Set[Union[str, "etree.Comment"]], + tree: Optional["etree._Element"], + tags_to_ignore: Set[object], stack_limit: int = 1024, ) -> Generator[str, None, None]: """Iterate over the tree returning text nodes in a depth first fashion, @@ -402,7 +435,7 @@ def _iterate_over_text( # This is a stack whose items are elements to iterate over *or* strings # to be returned. - elements: List[Union[str, "etree.Element"]] = [tree] + elements: List[Union[str, "etree._Element"]] = [tree] while elements: el = elements.pop() diff --git a/tests/media/test_html_preview.py b/tests/media/test_html_preview.py index e7da75db3ee0..ea84bb3d3d53 100644 --- a/tests/media/test_html_preview.py +++ b/tests/media/test_html_preview.py @@ -24,7 +24,7 @@ try: import lxml except ImportError: - lxml = None + lxml = None # type: ignore[assignment] class SummarizeTestCase(unittest.TestCase): @@ -160,6 +160,7 @@ def test_simple(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) @@ -176,6 +177,7 @@ def test_comment(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) @@ -195,6 +197,7 @@ def test_comment2(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual( @@ -217,6 +220,7 @@ def test_script(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) @@ -231,6 +235,7 @@ def test_missing_title(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": None, "og:description": "Some text."}) @@ -246,6 +251,7 @@ def test_missing_title(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) 
self.assertEqual(og, {"og:title": "Title", "og:description": "Title"}) @@ -261,6 +267,7 @@ def test_h1_as_title(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Title", "og:description": "Some text."}) @@ -281,6 +288,7 @@ def test_empty_description(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Title", "og:description": "Finally!"}) @@ -296,6 +304,7 @@ def test_missing_title_and_broken_h1(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": None, "og:description": "Some text."}) @@ -324,6 +333,7 @@ def test_xml(self) -> None: FooSome text. """.strip() tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) @@ -338,6 +348,7 @@ def test_invalid_encoding(self) -> None: """ tree = decode_body(html, "http://example.com/test.html", "invalid-encoding") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) @@ -353,6 +364,7 @@ def test_invalid_encoding2(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "ÿÿ Foo", "og:description": "Some text."}) @@ -367,6 +379,7 @@ def test_windows_1252(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "ó", "og:description": "Some text."}) @@ -380,6 +393,7 @@ def test_twitter_tag(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual( og, @@ -401,6 +415,7 @@ def test_twitter_tag(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual( og, @@ -419,6 +434,7 @@ def test_nested_nodes(self) -> None: with a cheeky SVG and some tail text """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual( og, diff --git a/tests/media/test_oembed.py b/tests/media/test_oembed.py index c8bf8421daf1..3bc19cb1ccd0 100644 --- a/tests/media/test_oembed.py +++ b/tests/media/test_oembed.py @@ -28,7 +28,7 @@ try: import lxml except ImportError: - lxml = None + lxml = None # type: ignore[assignment] class OEmbedTests(HomeserverTestCase): diff --git a/tests/media/test_url_previewer.py b/tests/media/test_url_previewer.py index 3c4c7d676520..46ecde534441 100644 --- a/tests/media/test_url_previewer.py +++ b/tests/media/test_url_previewer.py @@ -24,7 +24,7 @@ try: import lxml except ImportError: - lxml = None + lxml = None # type: ignore[assignment] class URLPreviewTests(unittest.HomeserverTestCase): diff --git a/tests/rest/media/test_url_preview.py b/tests/rest/media/test_url_preview.py index 170fb0534ad5..05d5e39cabd4 100644 --- a/tests/rest/media/test_url_preview.py +++ b/tests/rest/media/test_url_preview.py @@ -40,7 +40,7 @@ try: import lxml except ImportError: - lxml = None + lxml = None # type: 
ignore[assignment] class URLPreviewTests(unittest.HomeserverTestCase): From 0b5f64ff09d44338d2514cbdba80aa4a4f11d1aa Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 31 May 2023 14:35:49 -0500 Subject: [PATCH 030/153] Add Synapse version deploy annotations to Grafana dashboard (#15674) Fix https://github.com/matrix-org/synapse/issues/15662 This manifests as purple lines that show up on all time series panels that you can hover and see what version was deployed. Also added a new "Deployed Synapse versions over time" panel where the color block changes with each version. And mixed this color block into the "Up" time series panel. To get the Grafana dashboard JSON to copy here: use the **Share** icon at the top -> **Export** -> check the **Export for sharing externally** option -> **View JSON** or **Save to file** --- changelog.d/15674.feature | 1 + contrib/grafana/synapse.json | 1240 +++++++++++++++++++++++++++------- 2 files changed, 981 insertions(+), 260 deletions(-) create mode 100644 changelog.d/15674.feature diff --git a/changelog.d/15674.feature b/changelog.d/15674.feature new file mode 100644 index 000000000000..68cf207dc0f6 --- /dev/null +++ b/changelog.d/15674.feature @@ -0,0 +1 @@ +Add Syanpse version deploy annotations to Grafana dashboard which enables easy correlation between behavior changes witnessed in a graph to a certain Synapse version and nail down regressions. diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json index f09cd6f87c28..f3253b32b929 100644 --- a/contrib/grafana/synapse.json +++ b/contrib/grafana/synapse.json @@ -56,6 +56,17 @@ "name": "Annotations & Alerts", "showIn": 0, "type": "dashboard" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "enable": true, + "expr": "changes(process_start_time_seconds{instance=\"matrix.org\",job=~\"synapse\"}[$bucket_size]) * on (instance, job) group_left(version) synapse_build_info{instance=\"matrix.org\",job=\"synapse\"}", + "iconColor": "purple", + "name": "deploys", + "titleFormat": "Deployed {{version}}" } ] }, @@ -670,6 +681,95 @@ "align": false } }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "axisSoftMax": 1, + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 10, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 19 + }, + "id": 245, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "expr": "synapse_build_info{instance=\"$instance\", job=\"synapse\"} - 1", + "legendFormat": "version {{version}}", + "range": true, + "refId": "deployed_synapse_versions" + } + ], 
+ "title": "Deployed Synapse versions over time", + "type": "timeseries" + }, { "aliasColors": {}, "bars": false, @@ -809,6 +909,7 @@ "dashLength": 10, "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "editable": true, @@ -874,11 +975,13 @@ "datasource": { "uid": "$datasource" }, + "editorMode": "code", "expr": "rate(process_cpu_system_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", "format": "time_series", "intervalFactor": 1, "legendFormat": "{{job}}-{{index}} system ", "metric": "", + "range": true, "refId": "B", "step": 20 }, @@ -1328,6 +1431,7 @@ "dashLength": 10, "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "fieldConfig": { @@ -1368,7 +1472,15 @@ "pointradius": 5, "points": false, "renderer": "flot", - "seriesOverrides": [], + "seriesOverrides": [ + { + "$$hashKey": "object:116", + "alias": "/^version .*/", + "lines": true, + "linewidth": 6, + "points": false + } + ], "spaceLength": 10, "stack": false, "steppedLine": false, @@ -1377,11 +1489,25 @@ "datasource": { "uid": "$datasource" }, + "editorMode": "code", "expr": "min_over_time(up{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", "format": "time_series", "intervalFactor": 2, "legendFormat": "{{job}}-{{index}}", + "range": true, "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "$datasource" + }, + "editorMode": "code", + "expr": "synapse_build_info{instance=\"$instance\", job=\"synapse\"} - 1", + "hide": false, + "legendFormat": "version {{version}}", + "range": true, + "refId": "deployed_synapse_versions" } ], "thresholds": [], @@ -1788,7 +1914,7 @@ "h": 9, "w": 12, "x": 0, - "y": 56 + "y": 28 }, "heatmap": {}, "hideZeroBuckets": false, @@ -1890,7 +2016,7 @@ "h": 9, "w": 12, "x": 12, - "y": 56 + "y": 28 }, "hiddenSeries": false, "id": 33, @@ -1982,7 +2108,7 @@ "h": 7, "w": 12, "x": 0, - "y": 65 + "y": 37 }, "hiddenSeries": false, "id": 40, @@ -2070,7 +2196,7 @@ "h": 7, "w": 12, "x": 12, - "y": 65 + "y": 37 }, "hiddenSeries": false, "id": 46, @@ -2161,7 +2287,7 @@ "h": 7, "w": 12, "x": 0, - "y": 72 + "y": 44 }, "hiddenSeries": false, "id": 44, @@ -2253,7 +2379,7 @@ "h": 7, "w": 12, "x": 12, - "y": 72 + "y": 44 }, "hiddenSeries": false, "id": 45, @@ -2354,7 +2480,7 @@ "h": 9, "w": 12, "x": 0, - "y": 79 + "y": 51 }, "hiddenSeries": false, "id": 118, @@ -2547,7 +2673,7 @@ "h": 9, "w": 12, "x": 12, - "y": 79 + "y": 51 }, "id": 222, "options": { @@ -2646,7 +2772,7 @@ "h": 8, "w": 12, "x": 0, - "y": 57 + "y": 29 }, "hiddenSeries": false, "id": 4, @@ -2768,7 +2894,7 @@ "h": 8, "w": 12, "x": 12, - "y": 57 + "y": 29 }, "hiddenSeries": false, "id": 32, @@ -2867,7 +2993,7 @@ "h": 8, "w": 12, "x": 0, - "y": 65 + "y": 37 }, "hiddenSeries": false, "id": 139, @@ -2989,7 +3115,7 @@ "h": 8, "w": 12, "x": 12, - "y": 65 + "y": 37 }, "hiddenSeries": false, "id": 52, @@ -3111,7 +3237,7 @@ "h": 8, "w": 12, "x": 0, - "y": 73 + "y": 45 }, "hiddenSeries": false, "id": 7, @@ -3212,7 +3338,7 @@ "h": 8, "w": 12, "x": 12, - "y": 73 + "y": 45 }, "hiddenSeries": false, "id": 47, @@ -3310,7 +3436,7 @@ "h": 9, "w": 12, "x": 0, - "y": 81 + "y": 53 }, "hiddenSeries": false, "id": 103, @@ -3445,7 +3571,7 @@ "h": 9, "w": 12, "x": 0, - "y": 5 + "y": 30 }, "hiddenSeries": false, "id": 99, @@ -3467,7 +3593,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "8.4.3", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -3538,7 +3664,7 @@ "h": 9, "w": 12, "x": 12, - "y": 5 
+ "y": 30 }, "hiddenSeries": false, "id": 101, @@ -3560,7 +3686,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "8.4.3", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -3631,7 +3757,7 @@ "h": 8, "w": 12, "x": 0, - "y": 14 + "y": 39 }, "hiddenSeries": false, "id": 138, @@ -3651,7 +3777,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "8.4.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -3746,7 +3872,7 @@ "h": 9, "w": 12, "x": 0, - "y": 59 + "y": 31 }, "hiddenSeries": false, "id": 79, @@ -3846,7 +3972,7 @@ "h": 9, "w": 12, "x": 12, - "y": 59 + "y": 31 }, "hiddenSeries": false, "id": 83, @@ -3934,6 +4060,7 @@ "dashLength": 10, "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "fieldConfig": { @@ -3948,7 +4075,7 @@ "h": 9, "w": 12, "x": 0, - "y": 68 + "y": 40 }, "hiddenSeries": false, "id": 109, @@ -3983,11 +4110,13 @@ "datasource": { "uid": "$datasource" }, - "expr": "sum(rate(synapse_federation_client_sent_pdu_destinations:total_total{instance=\"$instance\"}[$bucket_size]))", + "editorMode": "code", + "expr": "sum(rate(synapse_federation_client_sent_pdu_destinations_count_total{instance=\"$instance\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 1, "legendFormat": "pdus", + "range": true, "refId": "A" }, { @@ -4052,7 +4181,7 @@ "h": 9, "w": 12, "x": 12, - "y": 68 + "y": 40 }, "hiddenSeries": false, "id": 111, @@ -4130,119 +4259,363 @@ } }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "$datasource" + "uid": "${DS_PROMETHEUS}" }, - "description": "The number of events in the in-memory queues ", + "description": "Triangular growth may indicate a problem with federation sending from the remote host --- but it may also be the case that everyone is asleep and no messages are being sent.\n\nSee https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#federation_metrics_domains", "fieldConfig": { "defaults": { - "links": [] + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "axisSoftMax": 60, + "axisSoftMin": 0, + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 60 + } + ] + }, + "unit": "m" }, - "overrides": [] + "overrides": [ + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "libera.chat " + ], + "prefix": "All except:", + "readOnly": true + } + }, + "properties": [ + { + "id": "custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 0, - "y": 77 - }, - "hiddenSeries": false, - "id": 142, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": 
false, - "values": false + "y": 49 }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", + "id": 243, "options": { - "alertThreshold": true + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } }, - "percentage": false, - "pluginVersion": "9.2.2", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, "targets": [ { "datasource": { "type": "prometheus", - "uid": "$datasource" + "uid": "${DS_PROMETHEUS}" }, "editorMode": "code", - "expr": "synapse_federation_transaction_queue_pending_pdus{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", - "interval": "", - "legendFormat": "pending PDUs {{job}}-{{index}}", + "exemplar": false, + "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60", + "instant": false, + "legendFormat": "{{server_name}} ", "range": true, "refId": "A" - }, - { - "datasource": { - "type": "prometheus", - "uid": "$datasource" - }, - "expr": "synapse_federation_transaction_queue_pending_edus{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", - "interval": "", - "legendFormat": "pending EDUs {{job}}-{{index}}", - "refId": "B" - } - ], - "thresholds": [], - "timeRegions": [], - "title": "In-memory federation transmission queues", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:547", - "format": "short", - "label": "events", - "logBase": 1, - "min": "0", - "show": true - }, - { - "$$hashKey": "object:548", - "format": "short", - "label": "", - "logBase": 1, - "show": true } ], - "yaxis": { - "align": false - } + "title": "Age of last PDU received from nominated hosts", + "type": "timeseries" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "description": "Triangular growth may indicate a problem with federation senders on the monitored instance---but it may also be the case that everyone is asleep and no messages are being sent.\n\nSee https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#federation_metrics_domains", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "axisSoftMax": 60, + "axisSoftMin": 0, + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 60 + } + ] + }, + "unit": "m" + }, + "overrides": [ + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "libera.chat" + ], + "prefix": "All except:", + "readOnly": true + } + }, + "properties": [ + { + "id": 
"custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 49 + }, + "id": 241, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "exemplar": false, + "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_sent_pdu_time[10m]))) / 60", + "instant": false, + "legendFormat": "{{server_name}}", + "range": true, + "refId": "A" + } + ], + "title": "Age of last PDU sent to nominated hosts", + "type": "timeseries" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": { + "type": "prometheus", + "uid": "$datasource" + }, + "description": "The number of events in the in-memory queues ", + "fieldConfig": { + "defaults": { + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 57 + }, + "hiddenSeries": false, + "id": 142, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "9.2.2", + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "$datasource" + }, + "editorMode": "code", + "expr": "synapse_federation_transaction_queue_pending_pdus{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "interval": "", + "legendFormat": "pending PDUs {{job}}-{{index}}", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "$datasource" + }, + "expr": "synapse_federation_transaction_queue_pending_edus{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "interval": "", + "legendFormat": "pending EDUs {{job}}-{{index}}", + "refId": "B" + } + ], + "thresholds": [], + "timeRegions": [], + "title": "In-memory federation transmission queues", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "mode": "time", + "show": true, + "values": [] + }, + "yaxes": [ + { + "$$hashKey": "object:547", + "format": "short", + "label": "events", + "logBase": 1, + "min": "0", + "show": true + }, + { + "$$hashKey": "object:548", + "format": "short", + "label": "", + "logBase": 1, + "show": true + } + ], + "yaxis": { + "align": false + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, "datasource": { "uid": "$datasource" }, @@ -4259,7 +4632,7 @@ "h": 9, "w": 12, "x": 12, - "y": 77 + "y": 57 }, "hiddenSeries": false, "id": 140, @@ -4428,7 +4801,7 @@ "h": 9, "w": 12, "x": 0, - "y": 85 + "y": 66 }, "heatmap": {}, "hideZeroBuckets": false, @@ -4533,7 +4906,7 @@ "h": 9, "w": 12, "x": 12, - "y": 86 + "y": 66 }, "hiddenSeries": false, "id": 162, @@ -4745,11 +5118,26 @@ "datasource": { "uid": "$datasource" }, + "fieldConfig": { + "defaults": { + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "scaleDistribution": { + "type": "linear" + } + } + }, + "overrides": 
[] + }, "gridPos": { "h": 9, "w": 12, "x": 0, - "y": 94 + "y": 75 }, "heatmap": {}, "hideZeroBuckets": false, @@ -4759,6 +5147,48 @@ "show": false }, "links": [], + "options": { + "calculate": false, + "calculation": {}, + "cellGap": -1, + "cellValues": { + "decimals": 2 + }, + "color": { + "exponent": 0.5, + "fill": "#b4ff00", + "min": 0, + "mode": "scheme", + "reverse": false, + "scale": "exponential", + "scheme": "Inferno", + "steps": 128 + }, + "exemplars": { + "color": "rgba(255,0,255,0.7)" + }, + "filterValues": { + "le": 1e-9 + }, + "legend": { + "show": false + }, + "rowsFrame": { + "layout": "auto" + }, + "showValue": "never", + "tooltip": { + "show": true, + "yHistogram": true + }, + "yAxis": { + "axisPlacement": "left", + "decimals": 0, + "reverse": false, + "unit": "s" + } + }, + "pluginVersion": "9.2.2", "reverseYBuckets": false, "targets": [ { @@ -4798,6 +5228,7 @@ "dashLength": 10, "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "editable": true, @@ -4815,7 +5246,7 @@ "h": 9, "w": 12, "x": 12, - "y": 95 + "y": 75 }, "hiddenSeries": false, "id": 203, @@ -4837,7 +5268,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -4850,11 +5281,13 @@ "datasource": { "uid": "$datasource" }, - "expr": "synapse_federation_server_oldest_inbound_pdu_in_staging{job=\"$job\",index=~\"$index\",instance=\"$instance\"}", + "editorMode": "code", + "expr": "synapse_federation_server_oldest_inbound_pdu_in_staging{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}", "format": "time_series", "interval": "", "intervalFactor": 1, - "legendFormat": "rss {{index}}", + "legendFormat": "{{job}}-{{index}}", + "range": true, "refId": "A", "step": 4 } @@ -4899,6 +5332,7 @@ "dashLength": 10, "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "editable": true, @@ -4916,7 +5350,7 @@ "h": 9, "w": 12, "x": 0, - "y": 103 + "y": 84 }, "hiddenSeries": false, "id": 202, @@ -4938,7 +5372,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -4951,11 +5385,13 @@ "datasource": { "uid": "$datasource" }, - "expr": "synapse_federation_server_number_inbound_pdu_in_staging{job=\"$job\",index=~\"$index\",instance=\"$instance\"}", + "editorMode": "code", + "expr": "synapse_federation_server_number_inbound_pdu_in_staging{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}", "format": "time_series", "interval": "", "intervalFactor": 1, - "legendFormat": "rss {{index}}", + "legendFormat": "{{job}}-{{index}}", + "range": true, "refId": "A", "step": 4 } @@ -5009,7 +5445,7 @@ "h": 8, "w": 12, "x": 12, - "y": 104 + "y": 84 }, "hiddenSeries": false, "id": 205, @@ -5029,7 +5465,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -5115,6 +5551,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -5162,7 +5600,7 @@ "h": 8, "w": 12, "x": 0, - "y": 1 + "y": 154 }, "id": 239, "options": { @@ -5201,6 +5639,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -5248,7 +5688,7 @@ "h": 8, "w": 12, "x": 12, - "y": 1 + "y": 154 }, 
"id": 235, "options": { @@ -5288,6 +5728,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -5335,7 +5777,7 @@ "h": 8, "w": 12, "x": 0, - "y": 9 + "y": 162 }, "id": 237, "options": { @@ -5376,6 +5818,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -5423,7 +5867,7 @@ "h": 8, "w": 12, "x": 12, - "y": 9 + "y": 162 }, "id": 233, "options": { @@ -5474,7 +5918,7 @@ "h": 8, "w": 12, "x": 0, - "y": 17 + "y": 170 }, "hiddenSeries": false, "id": 229, @@ -5497,7 +5941,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -5709,6 +6153,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -5773,7 +6219,7 @@ "h": 8, "w": 12, "x": 12, - "y": 17 + "y": 170 }, "id": 231, "options": { @@ -5832,65 +6278,96 @@ "id": 60, "panels": [ { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "fieldConfig": { "defaults": { - "links": [] + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "links": [], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "hertz" }, "overrides": [] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 0, - "y": 32 + "y": 155 }, - "hiddenSeries": false, "id": 51, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, "links": [], - "nullPointMode": "null", "options": { - "alertThreshold": true + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } }, - "paceLength": 10, - "percentage": false, - "pluginVersion": "8.4.3", - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "pluginVersion": "9.2.2", "targets": [ { "datasource": { "uid": "$datasource" }, + "editorMode": "code", "expr": "rate(synapse_http_httppusher_http_pushes_processed_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) and on (instance, job, index) (synapse_http_httppusher_http_pushes_failed_total + synapse_http_httppusher_http_pushes_processed_total) > 0", "format": "time_series", "interval": "", "intervalFactor": 2, - "legendFormat": "processed {{job}}", + "legendFormat": "processed {{job}}-{{index}}", + "range": true, "refId": "A", "step": 20 }, @@ -5898,43 +6375,18 @@ "datasource": { "uid": 
"$datasource" }, + "editorMode": "code", "expr": "rate(synapse_http_httppusher_http_pushes_failed_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) and on (instance, job, index) (synapse_http_httppusher_http_pushes_failed_total + synapse_http_httppusher_http_pushes_processed_total) > 0", "format": "time_series", "intervalFactor": 2, - "legendFormat": "failed {{job}}", + "legendFormat": "failed {{job}}-{{index}}", + "range": true, "refId": "B", "step": 20 } ], - "thresholds": [], - "timeRegions": [], "title": "HTTP Push rate", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "hertz", - "logBase": 1, - "show": true - }, - { - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { "aliasColors": {}, @@ -5957,7 +6409,7 @@ "h": 8, "w": 12, "x": 12, - "y": 32 + "y": 155 }, "hiddenSeries": false, "id": 134, @@ -5978,7 +6430,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "8.4.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -7344,7 +7796,7 @@ "h": 13, "w": 12, "x": 0, - "y": 35 + "y": 158 }, "hiddenSeries": false, "id": 12, @@ -7367,7 +7819,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -7442,7 +7894,7 @@ "h": 13, "w": 12, "x": 12, - "y": 35 + "y": 158 }, "hiddenSeries": false, "id": 26, @@ -7465,7 +7917,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -7541,7 +7993,7 @@ "h": 13, "w": 12, "x": 0, - "y": 48 + "y": 171 }, "hiddenSeries": false, "id": 13, @@ -7564,7 +8016,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -7645,7 +8097,7 @@ "h": 13, "w": 12, "x": 12, - "y": 48 + "y": 171 }, "hiddenSeries": false, "id": 27, @@ -7668,7 +8120,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -7743,7 +8195,7 @@ "h": 13, "w": 12, "x": 0, - "y": 61 + "y": 184 }, "hiddenSeries": false, "id": 28, @@ -7765,7 +8217,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -7840,7 +8292,7 @@ "h": 13, "w": 12, "x": 12, - "y": 61 + "y": 184 }, "hiddenSeries": false, "id": 25, @@ -7862,7 +8314,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -7930,7 +8382,7 @@ "h": 15, "w": 12, "x": 0, - "y": 74 + "y": 197 }, "hiddenSeries": false, "id": 154, @@ -7951,7 +8403,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -9363,7 +9815,7 @@ "h": 7, "w": 12, "x": 0, - "y": 40 + "y": 162 }, "hiddenSeries": false, "id": 43, @@ -9385,7 +9837,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -9449,6 +9901,8 @@ "mode": "palette-classic" }, "custom": { + 
"axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -9498,7 +9952,7 @@ "h": 7, "w": 12, "x": 12, - "y": 40 + "y": 162 }, "id": 41, "links": [], @@ -9545,6 +9999,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -9595,7 +10051,7 @@ "h": 7, "w": 12, "x": 0, - "y": 47 + "y": 169 }, "id": 42, "links": [], @@ -9642,6 +10098,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "axisSoftMin": 1, @@ -9693,7 +10151,7 @@ "h": 7, "w": 12, "x": 12, - "y": 47 + "y": 169 }, "id": 220, "links": [], @@ -9751,7 +10209,7 @@ "h": 7, "w": 12, "x": 0, - "y": 54 + "y": 176 }, "hiddenSeries": false, "id": 144, @@ -9771,7 +10229,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -9844,7 +10302,7 @@ "h": 7, "w": 12, "x": 12, - "y": 54 + "y": 176 }, "hiddenSeries": false, "id": 115, @@ -9866,7 +10324,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -9938,7 +10396,7 @@ "h": 7, "w": 12, "x": 0, - "y": 61 + "y": 183 }, "hiddenSeries": false, "id": 113, @@ -9960,7 +10418,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -10058,7 +10516,6 @@ }, "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -10069,7 +10526,7 @@ "h": 9, "w": 12, "x": 0, - "y": 41 + "y": 163 }, "hiddenSeries": false, "id": 67, @@ -10091,7 +10548,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "7.3.7", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -10154,7 +10611,6 @@ }, "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -10165,7 +10621,7 @@ "h": 9, "w": 12, "x": 12, - "y": 41 + "y": 163 }, "hiddenSeries": false, "id": 71, @@ -10187,7 +10643,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "7.3.7", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -10250,7 +10706,6 @@ }, "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -10261,7 +10716,7 @@ "h": 9, "w": 12, "x": 0, - "y": 50 + "y": 172 }, "hiddenSeries": false, "id": 121, @@ -10284,7 +10739,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "7.3.7", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -10383,7 +10838,16 @@ "description": "Colour reflects the number of rooms with the given number of forward extremities, or fewer.\n\nThis is only updated once an hour.", "fieldConfig": { "defaults": { - "custom": {} + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "scaleDistribution": { + "type": "linear" + } + } }, "overrides": [] }, @@ -10400,7 +10864,47 @@ "legend": { "show": true }, - "links": [], + "links": [], + "options": { + "calculate": false, + "calculation": {}, + "cellGap": 1, + "cellValues": {}, + "color": { + "exponent": 0.5, + "fill": "#B877D9", + "min": 0, + "mode": "opacity", + "reverse": false, + "scale": "exponential", + "scheme": "Oranges", + "steps": 128 + }, + "exemplars": { + "color": 
"rgba(255,0,255,0.7)" + }, + "filterValues": { + "le": 1e-9 + }, + "legend": { + "show": true + }, + "rowsFrame": { + "layout": "auto" + }, + "showValue": "never", + "tooltip": { + "show": true, + "yHistogram": true + }, + "yAxis": { + "axisPlacement": "left", + "decimals": 0, + "reverse": false, + "unit": "short" + } + }, + "pluginVersion": "9.2.2", "reverseYBuckets": false, "targets": [ { @@ -10442,7 +10946,6 @@ "description": "Number of rooms with the given number of forward extremities or fewer.\n\nThis is only updated once an hour.", "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -10471,8 +10974,11 @@ "linewidth": 1, "links": [], "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, "percentage": false, - "pluginVersion": "7.1.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -10543,7 +11049,16 @@ "description": "Colour reflects the number of events persisted to rooms with the given number of forward extremities, or fewer.", "fieldConfig": { "defaults": { - "custom": {} + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "scaleDistribution": { + "type": "linear" + } + } }, "overrides": [] }, @@ -10561,6 +11076,46 @@ "show": true }, "links": [], + "options": { + "calculate": false, + "calculation": {}, + "cellGap": 1, + "cellValues": {}, + "color": { + "exponent": 0.5, + "fill": "#5794F2", + "min": 0, + "mode": "opacity", + "reverse": false, + "scale": "exponential", + "scheme": "Oranges", + "steps": 128 + }, + "exemplars": { + "color": "rgba(255,0,255,0.7)" + }, + "filterValues": { + "le": 1e-9 + }, + "legend": { + "show": true + }, + "rowsFrame": { + "layout": "auto" + }, + "showValue": "never", + "tooltip": { + "show": true, + "yHistogram": true + }, + "yAxis": { + "axisPlacement": "left", + "decimals": 0, + "reverse": false, + "unit": "short" + } + }, + "pluginVersion": "9.2.2", "reverseYBuckets": false, "targets": [ { @@ -10602,7 +11157,6 @@ "description": "For a given percentage P, the number X where P% of events were persisted to rooms with X forward extremities or fewer.", "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -10630,8 +11184,11 @@ "linewidth": 1, "links": [], "nullPointMode": "null", + "options": { + "alertThreshold": true + }, "percentage": false, - "pluginVersion": "7.1.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -10732,7 +11289,16 @@ "description": "Colour reflects the number of events persisted to rooms with the given number of stale forward extremities, or fewer.\n\nStale forward extremities are those that were in the previous set of extremities as well as the new.", "fieldConfig": { "defaults": { - "custom": {} + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "scaleDistribution": { + "type": "linear" + } + } }, "overrides": [] }, @@ -10750,6 +11316,46 @@ "show": true }, "links": [], + "options": { + "calculate": false, + "calculation": {}, + "cellGap": 1, + "cellValues": {}, + "color": { + "exponent": 0.5, + "fill": "#FF9830", + "min": 0, + "mode": "opacity", + "reverse": false, + "scale": "exponential", + "scheme": "Oranges", + "steps": 128 + }, + "exemplars": { + "color": "rgba(255,0,255,0.7)" + }, + "filterValues": { + "le": 1e-9 + }, + "legend": { + "show": true + }, + "rowsFrame": { + "layout": "auto" + }, + "showValue": "never", + "tooltip": { + "show": true, + "yHistogram": true + }, + "yAxis": { + 
"axisPlacement": "left", + "decimals": 0, + "reverse": false, + "unit": "short" + } + }, + "pluginVersion": "9.2.2", "reverseYBuckets": false, "targets": [ { @@ -10791,7 +11397,6 @@ "description": "For given percentage P, the number X where P% of events were persisted to rooms with X stale forward extremities or fewer.\n\nStale forward extremities are those that were in the previous set of extremities as well as the new.", "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -10819,8 +11424,11 @@ "linewidth": 1, "links": [], "nullPointMode": "null", + "options": { + "alertThreshold": true + }, "percentage": false, - "pluginVersion": "7.1.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -10921,7 +11529,16 @@ "description": "Colour reflects the number of state resolution operations performed over the given number of state groups, or fewer.", "fieldConfig": { "defaults": { - "custom": {} + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "scaleDistribution": { + "type": "linear" + } + } }, "overrides": [] }, @@ -10939,6 +11556,46 @@ "show": true }, "links": [], + "options": { + "calculate": false, + "calculation": {}, + "cellGap": 1, + "cellValues": {}, + "color": { + "exponent": 0.5, + "fill": "#73BF69", + "min": 0, + "mode": "opacity", + "reverse": false, + "scale": "exponential", + "scheme": "Oranges", + "steps": 128 + }, + "exemplars": { + "color": "rgba(255,0,255,0.7)" + }, + "filterValues": { + "le": 1e-9 + }, + "legend": { + "show": true + }, + "rowsFrame": { + "layout": "auto" + }, + "showValue": "never", + "tooltip": { + "show": true, + "yHistogram": true + }, + "yAxis": { + "axisPlacement": "left", + "decimals": 0, + "reverse": false, + "unit": "short" + } + }, + "pluginVersion": "9.2.2", "reverseYBuckets": false, "targets": [ { @@ -10976,12 +11633,12 @@ "dashLength": 10, "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "description": "For a given percentage P, the number X where P% of state resolution operations took place over X state groups or fewer.", "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -11010,8 +11667,11 @@ "linewidth": 1, "links": [], "nullPointMode": "null", + "options": { + "alertThreshold": true + }, "percentage": false, - "pluginVersion": "7.1.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -11024,11 +11684,13 @@ "datasource": { "uid": "$datasource" }, + "editorMode": "code", "expr": "histogram_quantile(0.5, rate(synapse_state_number_state_groups_in_resolution_bucket{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 1, "legendFormat": "50%", + "range": true, "refId": "A" }, { @@ -11106,12 +11768,6 @@ "uid": "$datasource" }, "description": "When we do a state res while persisting events we try and see if we can prune any stale extremities.", - "fieldConfig": { - "defaults": { - "custom": {} - }, - "overrides": [] - }, "fill": 1, "fillGradient": 0, "gridPos": { @@ -11134,8 +11790,11 @@ "lines": true, "linewidth": 1, "nullPointMode": "null", + "options": { + "alertThreshold": true + }, "percentage": false, - "pluginVersion": "7.1.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -12218,6 +12877,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": 
"auto", "barAlignment": 0, @@ -12266,7 +12927,7 @@ "h": 8, "w": 12, "x": 0, - "y": 46 + "y": 47 }, "id": 191, "options": { @@ -12314,7 +12975,7 @@ "h": 8, "w": 12, "x": 12, - "y": 46 + "y": 47 }, "hiddenSeries": false, "id": 193, @@ -12334,7 +12995,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -12404,11 +13065,26 @@ "type": "prometheus", "uid": "$datasource" }, + "fieldConfig": { + "defaults": { + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "scaleDistribution": { + "type": "linear" + } + } + }, + "overrides": [] + }, "gridPos": { "h": 8, "w": 12, "x": 0, - "y": 54 + "y": 55 }, "heatmap": {}, "hideZeroBuckets": false, @@ -12418,6 +13094,48 @@ "show": false }, "links": [], + "options": { + "calculate": false, + "calculation": {}, + "cellGap": -1, + "cellValues": { + "decimals": 2 + }, + "color": { + "exponent": 0.5, + "fill": "#b4ff00", + "min": 0, + "mode": "scheme", + "reverse": false, + "scale": "exponential", + "scheme": "Inferno", + "steps": 128 + }, + "exemplars": { + "color": "rgba(255,0,255,0.7)" + }, + "filterValues": { + "le": 1e-9 + }, + "legend": { + "show": false + }, + "rowsFrame": { + "layout": "auto" + }, + "showValue": "never", + "tooltip": { + "show": true, + "yHistogram": true + }, + "yAxis": { + "axisPlacement": "left", + "decimals": 0, + "reverse": false, + "unit": "s" + } + }, + "pluginVersion": "9.2.2", "reverseYBuckets": false, "targets": [ { @@ -12463,6 +13181,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -12507,7 +13227,7 @@ "h": 8, "w": 12, "x": 12, - "y": 54 + "y": 55 }, "id": 223, "options": { @@ -12757,6 +13477,6 @@ "timezone": "", "title": "Synapse", "uid": "000000012", - "version": 150, + "version": 160, "weekStart": "" -} \ No newline at end of file +} From d1693f03626391097b59ea9568cd8a869ed89569 Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Thu, 1 Jun 2023 13:52:51 +0100 Subject: [PATCH 031/153] Implement stable support for MSC3882 to allow an existing device/session to generate a login token for use on a new device/session (#15388) Implements stable support for MSC3882; this involves updating Synapse's support to match the MSC / the spec says. Continue to support the unstable version to allow clients to transition. --- changelog.d/15388.feature | 1 + .../configuration/config_documentation.md | 65 +++++++++++------ synapse/config/auth.py | 10 +++ synapse/config/experimental.py | 13 +--- synapse/rest/client/capabilities.py | 3 + synapse/rest/client/login.py | 31 +++++--- synapse/rest/client/login_token_request.py | 47 ++++++++---- synapse/rest/client/versions.py | 4 +- tests/config/test_oauth_delegation.py | 4 +- tests/rest/client/test_capabilities.py | 28 ++++++++ tests/rest/client/test_login.py | 23 ++++++ tests/rest/client/test_login_token_request.py | 71 ++++++++++++++----- 12 files changed, 225 insertions(+), 75 deletions(-) create mode 100644 changelog.d/15388.feature diff --git a/changelog.d/15388.feature b/changelog.d/15388.feature new file mode 100644 index 000000000000..6cc55cafa2e1 --- /dev/null +++ b/changelog.d/15388.feature @@ -0,0 +1 @@ +Stable support for [MSC3882](https://github.com/matrix-org/matrix-spec-proposals/pull/3882) to allow an existing device/session to generate a login token for use on a new device/session. 
\ No newline at end of file diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 5ede6d0a827a..0cf6e075ff11 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -2570,7 +2570,50 @@ Example configuration: ```yaml nonrefreshable_access_token_lifetime: 24h ``` +--- +### `ui_auth` + +The amount of time to allow a user-interactive authentication session to be active. +This defaults to 0, meaning the user is queried for their credentials +before every action, but this can be overridden to allow a single +validation to be re-used. This weakens the protections afforded by +the user-interactive authentication process, by allowing for multiple +(and potentially different) operations to use the same validation session. + +This is ignored for potentially "dangerous" operations (including +deactivating an account, modifying an account password, adding a 3PID, +and minting additional login tokens). + +Use the `session_timeout` sub-option here to change the time allowed for credential validation. + +Example configuration: +```yaml +ui_auth: + session_timeout: "15s" +``` +--- +### `login_via_existing_session` + +Matrix supports the ability of an existing session to mint a login token for +another client. + +Synapse disables this by default as it has security ramifications -- a malicious +client could use the mechanism to spawn more than one session. + +The duration of time the generated token is valid for can be configured with the +`token_timeout` sub-option. + +User-interactive authentication is required when this is enabled unless the +`require_ui_auth` sub-option is set to `False`. + +Example configuration: +```yaml +login_via_existing_session: + enabled: true + require_ui_auth: false + token_timeout: "5m" +``` --- ## Metrics Config options related to metrics. @@ -3415,28 +3458,6 @@ password_config: require_uppercase: true ``` --- -### `ui_auth` - -The amount of time to allow a user-interactive authentication session to be active. - -This defaults to 0, meaning the user is queried for their credentials -before every action, but this can be overridden to allow a single -validation to be re-used. This weakens the protections afforded by -the user-interactive authentication process, by allowing for multiple -(and potentially different) operations to use the same validation session. - -This is ignored for potentially "dangerous" operations (including -deactivating an account, modifying an account password, and -adding a 3PID). - -Use the `session_timeout` sub-option here to change the time allowed for credential validation. - -Example configuration: -```yaml -ui_auth: - session_timeout: "15s" -``` ---- ## Push Configuration settings related to push notifications diff --git a/synapse/config/auth.py b/synapse/config/auth.py index 12e853980e3f..c7ab428f28af 100644 --- a/synapse/config/auth.py +++ b/synapse/config/auth.py @@ -60,3 +60,13 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.ui_auth_session_timeout = self.parse_duration( ui_auth.get("session_timeout", 0) ) + + # Logging in with an existing session. 
+ login_via_existing = config.get("login_via_existing_session", {}) + self.login_via_existing_enabled = login_via_existing.get("enabled", False) + self.login_via_existing_require_ui_auth = login_via_existing.get( + "require_ui_auth", True + ) + self.login_via_existing_token_timeout = self.parse_duration( + login_via_existing.get("token_timeout", "5m") + ) diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 1d189b2e26cf..a9e002cf08f8 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -192,10 +192,10 @@ def check_config_conflicts(self, root: RootConfig) -> None: ("captcha", "enable_registration_captcha"), ) - if root.experimental.msc3882_enabled: + if root.auth.login_via_existing_enabled: raise ConfigError( - "MSC3882 cannot be enabled when OAuth delegation is enabled", - ("experimental_features", "msc3882_enabled"), + "Login via existing session cannot be enabled when OAuth delegation is enabled", + ("login_via_existing_session", "enabled"), ) if root.registration.refresh_token_lifetime: @@ -319,13 +319,6 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: # MSC3881: Remotely toggle push notifications for another client self.msc3881_enabled: bool = experimental.get("msc3881_enabled", False) - # MSC3882: Allow an existing session to sign in a new session - self.msc3882_enabled: bool = experimental.get("msc3882_enabled", False) - self.msc3882_ui_auth: bool = experimental.get("msc3882_ui_auth", True) - self.msc3882_token_timeout = self.parse_duration( - experimental.get("msc3882_token_timeout", "5m") - ) - # MSC3874: Filtering /messages with rel_types / not_rel_types. self.msc3874_enabled: bool = experimental.get("msc3874_enabled", False) diff --git a/synapse/rest/client/capabilities.py b/synapse/rest/client/capabilities.py index 0dbf8f6818ab..3154b9f77e95 100644 --- a/synapse/rest/client/capabilities.py +++ b/synapse/rest/client/capabilities.py @@ -65,6 +65,9 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: "m.3pid_changes": { "enabled": self.config.registration.enable_3pid_changes }, + "m.get_login_token": { + "enabled": self.config.auth.login_via_existing_enabled, + }, } } diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index d4dc2462b9c0..6493b00bb803 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -104,6 +104,9 @@ def __init__(self, hs: "HomeServer"): and hs.config.experimental.msc3866.require_approval_for_new_accounts ) + # Whether get login token is enabled. + self._get_login_token_enabled = hs.config.auth.login_via_existing_enabled + self.auth = hs.get_auth() self.clock = hs.get_clock() @@ -142,6 +145,9 @@ def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: # to SSO. flows.append({"type": LoginRestServlet.CAS_TYPE}) + # The login token flow requires m.login.token to be advertised. + support_login_token_flow = self._get_login_token_enabled + if self.cas_enabled or self.saml2_enabled or self.oidc_enabled: flows.append( { @@ -153,14 +159,23 @@ def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: } ) - # While it's valid for us to advertise this login type generally, - # synapse currently only gives out these tokens as part of the - # SSO login flow. - # Generally we don't want to advertise login flows that clients - # don't know how to implement, since they (currently) will always - # fall back to the fallback API if they don't understand one of the - # login flow types returned. 
- flows.append({"type": LoginRestServlet.TOKEN_TYPE}) + # SSO requires a login token to be generated, so we need to advertise that flow + support_login_token_flow = True + + # While it's valid for us to advertise this login type generally, + # synapse currently only gives out these tokens as part of the + # SSO login flow or as part of login via an existing session. + # + # Generally we don't want to advertise login flows that clients + # don't know how to implement, since they (currently) will always + # fall back to the fallback API if they don't understand one of the + # login flow types returned. + if support_login_token_flow: + tokenTypeFlow: Dict[str, Any] = {"type": LoginRestServlet.TOKEN_TYPE} + # If the login token flow is enabled advertise the get_login_token flag. + if self._get_login_token_enabled: + tokenTypeFlow["get_login_token"] = True + flows.append(tokenTypeFlow) flows.extend({"type": t} for t in self.auth_handler.get_supported_login_types()) diff --git a/synapse/rest/client/login_token_request.py b/synapse/rest/client/login_token_request.py index 43ea21d5e6ac..b1629f94a5f8 100644 --- a/synapse/rest/client/login_token_request.py +++ b/synapse/rest/client/login_token_request.py @@ -15,6 +15,7 @@ import logging from typing import TYPE_CHECKING, Tuple +from synapse.api.ratelimiting import Ratelimiter from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.http.site import SynapseRequest @@ -33,7 +34,7 @@ class LoginTokenRequestServlet(RestServlet): Request: - POST /login/token HTTP/1.1 + POST /login/get_token HTTP/1.1 Content-Type: application/json {} @@ -43,30 +44,45 @@ class LoginTokenRequestServlet(RestServlet): HTTP/1.1 200 OK { "login_token": "ABDEFGH", - "expires_in": 3600, + "expires_in_ms": 3600000, } """ - PATTERNS = client_patterns( - "/org.matrix.msc3882/login/token$", releases=[], v1=False, unstable=True - ) + PATTERNS = [ + *client_patterns( + "/login/get_token$", releases=["v1"], v1=False, unstable=False + ), + # TODO: this is no longer needed once unstable MSC3882 does not need to be supported: + *client_patterns( + "/org.matrix.msc3882/login/token$", releases=[], v1=False, unstable=True + ), + ] def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() - self.store = hs.get_datastores().main - self.clock = hs.get_clock() - self.server_name = hs.config.server.server_name + self._main_store = hs.get_datastores().main self.auth_handler = hs.get_auth_handler() - self.token_timeout = hs.config.experimental.msc3882_token_timeout - self.ui_auth = hs.config.experimental.msc3882_ui_auth + self.token_timeout = hs.config.auth.login_via_existing_token_timeout + self._require_ui_auth = hs.config.auth.login_via_existing_require_ui_auth + + # Ratelimit aggressively to a maxmimum of 1 request per minute. + # + # This endpoint can be used to spawn additional sessions and could be + # abused by a malicious client to create many sessions. 
+ self._ratelimiter = Ratelimiter( + store=self._main_store, + clock=hs.get_clock(), + rate_hz=1 / 60, + burst_count=1, + ) @interactive_auth_handler async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) body = parse_json_object_from_request(request) - if self.ui_auth: + if self._require_ui_auth: await self.auth_handler.validate_user_via_ui_auth( requester, request, @@ -75,9 +91,12 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: can_skip_ui_auth=False, # Don't allow skipping of UI auth ) + # Ensure that this endpoint isn't being used too often. (Ensure this is + # done *after* UI auth.) + await self._ratelimiter.ratelimit(None, requester.user.to_string().lower()) + login_token = await self.auth_handler.create_login_token_for_user_id( user_id=requester.user.to_string(), - auth_provider_id="org.matrix.msc3882.login_token_request", duration_ms=self.token_timeout, ) @@ -85,11 +104,13 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: 200, { "login_token": login_token, + # TODO: this is no longer needed once unstable MSC3882 does not need to be supported: "expires_in": self.token_timeout // 1000, + "expires_in_ms": self.token_timeout, }, ) def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - if hs.config.experimental.msc3882_enabled: + if hs.config.auth.login_via_existing_enabled: LoginTokenRequestServlet(hs).register(http_server) diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 32df054f56c3..547bf34df15e 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -113,8 +113,8 @@ def on_GET(self, request: Request) -> Tuple[int, JsonDict]: "fi.mau.msc2815": self.config.experimental.msc2815_enabled, # Adds a ping endpoint for appservices to check HS->AS connection "fi.mau.msc2659.stable": True, # TODO: remove when "v1.7" is added above - # Adds support for login token requests as per MSC3882 - "org.matrix.msc3882": self.config.experimental.msc3882_enabled, + # TODO: this is no longer needed once unstable MSC3882 does not need to be supported: + "org.matrix.msc3882": self.config.auth.login_via_existing_enabled, # Adds support for remotely enabling/disabling pushers, as per MSC3881 "org.matrix.msc3881": self.config.experimental.msc3881_enabled, # Adds support for filtering /messages by event relation. 
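
For reference, the end-to-end flow that the servlet above implements looks roughly like this from a client's point of view. This is an illustrative sketch only and is not part of the patch: the homeserver URL and access token are placeholders, and UI-auth/error handling is reduced to a comment.

```python
# Rough client-side sketch of the login-via-existing-session flow.
# HOMESERVER and ACCESS_TOKEN are placeholders, not values from the patch.
import requests

HOMESERVER = "https://matrix.example.com"    # placeholder
ACCESS_TOKEN = "syt_existing_session_token"  # placeholder: token of the existing session

# 1. Ask the existing session to mint a short-lived login token. If the server
#    has require_ui_auth enabled, the first attempt returns a 401 with UIA flows
#    and must be retried with an "auth" dict (e.g. m.login.password).
resp = requests.post(
    f"{HOMESERVER}/_matrix/client/v1/login/get_token",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    json={},
)
resp.raise_for_status()
body = resp.json()
login_token = body["login_token"]
print("token valid for", body["expires_in_ms"], "ms")

# 2. On the new device, exchange the token for its own access token using the
#    standard m.login.token login type.
login_resp = requests.post(
    f"{HOMESERVER}/_matrix/client/v3/login",
    json={"type": "m.login.token", "token": login_token},
)
login_resp.raise_for_status()
print("new device access token:", login_resp.json()["access_token"])
```

Note the aggressive ratelimit added above (one request per minute per user): a client should mint a token only when it actually needs one and use it before `expires_in_ms` elapses, rather than polling the endpoint.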
diff --git a/tests/config/test_oauth_delegation.py b/tests/config/test_oauth_delegation.py index 2ead721b00fc..f57c813a581e 100644 --- a/tests/config/test_oauth_delegation.py +++ b/tests/config/test_oauth_delegation.py @@ -228,8 +228,8 @@ def test_jwt_auth_cannot_be_enabled(self) -> None: with self.assertRaises(ConfigError): self.parse_config() - def test_msc3882_auth_cannot_be_enabled(self) -> None: - self.config_dict["experimental_features"]["msc3882_enabled"] = True + def test_login_via_existing_session_cannot_be_enabled(self) -> None: + self.config_dict["login_via_existing_session"] = {"enabled": True} with self.assertRaises(ConfigError): self.parse_config() diff --git a/tests/rest/client/test_capabilities.py b/tests/rest/client/test_capabilities.py index c16e8d43f419..cf23430f6ad4 100644 --- a/tests/rest/client/test_capabilities.py +++ b/tests/rest/client/test_capabilities.py @@ -186,3 +186,31 @@ def test_get_does_include_msc3244_fields_when_enabled(self) -> None: self.assertGreater(len(details["support"]), 0) for room_version in details["support"]: self.assertTrue(room_version in KNOWN_ROOM_VERSIONS, str(room_version)) + + def test_get_get_token_login_fields_when_disabled(self) -> None: + """By default login via an existing session is disabled.""" + access_token = self.get_success( + self.auth_handler.create_access_token_for_user_id( + self.user, device_id=None, valid_until_ms=None + ) + ) + + channel = self.make_request("GET", self.url, access_token=access_token) + capabilities = channel.json_body["capabilities"] + + self.assertEqual(channel.code, HTTPStatus.OK) + self.assertFalse(capabilities["m.get_login_token"]["enabled"]) + + @override_config({"login_via_existing_session": {"enabled": True}}) + def test_get_get_token_login_fields_when_enabled(self) -> None: + access_token = self.get_success( + self.auth_handler.create_access_token_for_user_id( + self.user, device_id=None, valid_until_ms=None + ) + ) + + channel = self.make_request("GET", self.url, access_token=access_token) + capabilities = channel.json_body["capabilities"] + + self.assertEqual(channel.code, HTTPStatus.OK) + self.assertTrue(capabilities["m.get_login_token"]["enabled"]) diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py index dc32982e22f8..f3c3bc69a97a 100644 --- a/tests/rest/client/test_login.py +++ b/tests/rest/client/test_login.py @@ -446,6 +446,29 @@ def test_require_approval(self) -> None: ApprovalNoticeMedium.NONE, channel.json_body["approval_notice_medium"] ) + def test_get_login_flows_with_login_via_existing_disabled(self) -> None: + """GET /login should return m.login.token without get_login_token""" + channel = self.make_request("GET", "/_matrix/client/r0/login") + self.assertEqual(channel.code, 200, channel.result) + + flows = {flow["type"]: flow for flow in channel.json_body["flows"]} + self.assertNotIn("m.login.token", flows) + + @override_config({"login_via_existing_session": {"enabled": True}}) + def test_get_login_flows_with_login_via_existing_enabled(self) -> None: + """GET /login should return m.login.token with get_login_token true""" + channel = self.make_request("GET", "/_matrix/client/r0/login") + self.assertEqual(channel.code, 200, channel.result) + + self.assertCountEqual( + channel.json_body["flows"], + [ + {"type": "m.login.token", "get_login_token": True}, + {"type": "m.login.password"}, + {"type": "m.login.application_service"}, + ], + ) + @skip_unless(has_saml2 and HAS_OIDC, "Requires SAML2 and OIDC") class MultiSSOTestCase(unittest.HomeserverTestCase): 
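For orientation, this is roughly what the stable flow introduced by these changes looks like from a client's point of view: ask the existing session for a short-lived token at `/_matrix/client/v1/login/get_token`, then exchange it on the new device via the ordinary `m.login.token` login type. Illustrative sketch only, not part of the patch; the homeserver URL and tokens are placeholders, and it assumes `login_via_existing_session` is enabled with `require_ui_auth: false` (otherwise the first call returns a 401 carrying UIA flows that must be completed first).

    import requests

    HOMESERVER = "https://matrix.example.org"    # placeholder
    ACCESS_TOKEN = "syt_existing_session_token"  # placeholder: token of the already signed-in session

    # 1. The signed-in session requests a short-lived login token.
    resp = requests.post(
        f"{HOMESERVER}/_matrix/client/v1/login/get_token",
        headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
        json={},
    )
    resp.raise_for_status()
    body = resp.json()
    login_token = body["login_token"]
    print("login token expires in", body["expires_in_ms"], "ms")  # stable field is in milliseconds

    # 2. The new device exchanges the token for its own access token.
    login = requests.post(
        f"{HOMESERVER}/_matrix/client/v3/login",
        json={"type": "m.login.token", "token": login_token},
    )
    login.raise_for_status()
    print("logged in as", login.json()["user_id"])
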
diff --git a/tests/rest/client/test_login_token_request.py b/tests/rest/client/test_login_token_request.py index b8187db982bf..f05e619aa86d 100644 --- a/tests/rest/client/test_login_token_request.py +++ b/tests/rest/client/test_login_token_request.py @@ -15,14 +15,14 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.rest import admin -from synapse.rest.client import login, login_token_request +from synapse.rest.client import login, login_token_request, versions from synapse.server import HomeServer from synapse.util import Clock from tests import unittest from tests.unittest import override_config -endpoint = "/_matrix/client/unstable/org.matrix.msc3882/login/token" +GET_TOKEN_ENDPOINT = "/_matrix/client/v1/login/get_token" class LoginTokenRequestServletTestCase(unittest.HomeserverTestCase): @@ -30,6 +30,7 @@ class LoginTokenRequestServletTestCase(unittest.HomeserverTestCase): login.register_servlets, admin.register_servlets, login_token_request.register_servlets, + versions.register_servlets, # TODO: remove once unstable revision 0 support is removed ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: @@ -46,26 +47,26 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.password = "password" def test_disabled(self) -> None: - channel = self.make_request("POST", endpoint, {}, access_token=None) + channel = self.make_request("POST", GET_TOKEN_ENDPOINT, {}, access_token=None) self.assertEqual(channel.code, 404) self.register_user(self.user, self.password) token = self.login(self.user, self.password) - channel = self.make_request("POST", endpoint, {}, access_token=token) + channel = self.make_request("POST", GET_TOKEN_ENDPOINT, {}, access_token=token) self.assertEqual(channel.code, 404) - @override_config({"experimental_features": {"msc3882_enabled": True}}) + @override_config({"login_via_existing_session": {"enabled": True}}) def test_require_auth(self) -> None: - channel = self.make_request("POST", endpoint, {}, access_token=None) + channel = self.make_request("POST", GET_TOKEN_ENDPOINT, {}, access_token=None) self.assertEqual(channel.code, 401) - @override_config({"experimental_features": {"msc3882_enabled": True}}) + @override_config({"login_via_existing_session": {"enabled": True}}) def test_uia_on(self) -> None: user_id = self.register_user(self.user, self.password) token = self.login(self.user, self.password) - channel = self.make_request("POST", endpoint, {}, access_token=token) + channel = self.make_request("POST", GET_TOKEN_ENDPOINT, {}, access_token=token) self.assertEqual(channel.code, 401) self.assertIn({"stages": ["m.login.password"]}, channel.json_body["flows"]) @@ -80,9 +81,9 @@ def test_uia_on(self) -> None: }, } - channel = self.make_request("POST", endpoint, uia, access_token=token) + channel = self.make_request("POST", GET_TOKEN_ENDPOINT, uia, access_token=token) self.assertEqual(channel.code, 200) - self.assertEqual(channel.json_body["expires_in"], 300) + self.assertEqual(channel.json_body["expires_in_ms"], 300000) login_token = channel.json_body["login_token"] @@ -95,15 +96,15 @@ def test_uia_on(self) -> None: self.assertEqual(channel.json_body["user_id"], user_id) @override_config( - {"experimental_features": {"msc3882_enabled": True, "msc3882_ui_auth": False}} + {"login_via_existing_session": {"enabled": True, "require_ui_auth": False}} ) def test_uia_off(self) -> None: user_id = self.register_user(self.user, self.password) token = self.login(self.user, self.password) - channel = 
self.make_request("POST", endpoint, {}, access_token=token) + channel = self.make_request("POST", GET_TOKEN_ENDPOINT, {}, access_token=token) self.assertEqual(channel.code, 200) - self.assertEqual(channel.json_body["expires_in"], 300) + self.assertEqual(channel.json_body["expires_in_ms"], 300000) login_token = channel.json_body["login_token"] @@ -117,10 +118,10 @@ def test_uia_off(self) -> None: @override_config( { - "experimental_features": { - "msc3882_enabled": True, - "msc3882_ui_auth": False, - "msc3882_token_timeout": "15s", + "login_via_existing_session": { + "enabled": True, + "require_ui_auth": False, + "token_timeout": "15s", } } ) @@ -128,6 +129,40 @@ def test_expires_in(self) -> None: self.register_user(self.user, self.password) token = self.login(self.user, self.password) - channel = self.make_request("POST", endpoint, {}, access_token=token) + channel = self.make_request("POST", GET_TOKEN_ENDPOINT, {}, access_token=token) + self.assertEqual(channel.code, 200) + self.assertEqual(channel.json_body["expires_in_ms"], 15000) + + @override_config( + { + "login_via_existing_session": { + "enabled": True, + "require_ui_auth": False, + "token_timeout": "15s", + } + } + ) + def test_unstable_support(self) -> None: + # TODO: remove support for unstable MSC3882 is no longer needed + + # check feature is advertised in versions response: + channel = self.make_request( + "GET", "/_matrix/client/versions", {}, access_token=None + ) + self.assertEqual(channel.code, 200) + self.assertEqual( + channel.json_body["unstable_features"]["org.matrix.msc3882"], True + ) + + self.register_user(self.user, self.password) + token = self.login(self.user, self.password) + + # check feature is available via the unstable endpoint and returns an expires_in value in seconds + channel = self.make_request( + "POST", + "/_matrix/client/unstable/org.matrix.msc3882/login/token", + {}, + access_token=token, + ) self.assertEqual(channel.code, 200) self.assertEqual(channel.json_body["expires_in"], 15) From 5ed0e8c61f6b46289fdc5609e8e573b67c2c1982 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 1 Jun 2023 14:25:20 +0100 Subject: [PATCH 032/153] Cache requests for user's devices from federation (#15675) This should mitigate the issue where lots of different servers requests the same user's devices all at once. --- changelog.d/15675.misc | 1 + synapse/storage/databases/main/devices.py | 4 ++ .../storage/databases/main/end_to_end_keys.py | 67 ++++++++++++++++++- 3 files changed, 70 insertions(+), 2 deletions(-) create mode 100644 changelog.d/15675.misc diff --git a/changelog.d/15675.misc b/changelog.d/15675.misc new file mode 100644 index 000000000000..05538fdbeff9 --- /dev/null +++ b/changelog.d/15675.misc @@ -0,0 +1 @@ +Cache requests for user's devices over federation. 
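The diff that follows caches each user's federation device-key response and only trusts the cached value after a cheap probe of `device_lists_stream` confirms nothing newer has arrived; the cache is additionally invalidated from the device-lists replication stream. A loose, self-contained sketch of that check-then-trust idea (simplified and not part of the patch; in the real change the staleness check runs against the current stream token and invalidation is driven by replication rows, as shown below):

    from typing import Callable, Dict, List, Tuple

    class StreamCheckedCache:
        """Cache per-user results, trusting them only while the change stream is quiet."""

        def __init__(
            self,
            compute: Callable[[str], List[dict]],      # expensive query, e.g. a user's device keys
            changes_since: Callable[[str, int], int],  # count of stream rows for the user at/after a token
        ) -> None:
            self._compute = compute
            self._changes_since = changes_since
            self._cache: Dict[str, Tuple[int, List[dict]]] = {}

        def get(self, user_id: str, now_token: int) -> List[dict]:
            cached = self._cache.get(user_id)
            if cached is not None:
                cached_token, value = cached
                # Cheap staleness probe: has anything changed for this user since we cached?
                if self._changes_since(user_id, cached_token) == 0:
                    return value
                # Newer rows exist, so the cached value may be missing devices; recompute.
                del self._cache[user_id]
            value = self._compute(user_id)
            self._cache[user_id] = (now_token, value)
            return value
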
diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index a67fdb3c22ce..f677d048aafb 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -1941,6 +1941,10 @@ def _add_device_change_to_stream_txn( user_id, stream_ids[-1], ) + txn.call_after( + self._get_e2e_device_keys_for_federation_query_inner.invalidate, + (user_id,), + ) min_stream_id = stream_ids[0] diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py index 4bc391f21316..91ae9c457d78 100644 --- a/synapse/storage/databases/main/end_to_end_keys.py +++ b/synapse/storage/databases/main/end_to_end_keys.py @@ -16,6 +16,7 @@ import abc from typing import ( TYPE_CHECKING, + Any, Collection, Dict, Iterable, @@ -39,6 +40,7 @@ TransactionUnusedFallbackKeys, ) from synapse.logging.opentracing import log_kv, set_tag, trace +from synapse.replication.tcp.streams._base import DeviceListsStream from synapse.storage._base import SQLBaseStore, db_to_json from synapse.storage.database import ( DatabasePool, @@ -104,6 +106,23 @@ def __init__( self.hs.config.federation.allow_device_name_lookup_over_federation ) + def process_replication_rows( + self, + stream_name: str, + instance_name: str, + token: int, + rows: Iterable[Any], + ) -> None: + if stream_name == DeviceListsStream.NAME: + for row in rows: + assert isinstance(row, DeviceListsStream.DeviceListsStreamRow) + if row.entity.startswith("@"): + self._get_e2e_device_keys_for_federation_query_inner.invalidate( + (row.entity,) + ) + + super().process_replication_rows(stream_name, instance_name, token, rows) + async def get_e2e_device_keys_for_federation_query( self, user_id: str ) -> Tuple[int, List[JsonDict]]: @@ -114,6 +133,50 @@ async def get_e2e_device_keys_for_federation_query( """ now_stream_id = self.get_device_stream_token() + # We need to be careful with the caching here, as we need to always + # return *all* persisted devices, however there may be a lag between a + # new device being persisted and the cache being invalidated. + cached_results = ( + self._get_e2e_device_keys_for_federation_query_inner.cache.get_immediate( + user_id, None + ) + ) + if cached_results is not None: + # Check that there have been no new devices added by another worker + # after the cache. This should be quick as there should be few rows + # with a higher stream ordering. + # + # Note that we invalidate based on the device stream, so we only + # have to check for potential invalidations after the + # `now_stream_id`. + sql = """ + SELECT user_id FROM device_lists_stream + WHERE stream_id >= ? AND user_id = ? + """ + rows = await self.db_pool.execute( + "get_e2e_device_keys_for_federation_query_check", + None, + sql, + now_stream_id, + user_id, + ) + if not rows: + # No new rows, so cache is still valid. + return now_stream_id, cached_results + + # There has, so let's invalidate the cache and run the query. 
+ self._get_e2e_device_keys_for_federation_query_inner.invalidate((user_id,)) + + results = await self._get_e2e_device_keys_for_federation_query_inner(user_id) + + return now_stream_id, results + + @cached(iterable=True) + async def _get_e2e_device_keys_for_federation_query_inner( + self, user_id: str + ) -> List[JsonDict]: + """Get all devices (with any device keys) for a user""" + devices = await self.get_e2e_device_keys_and_signatures([(user_id, None)]) if devices: @@ -134,9 +197,9 @@ async def get_e2e_device_keys_for_federation_query( results.append(result) - return now_stream_id, results + return results - return now_stream_id, [] + return [] @trace @cancellable From 30a5076da8ad776c150ad2745b5f34b4446012e0 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Thu, 1 Jun 2023 21:27:18 -0500 Subject: [PATCH 033/153] Log when events are (unexpectedly) filtered out of responses in tests (#14213) See https://github.com/matrix-org/synapse/pull/14095#discussion_r990335492 This is useful because when see that a relevant event is an `outlier` or `soft-failed`, then that's a good unexpected indicator explaining why it's not showing up. `filter_events_for_client` is used in `/sync`, `/messages`, `/context` which are all common end-to-end assertion touch points (also notifications, relations). --- changelog.d/14213.misc | 1 + docker/README.md | 3 ++- docker/conf/log.config | 30 +++++++++++++++++++++------ docker/configure_workers_and_start.py | 3 +++ scripts-dev/complement.sh | 4 ++++ synapse/visibility.py | 14 ++++++------- tests/test_utils/logging_setup.py | 12 +++++++++++ 7 files changed, 53 insertions(+), 14 deletions(-) create mode 100644 changelog.d/14213.misc diff --git a/changelog.d/14213.misc b/changelog.d/14213.misc new file mode 100644 index 000000000000..b0689f3d1574 --- /dev/null +++ b/changelog.d/14213.misc @@ -0,0 +1 @@ +Log when events are (maybe unexpectedly) filtered out of responses in tests. diff --git a/docker/README.md b/docker/README.md index eda3221c2339..08372e95c647 100644 --- a/docker/README.md +++ b/docker/README.md @@ -73,7 +73,8 @@ The following environment variables are supported in `generate` mode: will log sensitive information such as access tokens. This should not be needed unless you are a developer attempting to debug something particularly tricky. - +* `SYNAPSE_LOG_TESTING`: if set, Synapse will log additional information useful + for testing. ## Postgres diff --git a/docker/conf/log.config b/docker/conf/log.config index 90b5179838ca..577232120204 100644 --- a/docker/conf/log.config +++ b/docker/conf/log.config @@ -49,17 +49,35 @@ handlers: class: logging.StreamHandler formatter: precise -{% if not SYNAPSE_LOG_SENSITIVE %} -{# - If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO - so that DEBUG entries (containing sensitive information) are not emitted. -#} loggers: + # This is just here so we can leave `loggers` in the config regardless of whether + # we configure other loggers below (avoid empty yaml dict error). + _placeholder: + level: "INFO" + + {% if not SYNAPSE_LOG_SENSITIVE %} + {# + If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO + so that DEBUG entries (containing sensitive information) are not emitted. + #} synapse.storage.SQL: # beware: increasing this to DEBUG will make synapse log sensitive # information such as access tokens. 
level: INFO -{% endif %} + {% endif %} + + {% if SYNAPSE_LOG_TESTING %} + {# + If Synapse is under test, log a few more useful things for a developer + attempting to debug something particularly tricky. + + With `synapse.visibility.filtered_event_debug`, it logs when events are (maybe + unexpectedly) filtered out of responses in tests. It's just nice to be able to + look at the CI log and figure out why an event isn't being returned. + #} + synapse.visibility.filtered_event_debug: + level: DEBUG + {% endif %} root: level: {{ SYNAPSE_LOG_LEVEL or "INFO" }} diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index 79b5b8739764..87a740e3d433 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -40,6 +40,8 @@ # log level. INFO is the default. # * SYNAPSE_LOG_SENSITIVE: If unset, SQL and SQL values won't be logged, # regardless of the SYNAPSE_LOG_LEVEL setting. +# * SYNAPSE_LOG_TESTING: if set, Synapse will log additional information useful +# for testing. # # NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined # in the project's README), this script may be run multiple times, and functionality should @@ -947,6 +949,7 @@ def generate_worker_log_config( extra_log_template_args["SYNAPSE_LOG_SENSITIVE"] = environ.get( "SYNAPSE_LOG_SENSITIVE" ) + extra_log_template_args["SYNAPSE_LOG_TESTING"] = environ.get("SYNAPSE_LOG_TESTING") # Render and write the file log_config_filepath = f"/conf/workers/{worker_name}.log.config" diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh index cba2799f1589..131f26234ece 100755 --- a/scripts-dev/complement.sh +++ b/scripts-dev/complement.sh @@ -269,6 +269,10 @@ if [[ -n "$SYNAPSE_TEST_LOG_LEVEL" ]]; then export PASS_SYNAPSE_LOG_SENSITIVE=1 fi +# Log a few more useful things for a developer attempting to debug something +# particularly tricky. +export PASS_SYNAPSE_LOG_TESTING=1 + # Run the tests! 
echo "Images built; running complement" cd "$COMPLEMENT_DIR" diff --git a/synapse/visibility.py b/synapse/visibility.py index 468e22f8f64e..fc71dc92a4e7 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -41,7 +41,7 @@ from synapse.util import Clock logger = logging.getLogger(__name__) - +filtered_event_logger = logging.getLogger("synapse.visibility.filtered_event_debug") VISIBILITY_PRIORITY = ( HistoryVisibility.WORLD_READABLE, @@ -97,8 +97,8 @@ async def filter_events_for_client( events_before_filtering = events events = [e for e in events if not e.internal_metadata.is_soft_failed()] if len(events_before_filtering) != len(events): - if logger.isEnabledFor(logging.DEBUG): - logger.debug( + if filtered_event_logger.isEnabledFor(logging.DEBUG): + filtered_event_logger.debug( "filter_events_for_client: Filtered out soft-failed events: Before=%s, After=%s", [event.event_id for event in events_before_filtering], [event.event_id for event in events], @@ -319,7 +319,7 @@ def _check_client_allowed_to_see_event( _check_filter_send_to_client(event, clock, retention_policy, sender_ignored) == _CheckFilter.DENIED ): - logger.debug( + filtered_event_logger.debug( "_check_client_allowed_to_see_event(event=%s): Filtered out event because `_check_filter_send_to_client` returned `_CheckFilter.DENIED`", event.event_id, ) @@ -341,7 +341,7 @@ def _check_client_allowed_to_see_event( ) return event - logger.debug( + filtered_event_logger.debug( "_check_client_allowed_to_see_event(event=%s): Filtered out event because it's an outlier", event.event_id, ) @@ -367,7 +367,7 @@ def _check_client_allowed_to_see_event( membership_result = _check_membership(user_id, event, visibility, state, is_peeking) if not membership_result.allowed: - logger.debug( + filtered_event_logger.debug( "_check_client_allowed_to_see_event(event=%s): Filtered out event because the user can't see the event because of their membership, membership_result.allowed=%s membership_result.joined=%s", event.event_id, membership_result.allowed, @@ -378,7 +378,7 @@ def _check_client_allowed_to_see_event( # If the sender has been erased and the user was not joined at the time, we # must only return the redacted form. if sender_erased and not membership_result.joined: - logger.debug( + filtered_event_logger.debug( "_check_client_allowed_to_see_event(event=%s): Returning pruned event because `sender_erased` and the user was not joined at the time", event.event_id, ) diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py index c37f205ed064..199bb06a81a8 100644 --- a/tests/test_utils/logging_setup.py +++ b/tests/test_utils/logging_setup.py @@ -53,4 +53,16 @@ def setup_logging() -> None: log_level = os.environ.get("SYNAPSE_TEST_LOG_LEVEL", "ERROR") root_logger.setLevel(log_level) + # In order to not add noise by default (since we only log ERROR messages for trial + # tests as configured above), we only enable this for developers for looking for + # more INFO or DEBUG. + if root_logger.isEnabledFor(logging.INFO): + # Log when events are (maybe unexpectedly) filtered out of responses in tests. It's + # just nice to be able to look at the CI log and figure out why an event isn't being + # returned. + logging.getLogger("synapse.visibility.filtered_event_debug").setLevel( + logging.DEBUG + ) + + # Blow away the pyo3-log cache so that it reloads the configuration. 
reset_logging_config() From e0f2429d137c74059f5b7f151297e28dbfd82d48 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Fri, 2 Jun 2023 15:13:50 +0200 Subject: [PATCH 034/153] Add a catch-all * to the supported relation types when redacting (#15705) This is an update to MSC3912 implementation --- changelog.d/15705.feature | 1 + synapse/handlers/relations.py | 16 ++- synapse/storage/databases/main/relations.py | 30 ++++++ tests/rest/client/test_redactions.py | 104 +++++++++++++++++++- 4 files changed, 143 insertions(+), 8 deletions(-) create mode 100644 changelog.d/15705.feature diff --git a/changelog.d/15705.feature b/changelog.d/15705.feature new file mode 100644 index 000000000000..e3cbb5a12e28 --- /dev/null +++ b/changelog.d/15705.feature @@ -0,0 +1 @@ +Add a catch-all * to the supported relation types when redacting an event and its related events. This is an update to [MSC3912](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) implementation. diff --git a/synapse/handlers/relations.py b/synapse/handlers/relations.py index 48246351625a..db97f7aedee6 100644 --- a/synapse/handlers/relations.py +++ b/synapse/handlers/relations.py @@ -205,16 +205,22 @@ async def redact_events_related_to( event_id: The event IDs to look and redact relations of. initial_redaction_event: The redaction for the event referred to by event_id. - relation_types: The types of relations to look for. + relation_types: The types of relations to look for. If "*" is in the list, + all related events will be redacted regardless of the type. Raises: ShadowBanError if the requester is shadow-banned """ - related_event_ids = ( - await self._main_store.get_all_relations_for_event_with_types( - event_id, relation_types + if "*" in relation_types: + related_event_ids = await self._main_store.get_all_relations_for_event( + event_id + ) + else: + related_event_ids = ( + await self._main_store.get_all_relations_for_event_with_types( + event_id, relation_types + ) ) - ) for related_event_id in related_event_ids: try: diff --git a/synapse/storage/databases/main/relations.py b/synapse/storage/databases/main/relations.py index 4a6c6c724d33..96908f14ba35 100644 --- a/synapse/storage/databases/main/relations.py +++ b/synapse/storage/databases/main/relations.py @@ -365,6 +365,36 @@ def get_all_relation_ids_for_event_with_types_txn( func=get_all_relation_ids_for_event_with_types_txn, ) + async def get_all_relations_for_event( + self, + event_id: str, + ) -> List[str]: + """Get the event IDs of all events that have a relation to the given event. + + Args: + event_id: The event for which to look for related events. + + Returns: + A list of the IDs of the events that relate to the given event. + """ + + def get_all_relation_ids_for_event_txn( + txn: LoggingTransaction, + ) -> List[str]: + rows = self.db_pool.simple_select_list_txn( + txn=txn, + table="event_relations", + keyvalues={"relates_to_id": event_id}, + retcols=["event_id"], + ) + + return [row["event_id"] for row in rows] + + return await self.db_pool.runInteraction( + desc="get_all_relation_ids_for_event", + func=get_all_relation_ids_for_event_txn, + ) + async def event_includes_relation(self, event_id: str) -> bool: """Check if the given event relates to another event. 
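The tests below exercise the new catch-all from the client side. A request that redacts an event together with every event relating to it would look roughly like this; the unstable `org.matrix.msc3912.with_relations` field name follows Synapse's existing MSC3912 support and should be treated as an assumption for other servers, and the homeserver, token, room and event IDs are placeholders.

    import uuid
    import requests

    HOMESERVER = "https://matrix.example.org"  # placeholder
    ACCESS_TOKEN = "syt_moderator_token"       # placeholder
    ROOM_ID = "!room:example.org"              # placeholder
    EVENT_ID = "$root_event"                   # placeholder
    txn_id = uuid.uuid4().hex

    resp = requests.put(
        f"{HOMESERVER}/_matrix/client/v3/rooms/{ROOM_ID}/redact/{EVENT_ID}/{txn_id}",
        headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
        json={
            "reason": "spam",
            # "*" matches any rel_type, so edits, thread replies and reactions
            # hanging off EVENT_ID are redacted along with the event itself.
            "org.matrix.msc3912.with_relations": ["*"],
        },
    )
    resp.raise_for_status()
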
diff --git a/tests/rest/client/test_redactions.py b/tests/rest/client/test_redactions.py index 84a60c0b0721..b43e95292c13 100644 --- a/tests/rest/client/test_redactions.py +++ b/tests/rest/client/test_redactions.py @@ -217,9 +217,9 @@ def test_redact_event_as_moderator_ratelimit(self) -> None: self._redact_event(self.mod_access_token, self.room_id, msg_id) @override_config({"experimental_features": {"msc3912_enabled": True}}) - def test_redact_relations(self) -> None: - """Tests that we can redact the relations of an event at the same time as the - event itself. + def test_redact_relations_with_types(self) -> None: + """Tests that we can redact the relations of an event of specific types + at the same time as the event itself. """ # Send a root event. res = self.helper.send_event( @@ -317,6 +317,104 @@ def test_redact_relations(self) -> None: ) self.assertNotIn("redacted_because", event_dict, event_dict) + @override_config({"experimental_features": {"msc3912_enabled": True}}) + def test_redact_all_relations(self) -> None: + """Tests that we can redact all the relations of an event at the same time as the + event itself. + """ + # Send a root event. + res = self.helper.send_event( + room_id=self.room_id, + type=EventTypes.Message, + content={"msgtype": "m.text", "body": "hello"}, + tok=self.mod_access_token, + ) + root_event_id = res["event_id"] + + # Send an edit to this root event. + res = self.helper.send_event( + room_id=self.room_id, + type=EventTypes.Message, + content={ + "body": " * hello world", + "m.new_content": { + "body": "hello world", + "msgtype": "m.text", + }, + "m.relates_to": { + "event_id": root_event_id, + "rel_type": RelationTypes.REPLACE, + }, + "msgtype": "m.text", + }, + tok=self.mod_access_token, + ) + edit_event_id = res["event_id"] + + # Also send a threaded message whose root is the same as the edit's. + res = self.helper.send_event( + room_id=self.room_id, + type=EventTypes.Message, + content={ + "msgtype": "m.text", + "body": "message 1", + "m.relates_to": { + "event_id": root_event_id, + "rel_type": RelationTypes.THREAD, + }, + }, + tok=self.mod_access_token, + ) + threaded_event_id = res["event_id"] + + # Also send a reaction, again with the same root. + res = self.helper.send_event( + room_id=self.room_id, + type=EventTypes.Reaction, + content={ + "m.relates_to": { + "rel_type": RelationTypes.ANNOTATION, + "event_id": root_event_id, + "key": "👍", + } + }, + tok=self.mod_access_token, + ) + reaction_event_id = res["event_id"] + + # Redact the root event, specifying that we also want to delete all events that + # relate to it. + self._redact_event( + self.mod_access_token, + self.room_id, + root_event_id, + with_relations=["*"], + ) + + # Check that the root event got redacted. + event_dict = self.helper.get_event( + self.room_id, root_event_id, self.mod_access_token + ) + self.assertIn("redacted_because", event_dict, event_dict) + + # Check that the edit got redacted. + event_dict = self.helper.get_event( + self.room_id, edit_event_id, self.mod_access_token + ) + self.assertIn("redacted_because", event_dict, event_dict) + + # Check that the threaded message got redacted. + event_dict = self.helper.get_event( + self.room_id, threaded_event_id, self.mod_access_token + ) + self.assertIn("redacted_because", event_dict, event_dict) + + # Check that the reaction got redacted. 
+ event_dict = self.helper.get_event( + self.room_id, reaction_event_id, self.mod_access_token + ) + self.assertIn("redacted_because", event_dict, event_dict) + @override_config({"experimental_features": {"msc3912_enabled": True}}) def test_redact_relations_no_perms(self) -> None: """Tests that, when redacting a message along with its relations, if not all From d0c4257f14addbf0c9072c2e34ae1c8294716ed5 Mon Sep 17 00:00:00 2001 From: Shay Date: Fri, 2 Jun 2023 17:24:13 -0700 Subject: [PATCH 035/153] `N + 3`: Read from column `full_user_id` rather than `user_id` of tables `profiles` and `user_filters` (#15649) --- changelog.d/15649.misc | 1 + synapse/api/filtering.py | 4 +- synapse/handlers/account_validity.py | 2 +- synapse/handlers/admin.py | 2 +- synapse/handlers/auth.py | 2 +- synapse/handlers/deactivate_account.py | 2 +- synapse/handlers/profile.py | 26 ++--- synapse/handlers/register.py | 2 +- synapse/module_api/__init__.py | 4 +- synapse/push/mailer.py | 2 +- synapse/rest/client/filter.py | 2 +- synapse/rest/client/sync.py | 2 +- synapse/storage/databases/main/filtering.py | 12 +-- synapse/storage/databases/main/profile.py | 12 +-- synapse/storage/schema/__init__.py | 5 +- .../78/01_validate_and_update_profiles.py | 92 ++++++++++++++++++ .../78/02_validate_and_update_user_filters.py | 95 +++++++++++++++++++ tests/api/test_filtering.py | 25 ++--- tests/handlers/test_profile.py | 28 ++---- tests/module_api/test_api.py | 6 +- tests/rest/client/test_filter.py | 4 +- tests/storage/test_profile.py | 17 +--- 22 files changed, 252 insertions(+), 95 deletions(-) create mode 100644 changelog.d/15649.misc create mode 100644 synapse/storage/schema/main/delta/78/01_validate_and_update_profiles.py create mode 100644 synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py diff --git a/changelog.d/15649.misc b/changelog.d/15649.misc new file mode 100644 index 000000000000..fca38abe0f60 --- /dev/null +++ b/changelog.d/15649.misc @@ -0,0 +1 @@ +Read from column `full_user_id` rather than `user_id` of tables `profiles` and `user_filters`. 
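The diffs that follow switch the profile and filter storage methods from taking a bare localpart to taking a full `UserID`, so they can read the new `full_user_id` column. Callers now build the full MXID first; a minimal sketch of the updated calling convention (illustrative only, not part of the patch; `store` stands in for Synapse's main datastore, and the method signatures match the diffs below):

    from synapse.types import UserID

    async def show_profile(store, localpart: str, server_name: str) -> None:
        # Build the full MXID; the storage layer now keys on full_user_id.
        user = UserID.from_string(f"@{localpart}:{server_name}")
        profile = await store.get_profileinfo(user)              # previously took the localpart
        displayname = await store.get_profile_displayname(user)  # previously took the localpart
        print(displayname, profile.avatar_url)
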
diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index 82aeef8d1913..0995ecbe832a 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -152,9 +152,9 @@ def __init__(self, hs: "HomeServer"): self.DEFAULT_FILTER_COLLECTION = FilterCollection(hs, {}) async def get_user_filter( - self, user_localpart: str, filter_id: Union[int, str] + self, user_id: UserID, filter_id: Union[int, str] ) -> "FilterCollection": - result = await self.store.get_user_filter(user_localpart, filter_id) + result = await self.store.get_user_filter(user_id, filter_id) return FilterCollection(self._hs, result) def add_user_filter(self, user_id: UserID, user_filter: JsonDict) -> Awaitable[int]: diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py index 4aa4ebf7e4a8..f1a7a05df6bc 100644 --- a/synapse/handlers/account_validity.py +++ b/synapse/handlers/account_validity.py @@ -164,7 +164,7 @@ async def _send_renewal_email(self, user_id: str, expiration_ts: int) -> None: try: user_display_name = await self.store.get_profile_displayname( - UserID.from_string(user_id).localpart + UserID.from_string(user_id) ) if user_display_name is None: user_display_name = user_id diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py index b06f25b03c21..119c7f838481 100644 --- a/synapse/handlers/admin.py +++ b/synapse/handlers/admin.py @@ -89,7 +89,7 @@ async def get_user(self, user: UserID) -> Optional[JsonDict]: } # Add additional user metadata - profile = await self._store.get_profileinfo(user.localpart) + profile = await self._store.get_profileinfo(user) threepids = await self._store.user_get_threepids(user.to_string()) external_ids = [ ({"auth_provider": auth_provider, "external_id": external_id}) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 4f986d90cbd9..59ecafa6a094 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -1759,7 +1759,7 @@ async def complete_sso_login( return user_profile_data = await self.store.get_profileinfo( - UserID.from_string(registered_user_id).localpart + UserID.from_string(registered_user_id) ) # Store any extra attributes which will be passed in the login response. diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index f299b89a1b2f..67adeae6a7a6 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -297,5 +297,5 @@ async def activate_account(self, user_id: str) -> None: # Add the user to the directory, if necessary. Note that # this must be done after the user is re-activated, because # deactivated users are excluded from the user directory. 
- profile = await self.store.get_profileinfo(user.localpart) + profile = await self.store.get_profileinfo(user) await self.user_directory_handler.handle_local_profile_change(user_id, profile) diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index a9160c87e304..a7f8c5e636f8 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -67,7 +67,7 @@ async def get_profile(self, user_id: str, ignore_backoff: bool = True) -> JsonDi target_user = UserID.from_string(user_id) if self.hs.is_mine(target_user): - profileinfo = await self.store.get_profileinfo(target_user.localpart) + profileinfo = await self.store.get_profileinfo(target_user) if profileinfo.display_name is None: raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) @@ -99,9 +99,7 @@ async def get_profile(self, user_id: str, ignore_backoff: bool = True) -> JsonDi async def get_displayname(self, target_user: UserID) -> Optional[str]: if self.hs.is_mine(target_user): try: - displayname = await self.store.get_profile_displayname( - target_user.localpart - ) + displayname = await self.store.get_profile_displayname(target_user) except StoreError as e: if e.code == 404: raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) @@ -147,7 +145,7 @@ async def set_displayname( raise AuthError(400, "Cannot set another user's displayname") if not by_admin and not self.hs.config.registration.enable_set_displayname: - profile = await self.store.get_profileinfo(target_user.localpart) + profile = await self.store.get_profileinfo(target_user) if profile.display_name: raise SynapseError( 400, @@ -180,7 +178,7 @@ async def set_displayname( await self.store.set_profile_displayname(target_user, displayname_to_set) - profile = await self.store.get_profileinfo(target_user.localpart) + profile = await self.store.get_profileinfo(target_user) await self.user_directory_handler.handle_local_profile_change( target_user.to_string(), profile ) @@ -194,9 +192,7 @@ async def set_displayname( async def get_avatar_url(self, target_user: UserID) -> Optional[str]: if self.hs.is_mine(target_user): try: - avatar_url = await self.store.get_profile_avatar_url( - target_user.localpart - ) + avatar_url = await self.store.get_profile_avatar_url(target_user) except StoreError as e: if e.code == 404: raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) @@ -241,7 +237,7 @@ async def set_avatar_url( raise AuthError(400, "Cannot set another user's avatar_url") if not by_admin and not self.hs.config.registration.enable_set_avatar_url: - profile = await self.store.get_profileinfo(target_user.localpart) + profile = await self.store.get_profileinfo(target_user) if profile.avatar_url: raise SynapseError( 400, "Changing avatar is disabled on this server", Codes.FORBIDDEN @@ -272,7 +268,7 @@ async def set_avatar_url( await self.store.set_profile_avatar_url(target_user, avatar_url_to_set) - profile = await self.store.get_profileinfo(target_user.localpart) + profile = await self.store.get_profileinfo(target_user) await self.user_directory_handler.handle_local_profile_change( target_user.to_string(), profile ) @@ -369,14 +365,10 @@ async def on_profile_query(self, args: JsonDict) -> JsonDict: response = {} try: if just_field is None or just_field == "displayname": - response["displayname"] = await self.store.get_profile_displayname( - user.localpart - ) + response["displayname"] = await self.store.get_profile_displayname(user) if just_field is None or just_field == "avatar_url": - response["avatar_url"] = await 
self.store.get_profile_avatar_url( - user.localpart - ) + response["avatar_url"] = await self.store.get_profile_avatar_url(user) except StoreError as e: if e.code == 404: raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index c80946c2e976..a2d3f03061fc 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -315,7 +315,7 @@ async def register_user( approved=approved, ) - profile = await self.store.get_profileinfo(localpart) + profile = await self.store.get_profileinfo(user) await self.user_directory_handler.handle_local_profile_change( user_id, profile ) diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index a8d6224a4528..84b2aef62086 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -655,7 +655,9 @@ async def get_profile_for_user(self, localpart: str) -> ProfileInfo: Returns: The profile information (i.e. display name and avatar URL). """ - return await self._store.get_profileinfo(localpart) + server_name = self._hs.hostname + user_id = UserID.from_string(f"@{localpart}:{server_name}") + return await self._store.get_profileinfo(user_id) async def get_threepids_for_user(self, user_id: str) -> List[Dict[str, str]]: """Look up the threepids (email addresses and phone numbers) associated with the diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index 491a09b71d54..79e0627b6a66 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -247,7 +247,7 @@ async def send_notification_mail( try: user_display_name = await self.store.get_profile_displayname( - UserID.from_string(user_id).localpart + UserID.from_string(user_id) ) if user_display_name is None: user_display_name = user_id diff --git a/synapse/rest/client/filter.py b/synapse/rest/client/filter.py index 04561f36d7a1..5da1e511a281 100644 --- a/synapse/rest/client/filter.py +++ b/synapse/rest/client/filter.py @@ -58,7 +58,7 @@ async def on_GET( try: filter_collection = await self.filtering.get_user_filter( - user_localpart=target_user.localpart, filter_id=filter_id_int + user_id=target_user, filter_id=filter_id_int ) except StoreError as e: if e.code != 404: diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index 03b05789456b..d7854ed4fd9d 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -178,7 +178,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: else: try: filter_collection = await self.filtering.get_user_filter( - user.localpart, filter_id + user, filter_id ) except StoreError as err: if err.code != 404: diff --git a/synapse/storage/databases/main/filtering.py b/synapse/storage/databases/main/filtering.py index f777777cbf48..fff417f9e3bb 100644 --- a/synapse/storage/databases/main/filtering.py +++ b/synapse/storage/databases/main/filtering.py @@ -145,7 +145,7 @@ def _final_batch(txn: LoggingTransaction, lower_bound_id: str) -> None: @cached(num_args=2) async def get_user_filter( - self, user_localpart: str, filter_id: Union[int, str] + self, user_id: UserID, filter_id: Union[int, str] ) -> JsonDict: # filter_id is BIGINT UNSIGNED, so if it isn't a number, fail # with a coherent error message rather than 500 M_UNKNOWN. 
@@ -156,7 +156,7 @@ async def get_user_filter( def_json = await self.db_pool.simple_select_one_onecol( table="user_filters", - keyvalues={"user_id": user_localpart, "filter_id": filter_id}, + keyvalues={"full_user_id": user_id.to_string(), "filter_id": filter_id}, retcol="filter_json", allow_none=False, desc="get_user_filter", @@ -172,15 +172,15 @@ async def add_user_filter(self, user_id: UserID, user_filter: JsonDict) -> int: def _do_txn(txn: LoggingTransaction) -> int: sql = ( "SELECT filter_id FROM user_filters " - "WHERE user_id = ? AND filter_json = ?" + "WHERE full_user_id = ? AND filter_json = ?" ) - txn.execute(sql, (user_id.localpart, bytearray(def_json))) + txn.execute(sql, (user_id.to_string(), bytearray(def_json))) filter_id_response = txn.fetchone() if filter_id_response is not None: return filter_id_response[0] - sql = "SELECT MAX(filter_id) FROM user_filters WHERE user_id = ?" - txn.execute(sql, (user_id.localpart,)) + sql = "SELECT MAX(filter_id) FROM user_filters WHERE full_user_id = ?" + txn.execute(sql, (user_id.to_string(),)) max_id = cast(Tuple[Optional[int]], txn.fetchone())[0] if max_id is None: filter_id = 0 diff --git a/synapse/storage/databases/main/profile.py b/synapse/storage/databases/main/profile.py index 21d54c7a7a7e..3ba9cc88537a 100644 --- a/synapse/storage/databases/main/profile.py +++ b/synapse/storage/databases/main/profile.py @@ -137,11 +137,11 @@ def _final_batch(txn: LoggingTransaction, lower_bound_id: str) -> None: return 50 - async def get_profileinfo(self, user_localpart: str) -> ProfileInfo: + async def get_profileinfo(self, user_id: UserID) -> ProfileInfo: try: profile = await self.db_pool.simple_select_one( table="profiles", - keyvalues={"user_id": user_localpart}, + keyvalues={"full_user_id": user_id.to_string()}, retcols=("displayname", "avatar_url"), desc="get_profileinfo", ) @@ -156,18 +156,18 @@ async def get_profileinfo(self, user_localpart: str) -> ProfileInfo: avatar_url=profile["avatar_url"], display_name=profile["displayname"] ) - async def get_profile_displayname(self, user_localpart: str) -> Optional[str]: + async def get_profile_displayname(self, user_id: UserID) -> Optional[str]: return await self.db_pool.simple_select_one_onecol( table="profiles", - keyvalues={"user_id": user_localpart}, + keyvalues={"full_user_id": user_id.to_string()}, retcol="displayname", desc="get_profile_displayname", ) - async def get_profile_avatar_url(self, user_localpart: str) -> Optional[str]: + async def get_profile_avatar_url(self, user_id: UserID) -> Optional[str]: return await self.db_pool.simple_select_one_onecol( table="profiles", - keyvalues={"user_id": user_localpart}, + keyvalues={"full_user_id": user_id.to_string()}, retcol="avatar_url", desc="get_profile_avatar_url", ) diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index 5cc786f0303d..fc190a8b13cf 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-SCHEMA_VERSION = 77 # remember to update the list below when updating +SCHEMA_VERSION = 78 # remember to update the list below when updating """Represents the expectations made by the codebase about the database schema This should be incremented whenever the codebase changes its requirements on the @@ -103,6 +103,9 @@ Changes in SCHEMA_VERSION = 77 - (Postgres) Add NOT VALID CHECK (full_user_id IS NOT NULL) to tables profiles and user_filters + +Changes in SCHEMA_VERSION = 78 + - Validate check (full_user_id IS NOT NULL) on tables profiles and user_filters """ diff --git a/synapse/storage/schema/main/delta/78/01_validate_and_update_profiles.py b/synapse/storage/schema/main/delta/78/01_validate_and_update_profiles.py new file mode 100644 index 000000000000..8398d8f54882 --- /dev/null +++ b/synapse/storage/schema/main/delta/78/01_validate_and_update_profiles.py @@ -0,0 +1,92 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse.config.homeserver import HomeServerConfig +from synapse.storage.database import LoggingTransaction +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine + + +def run_upgrade( + cur: LoggingTransaction, + database_engine: BaseDatabaseEngine, + config: HomeServerConfig, +) -> None: + """ + Part 3 of a multi-step migration to drop the column `user_id` and replace it with + `full_user_id`. See the database schema docs for more information on the full + migration steps. + """ + hostname = config.server.server_name + + if isinstance(database_engine, PostgresEngine): + # check if the constraint can be validated + check_sql = """ + SELECT user_id from profiles WHERE full_user_id IS NULL + """ + cur.execute(check_sql) + res = cur.fetchall() + + if res: + # there are rows the background job missed, finish them here before we validate the constraint + process_rows_sql = """ + UPDATE profiles + SET full_user_id = '@' || user_id || ? + WHERE user_id IN ( + SELECT user_id FROM profiles WHERE full_user_id IS NULL + ) + """ + cur.execute(process_rows_sql, (f":{hostname}",)) + + # Now we can validate + validate_sql = """ + ALTER TABLE profiles VALIDATE CONSTRAINT full_user_id_not_null + """ + cur.execute(validate_sql) + + else: + # in SQLite we need to rewrite the table to add the constraint. + # First drop any temporary table that might be here from a previous failed migration. + cur.execute("DROP TABLE IF EXISTS temp_profiles") + + create_sql = """ + CREATE TABLE temp_profiles ( + full_user_id text NOT NULL, + user_id text, + displayname text, + avatar_url text, + UNIQUE (full_user_id), + UNIQUE (user_id) + ) + """ + cur.execute(create_sql) + + copy_sql = """ + INSERT INTO temp_profiles ( + user_id, + displayname, + avatar_url, + full_user_id) + SELECT user_id, displayname, avatar_url, '@' || user_id || ':' || ? 
FROM profiles + """ + cur.execute(copy_sql, (f"{hostname}",)) + + drop_sql = """ + DROP TABLE profiles + """ + cur.execute(drop_sql) + + rename_sql = """ + ALTER TABLE temp_profiles RENAME to profiles + """ + cur.execute(rename_sql) diff --git a/synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py b/synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py new file mode 100644 index 000000000000..8ef63335e7c3 --- /dev/null +++ b/synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py @@ -0,0 +1,95 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse.config.homeserver import HomeServerConfig +from synapse.storage.database import LoggingTransaction +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine + + +def run_upgrade( + cur: LoggingTransaction, + database_engine: BaseDatabaseEngine, + config: HomeServerConfig, +) -> None: + """ + Part 3 of a multi-step migration to drop the column `user_id` and replace it with + `full_user_id`. See the database schema docs for more information on the full + migration steps. + """ + hostname = config.server.server_name + + if isinstance(database_engine, PostgresEngine): + # check if the constraint can be validated + check_sql = """ + SELECT user_id from user_filters WHERE full_user_id IS NULL + """ + cur.execute(check_sql) + res = cur.fetchall() + + if res: + # there are rows the background job missed, finish them here before we validate constraint + process_rows_sql = """ + UPDATE user_filters + SET full_user_id = '@' || user_id || ? + WHERE user_id IN ( + SELECT user_id FROM user_filters WHERE full_user_id IS NULL + ) + """ + cur.execute(process_rows_sql, (f":{hostname}",)) + + # Now we can validate + validate_sql = """ + ALTER TABLE user_filters VALIDATE CONSTRAINT full_user_id_not_null + """ + cur.execute(validate_sql) + + else: + cur.execute("DROP TABLE IF EXISTS temp_user_filters") + create_sql = """ + CREATE TABLE temp_user_filters ( + full_user_id text NOT NULL, + user_id text NOT NULL, + filter_id bigint NOT NULL, + filter_json bytea NOT NULL, + UNIQUE (full_user_id), + UNIQUE (user_id) + ) + """ + cur.execute(create_sql) + + index_sql = """ + CREATE UNIQUE INDEX IF NOT EXISTS user_filters_unique ON + temp_user_filters (user_id, filter_id) + """ + cur.execute(index_sql) + + copy_sql = """ + INSERT INTO temp_user_filters ( + user_id, + filter_id, + filter_json, + full_user_id) + SELECT user_id, filter_id, filter_json, '@' || user_id || ':' || ? 
FROM user_filters + """ + cur.execute(copy_sql, (f"{hostname}",)) + + drop_sql = """ + DROP TABLE user_filters + """ + cur.execute(drop_sql) + + rename_sql = """ + ALTER TABLE temp_user_filters RENAME to user_filters + """ + cur.execute(rename_sql) diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index aa6af5ad7bb2..868f0c699535 100644 --- a/tests/api/test_filtering.py +++ b/tests/api/test_filtering.py @@ -35,7 +35,6 @@ user_id = UserID.from_string("@test_user:test") user2_id = UserID.from_string("@test_user2:test") -user_localpart = "test_user" class FilteringTestCase(unittest.HomeserverTestCase): @@ -449,9 +448,7 @@ def test_filter_presence_match(self) -> None: ] user_filter = self.get_success( - self.filtering.get_user_filter( - user_localpart=user_localpart, filter_id=filter_id - ) + self.filtering.get_user_filter(user_id=user_id, filter_id=filter_id) ) results = self.get_success(user_filter.filter_presence(presence_states)) @@ -479,9 +476,7 @@ def test_filter_presence_no_match(self) -> None: ] user_filter = self.get_success( - self.filtering.get_user_filter( - user_localpart=user_localpart + "2", filter_id=filter_id - ) + self.filtering.get_user_filter(user_id=user2_id, filter_id=filter_id) ) results = self.get_success(user_filter.filter_presence(presence_states)) @@ -498,9 +493,7 @@ def test_filter_room_state_match(self) -> None: events = [event] user_filter = self.get_success( - self.filtering.get_user_filter( - user_localpart=user_localpart, filter_id=filter_id - ) + self.filtering.get_user_filter(user_id=user_id, filter_id=filter_id) ) results = self.get_success(user_filter.filter_room_state(events=events)) @@ -519,9 +512,7 @@ def test_filter_room_state_no_match(self) -> None: events = [event] user_filter = self.get_success( - self.filtering.get_user_filter( - user_localpart=user_localpart, filter_id=filter_id - ) + self.filtering.get_user_filter(user_id=user_id, filter_id=filter_id) ) results = self.get_success(user_filter.filter_room_state(events)) @@ -603,9 +594,7 @@ def test_add_filter(self) -> None: user_filter_json, ( self.get_success( - self.datastore.get_user_filter( - user_localpart=user_localpart, filter_id=0 - ) + self.datastore.get_user_filter(user_id=user_id, filter_id=0) ) ), ) @@ -620,9 +609,7 @@ def test_get_filter(self) -> None: ) filter = self.get_success( - self.filtering.get_user_filter( - user_localpart=user_localpart, filter_id=filter_id - ) + self.filtering.get_user_filter(user_id=user_id, filter_id=filter_id) ) self.assertEqual(filter.get_filter_json(), user_filter_json) diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index 64a9a22afeca..196ceb0b82d0 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -80,11 +80,7 @@ def test_set_my_name(self) -> None: ) self.assertEqual( - ( - self.get_success( - self.store.get_profile_displayname(self.frank.localpart) - ) - ), + (self.get_success(self.store.get_profile_displayname(self.frank))), "Frank Jr.", ) @@ -96,11 +92,7 @@ def test_set_my_name(self) -> None: ) self.assertEqual( - ( - self.get_success( - self.store.get_profile_displayname(self.frank.localpart) - ) - ), + (self.get_success(self.store.get_profile_displayname(self.frank))), "Frank", ) @@ -112,7 +104,7 @@ def test_set_my_name(self) -> None: ) self.assertIsNone( - self.get_success(self.store.get_profile_displayname(self.frank.localpart)) + self.get_success(self.store.get_profile_displayname(self.frank)) ) def test_set_my_name_if_disabled(self) -> None: @@ -122,11 +114,7 @@ 
def test_set_my_name_if_disabled(self) -> None: self.get_success(self.store.set_profile_displayname(self.frank, "Frank")) self.assertEqual( - ( - self.get_success( - self.store.get_profile_displayname(self.frank.localpart) - ) - ), + (self.get_success(self.store.get_profile_displayname(self.frank))), "Frank", ) @@ -201,7 +189,7 @@ def test_set_my_avatar(self) -> None: ) self.assertEqual( - (self.get_success(self.store.get_profile_avatar_url(self.frank.localpart))), + (self.get_success(self.store.get_profile_avatar_url(self.frank))), "http://my.server/pic.gif", ) @@ -215,7 +203,7 @@ def test_set_my_avatar(self) -> None: ) self.assertEqual( - (self.get_success(self.store.get_profile_avatar_url(self.frank.localpart))), + (self.get_success(self.store.get_profile_avatar_url(self.frank))), "http://my.server/me.png", ) @@ -229,7 +217,7 @@ def test_set_my_avatar(self) -> None: ) self.assertIsNone( - (self.get_success(self.store.get_profile_avatar_url(self.frank.localpart))), + (self.get_success(self.store.get_profile_avatar_url(self.frank))), ) def test_set_my_avatar_if_disabled(self) -> None: @@ -241,7 +229,7 @@ def test_set_my_avatar_if_disabled(self) -> None: ) self.assertEqual( - (self.get_success(self.store.get_profile_avatar_url(self.frank.localpart))), + (self.get_success(self.store.get_profile_avatar_url(self.frank))), "http://my.server/me.png", ) diff --git a/tests/module_api/test_api.py b/tests/module_api/test_api.py index bff7114cd89c..b3310abe1b31 100644 --- a/tests/module_api/test_api.py +++ b/tests/module_api/test_api.py @@ -28,7 +28,7 @@ from synapse.rest import admin from synapse.rest.client import login, notifications, presence, profile, room from synapse.server import HomeServer -from synapse.types import JsonDict, create_requester +from synapse.types import JsonDict, UserID, create_requester from synapse.util import Clock from tests.events.test_presence_router import send_presence_update, sync_presence @@ -103,7 +103,9 @@ def test_can_register_user(self) -> None: self.assertEqual(email["added_at"], 0) # Check that the displayname was assigned - displayname = self.get_success(self.store.get_profile_displayname("bob")) + displayname = self.get_success( + self.store.get_profile_displayname(UserID.from_string("@bob:test")) + ) self.assertEqual(displayname, "Bobberino") def test_can_register_admin_user(self) -> None: diff --git a/tests/rest/client/test_filter.py b/tests/rest/client/test_filter.py index 9faa9de05076..a2d5d340be35 100644 --- a/tests/rest/client/test_filter.py +++ b/tests/rest/client/test_filter.py @@ -46,7 +46,9 @@ def test_add_filter(self) -> None: self.assertEqual(channel.code, 200) self.assertEqual(channel.json_body, {"filter_id": "0"}) filter = self.get_success( - self.store.get_user_filter(user_localpart="apple", filter_id=0) + self.store.get_user_filter( + user_id=UserID.from_string(FilterTestCase.user_id), filter_id=0 + ) ) self.pump() self.assertEqual(filter, self.EXAMPLE_FILTER) diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py index f9cf0fcb82ed..fe5bb7791336 100644 --- a/tests/storage/test_profile.py +++ b/tests/storage/test_profile.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ from twisted.test.proto_helpers import MemoryReactor from synapse.server import HomeServer @@ -35,18 +36,14 @@ def test_displayname(self) -> None: self.assertEqual( "Frank", - ( - self.get_success( - self.store.get_profile_displayname(self.u_frank.localpart) - ) - ), + (self.get_success(self.store.get_profile_displayname(self.u_frank))), ) # test set to None self.get_success(self.store.set_profile_displayname(self.u_frank, None)) self.assertIsNone( - self.get_success(self.store.get_profile_displayname(self.u_frank.localpart)) + self.get_success(self.store.get_profile_displayname(self.u_frank)) ) def test_avatar_url(self) -> None: @@ -58,18 +55,14 @@ def test_avatar_url(self) -> None: self.assertEqual( "http://my.site/here", - ( - self.get_success( - self.store.get_profile_avatar_url(self.u_frank.localpart) - ) - ), + (self.get_success(self.store.get_profile_avatar_url(self.u_frank))), ) # test set to None self.get_success(self.store.set_profile_avatar_url(self.u_frank, None)) self.assertIsNone( - self.get_success(self.store.get_profile_avatar_url(self.u_frank.localpart)) + self.get_success(self.store.get_profile_avatar_url(self.u_frank)) ) def test_profiles_bg_migration(self) -> None: From 8ba530c0e3b157137031d456225b7ba1e0b1627d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 10:31:41 +0100 Subject: [PATCH 036/153] Bump importlib-metadata from 6.1.0 to 6.6.0 (#15711) Bumps [importlib-metadata](https://github.com/python/importlib_metadata) from 6.1.0 to 6.6.0. - [Release notes](https://github.com/python/importlib_metadata/releases) - [Changelog](https://github.com/python/importlib_metadata/blob/main/CHANGES.rst) - [Commits](https://github.com/python/importlib_metadata/compare/v6.1.0...v6.6.0) --- updated-dependencies: - dependency-name: importlib-metadata dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index d8964f5719de..180f2740878c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. 
[[package]] name = "alabaster" @@ -867,14 +867,14 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.1.0" +version = "6.6.0" description = "Read metadata from Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.1.0-py3-none-any.whl", hash = "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09"}, - {file = "importlib_metadata-6.1.0.tar.gz", hash = "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20"}, + {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, + {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, ] [package.dependencies] @@ -3424,18 +3424,18 @@ docs = ["Sphinx", "repoze.sphinx.autointerface"] test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"] [extras] -all = ["Pympler", "authlib", "hiredis", "jaeger-client", "lxml", "matrix-synapse-ldap3", "opentracing", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pyicu", "pysaml2", "sentry-sdk", "txredisapi"] +all = ["matrix-synapse-ldap3", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pysaml2", "authlib", "lxml", "sentry-sdk", "jaeger-client", "opentracing", "txredisapi", "hiredis", "Pympler", "pyicu"] cache-memory = ["Pympler"] jwt = ["authlib"] matrix-synapse-ldap3 = ["matrix-synapse-ldap3"] oidc = ["authlib"] opentracing = ["jaeger-client", "opentracing"] postgres = ["psycopg2", "psycopg2cffi", "psycopg2cffi-compat"] -redis = ["hiredis", "txredisapi"] +redis = ["txredisapi", "hiredis"] saml2 = ["pysaml2"] sentry = ["sentry-sdk"] systemd = ["systemd-python"] -test = ["idna", "parameterized"] +test = ["parameterized", "idna"] url-preview = ["lxml"] user-search = ["pyicu"] From 36a5bcae2cf70f5b7dec44e34c10d7e47ee0bcc2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 10:31:54 +0100 Subject: [PATCH 037/153] Bump library/redis from 6-bullseye to 7-bullseye in /docker (#15712) Bumps library/redis from 6-bullseye to 7-bullseye. --- updated-dependencies: - dependency-name: library/redis dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/Dockerfile-workers | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile-workers b/docker/Dockerfile-workers index adb9a725e33f..31d6d33407c6 100644 --- a/docker/Dockerfile-workers +++ b/docker/Dockerfile-workers @@ -21,7 +21,7 @@ FROM docker.io/library/debian:bullseye-slim AS deps_base # which makes it much easier to copy (but we need to make sure we use an image # based on the same debian version as the synapse image, to make sure we get # the expected version of libc. -FROM docker.io/library/redis:6-bullseye AS redis_base +FROM docker.io/library/redis:7-bullseye AS redis_base # now build the final image, based on the the regular Synapse docker image FROM $FROM From 5feabbdf062d16577f697fed41687c7bffc60c49 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 10:32:07 +0100 Subject: [PATCH 038/153] Bump pyasn1 from 0.4.8 to 0.5.0 (#15713) Bumps [pyasn1](https://github.com/pyasn1/pyasn1) from 0.4.8 to 0.5.0. 
- [Release notes](https://github.com/pyasn1/pyasn1/releases) - [Changelog](https://github.com/pyasn1/pyasn1/blob/main/CHANGES.rst) - [Commits](https://github.com/pyasn1/pyasn1/compare/v0.4.8...v0.5.0) --- updated-dependencies: - dependency-name: pyasn1 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 180f2740878c..d2fc2c1c9c7c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1863,14 +1863,14 @@ psycopg2 = "*" [[package]] name = "pyasn1" -version = "0.4.8" -description = "ASN.1 types and codecs" +version = "0.5.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" category = "main" optional = false -python-versions = "*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, - {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, + {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, + {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, ] [[package]] From 1a7aa81715609555cb4d0a7e3cad262b9c234007 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 10:32:16 +0100 Subject: [PATCH 039/153] Bump sentry-sdk from 1.22.1 to 1.25.0 (#15714) Bumps [sentry-sdk](https://github.com/getsentry/sentry-python) from 1.22.1 to 1.25.0. - [Release notes](https://github.com/getsentry/sentry-python/releases) - [Changelog](https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md) - [Commits](https://github.com/getsentry/sentry-python/compare/1.22.1...1.25.0) --- updated-dependencies: - dependency-name: sentry-sdk dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index d2fc2c1c9c7c..9f918574754d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2397,19 +2397,19 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "sentry-sdk" -version = "1.22.1" +version = "1.25.0" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = true python-versions = "*" files = [ - {file = "sentry-sdk-1.22.1.tar.gz", hash = "sha256:052dff5069c6f0d836ee014323576824a9b40836fc003fb12489a1f19c60a3c9"}, - {file = "sentry_sdk-1.22.1-py2.py3-none-any.whl", hash = "sha256:c6c6946f8c927adb00af1c5ab6921df38775b2199b9003816d5935a1310352d5"}, + {file = "sentry-sdk-1.25.0.tar.gz", hash = "sha256:5be3296fc574fa8a4d9b213b4dcf8c8d0246c08f8bd78315c6286f386c37555a"}, + {file = "sentry_sdk-1.25.0-py2.py3-none-any.whl", hash = "sha256:fe85cf5d0b3d0aa3480df689f9f6dc487de783defb0a95043368375dc893645e"}, ] [package.dependencies] certifi = "*" -urllib3 = {version = ">=1.26.11,<2.0.0", markers = "python_version >= \"3.6\""} +urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} [package.extras] aiohttp = ["aiohttp (>=3.5)"] @@ -2421,10 +2421,11 @@ chalice = ["chalice (>=1.16.0)"] django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] grpcio = ["grpcio (>=1.21.1)"] httpx = ["httpx (>=0.16.0)"] huey = ["huey (>=2)"] +loguru = ["loguru (>=0.5)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] pure-eval = ["asttokens", "executing", "pure-eval"] pymongo = ["pymongo (>=3.1)"] From 2d97d5b1c359c2a1783365c0db035f17d512dc4c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 10:32:25 +0100 Subject: [PATCH 040/153] Bump types-jsonschema from 4.17.0.7 to 4.17.0.8 (#15716) Bumps [types-jsonschema](https://github.com/python/typeshed) from 4.17.0.7 to 4.17.0.8. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-jsonschema dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9f918574754d..c94daa6cef70 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3038,14 +3038,14 @@ files = [ [[package]] name = "types-jsonschema" -version = "4.17.0.7" +version = "4.17.0.8" description = "Typing stubs for jsonschema" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-jsonschema-4.17.0.7.tar.gz", hash = "sha256:130e57c5f1ca755f95775d0822ad7a3907294e1461306af54baf804f317fd54c"}, - {file = "types_jsonschema-4.17.0.7-py3-none-any.whl", hash = "sha256:e129b52be6df841d97a98f087631dd558f7812eb91ff7b733c3301bd2446271b"}, + {file = "types-jsonschema-4.17.0.8.tar.gz", hash = "sha256:96a56990910f405e62de58862c0bbb3ac29ee6dba6d3d99aa0ba7f874cc547de"}, + {file = "types_jsonschema-4.17.0.8-py3-none-any.whl", hash = "sha256:f5958eb7b53217dfb5125f0412aeaef226a8a9013eac95816c95b5b523f6796b"}, ] [[package]] From ca8906be2cb821a0fb49ad1adf8440e79e64a398 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 10:39:34 +0100 Subject: [PATCH 041/153] Bump types-requests from 2.31.0.0 to 2.31.0.1 (#15715) Bumps [types-requests](https://github.com/python/typeshed) from 2.31.0.0 to 2.31.0.1. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-requests dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index c94daa6cef70..1f5cb3a3a85e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3125,14 +3125,14 @@ files = [ [[package]] name = "types-requests" -version = "2.31.0.0" +version = "2.31.0.1" description = "Typing stubs for requests" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-requests-2.31.0.0.tar.gz", hash = "sha256:c1c29d20ab8d84dff468d7febfe8e0cb0b4664543221b386605e14672b44ea25"}, - {file = "types_requests-2.31.0.0-py3-none-any.whl", hash = "sha256:7c5cea7940f8e92ec560bbc468f65bf684aa3dcf0554a6f8c4710f5f708dc598"}, + {file = "types-requests-2.31.0.1.tar.gz", hash = "sha256:3de667cffa123ce698591de0ad7db034a5317457a596eb0b4944e5a9d9e8d1ac"}, + {file = "types_requests-2.31.0.1-py3-none-any.whl", hash = "sha256:afb06ef8f25ba83d59a1d424bd7a5a939082f94b94e90ab5e6116bd2559deaa3"}, ] [package.dependencies] From f9561b9e37e4cbd97a71dd10549f1f03d3f01b5e Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 5 Jun 2023 23:38:52 -0500 Subject: [PATCH 042/153] Some house keeping on `maybe_backfill()` functions (#15709) --- changelog.d/15709.misc | 1 + synapse/handlers/federation.py | 17 +++++++++++++++++ 2 files changed, 18 insertions(+) create mode 100644 changelog.d/15709.misc diff --git a/changelog.d/15709.misc b/changelog.d/15709.misc new file mode 100644 index 000000000000..e9ce84a94021 --- /dev/null +++ b/changelog.d/15709.misc @@ -0,0 +1 @@ +Update docstring and traces on `maybe_backfill()` functions. 
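The housekeeping patch below decorates the backfill entry points with `@trace` and `@tag_args` and records the sorted backfill points on the active span. As a rough, non-authoritative sketch of that pattern (assuming the helpers are imported from `synapse.logging.opentracing`, as elsewhere in this series), the decorators open a span named after the function and tag its arguments, while `set_tag` attaches result data to the active span:

```python
# Sketch only, not Synapse's actual backfill code: the helper names mirror the
# diff below, while the function body and data are invented for illustration.
from synapse.logging.opentracing import SynapseTags, set_tag, tag_args, trace


@trace     # opens a span named after the decorated function
@tag_args  # records the call's arguments as span tags
async def maybe_backfill_sketch(room_id: str, current_depth: int, limit: int) -> bool:
    sorted_backfill_points = ["$event_a", "$event_b"]  # placeholder result
    # Attach result data to the active span, as the real patch does.
    set_tag(
        SynapseTags.RESULT_PREFIX + "sorted_backfill_points.length",
        str(len(sorted_backfill_points)),
    )
    return len(sorted_backfill_points) > 0
```
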
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 2eb28d55ac82..57d6b70cff48 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -200,6 +200,7 @@ def __init__(self, hs: "HomeServer"): ) @trace + @tag_args async def maybe_backfill( self, room_id: str, current_depth: int, limit: int ) -> bool: @@ -214,6 +215,9 @@ async def maybe_backfill( limit: The number of events that the pagination request will return. This is used as part of the heuristic to decide if we should back paginate. + + Returns: + True if we actually tried to backfill something, otherwise False. """ # Starting the processing time here so we can include the room backfill # linearizer lock queue in the timing @@ -227,6 +231,8 @@ async def maybe_backfill( processing_start_time=processing_start_time, ) + @trace + @tag_args async def _maybe_backfill_inner( self, room_id: str, @@ -247,6 +253,9 @@ async def _maybe_backfill_inner( limit: The max number of events to request from the remote federated server. processing_start_time: The time when `maybe_backfill` started processing. Only used for timing. If `None`, no timing observation will be made. + + Returns: + True if we actually tried to backfill something, otherwise False. """ backwards_extremities = [ _BackfillPoint(event_id, depth, _BackfillPointType.BACKWARDS_EXTREMITY) @@ -302,6 +311,14 @@ async def _maybe_backfill_inner( len(sorted_backfill_points), sorted_backfill_points, ) + set_tag( + SynapseTags.RESULT_PREFIX + "sorted_backfill_points", + str(sorted_backfill_points), + ) + set_tag( + SynapseTags.RESULT_PREFIX + "sorted_backfill_points.length", + str(len(sorted_backfill_points)), + ) # If we have no backfill points lower than the `current_depth` then # either we can a) bail or b) still attempt to backfill. We opt to try From f880e64b11bd03d1ebd710b34b541d5b2e044baa Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 6 Jun 2023 04:11:07 -0400 Subject: [PATCH 043/153] Stabilize support for MSC3952: Intentional mentions. (#15520) --- changelog.d/15520.feature | 1 + rust/benches/evaluator.rs | 3 -- rust/src/push/base_rules.rs | 8 ++--- rust/src/push/evaluator.rs | 10 +++--- rust/src/push/mod.rs | 7 ----- stubs/synapse/synapse_rust/push.pyi | 1 - synapse/api/constants.py | 2 +- synapse/config/experimental.py | 5 --- synapse/events/validator.py | 9 ++---- synapse/push/bulk_push_rule_evaluator.py | 8 +---- synapse/rest/client/versions.py | 2 -- synapse/storage/databases/main/push_rule.py | 1 - tests/push/test_bulk_push_rule_evaluator.py | 34 ++++++++------------- 13 files changed, 27 insertions(+), 64 deletions(-) create mode 100644 changelog.d/15520.feature diff --git a/changelog.d/15520.feature b/changelog.d/15520.feature new file mode 100644 index 000000000000..f4fd40ab9451 --- /dev/null +++ b/changelog.d/15520.feature @@ -0,0 +1 @@ +Enable support for [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952): intentional mentions. diff --git a/rust/benches/evaluator.rs b/rust/benches/evaluator.rs index 64e13f6486e8..c2f33258a4e3 100644 --- a/rust/benches/evaluator.rs +++ b/rust/benches/evaluator.rs @@ -13,8 +13,6 @@ // limitations under the License. 
#![feature(test)] -use std::collections::BTreeSet; - use synapse::push::{ evaluator::PushRuleEvaluator, Condition, EventMatchCondition, FilteredPushRules, JsonValue, PushRules, SimpleJsonValue, @@ -197,7 +195,6 @@ fn bench_eval_message(b: &mut Bencher) { false, false, false, - false, ); b.iter(|| eval.run(&rules, Some("bob"), Some("person"))); diff --git a/rust/src/push/base_rules.rs b/rust/src/push/base_rules.rs index 51372e15531b..9d6c304d9285 100644 --- a/rust/src/push/base_rules.rs +++ b/rust/src/push/base_rules.rs @@ -142,11 +142,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[ default_enabled: true, }, PushRule { - rule_id: Cow::Borrowed(".org.matrix.msc3952.is_user_mention"), + rule_id: Cow::Borrowed("global/override/.m.is_user_mention"), priority_class: 5, conditions: Cow::Borrowed(&[Condition::Known( KnownCondition::ExactEventPropertyContainsType(EventPropertyIsTypeCondition { - key: Cow::Borrowed("content.org\\.matrix\\.msc3952\\.mentions.user_ids"), + key: Cow::Borrowed("content.m\\.mentions.user_ids"), value_type: Cow::Borrowed(&EventMatchPatternType::UserId), }), )]), @@ -163,11 +163,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[ default_enabled: true, }, PushRule { - rule_id: Cow::Borrowed(".org.matrix.msc3952.is_room_mention"), + rule_id: Cow::Borrowed("global/override/.m.is_room_mention"), priority_class: 5, conditions: Cow::Borrowed(&[ Condition::Known(KnownCondition::EventPropertyIs(EventPropertyIsCondition { - key: Cow::Borrowed("content.org\\.matrix\\.msc3952\\.mentions.room"), + key: Cow::Borrowed("content.m\\.mentions.room"), value: Cow::Borrowed(&SimpleJsonValue::Bool(true)), })), Condition::Known(KnownCondition::SenderNotificationPermission { diff --git a/rust/src/push/evaluator.rs b/rust/src/push/evaluator.rs index 2d7c4c06be96..59c53b1776c6 100644 --- a/rust/src/push/evaluator.rs +++ b/rust/src/push/evaluator.rs @@ -70,7 +70,9 @@ pub struct PushRuleEvaluator { /// The "content.body", if any. body: String, - /// True if the event has a mentions property and MSC3952 support is enabled. + /// True if the event has a m.mentions property. (Note that this is a separate + /// flag instead of checking flattened_keys since the m.mentions property + /// might be an empty map and not appear in flattened_keys. has_mentions: bool, /// The number of users in the room. @@ -155,9 +157,7 @@ impl PushRuleEvaluator { let rule_id = &push_rule.rule_id().to_string(); // For backwards-compatibility the legacy mention rules are disabled - // if the event contains the 'm.mentions' property (and if the - // experimental feature is enabled, both of these are represented - // by the has_mentions flag). + // if the event contains the 'm.mentions' property. 
if self.has_mentions && (rule_id == "global/override/.m.rule.contains_display_name" || rule_id == "global/content/.m.rule.contains_user_name" @@ -562,7 +562,7 @@ fn test_requires_room_version_supports_condition() { }; let rules = PushRules::new(vec![custom_rule]); result = evaluator.run( - &FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false, false), + &FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false), None, None, ); diff --git a/rust/src/push/mod.rs b/rust/src/push/mod.rs index f19d3c739f1c..514980579b63 100644 --- a/rust/src/push/mod.rs +++ b/rust/src/push/mod.rs @@ -527,7 +527,6 @@ pub struct FilteredPushRules { msc1767_enabled: bool, msc3381_polls_enabled: bool, msc3664_enabled: bool, - msc3952_intentional_mentions: bool, msc3958_suppress_edits_enabled: bool, } @@ -540,7 +539,6 @@ impl FilteredPushRules { msc1767_enabled: bool, msc3381_polls_enabled: bool, msc3664_enabled: bool, - msc3952_intentional_mentions: bool, msc3958_suppress_edits_enabled: bool, ) -> Self { Self { @@ -549,7 +547,6 @@ impl FilteredPushRules { msc1767_enabled, msc3381_polls_enabled, msc3664_enabled, - msc3952_intentional_mentions, msc3958_suppress_edits_enabled, } } @@ -587,10 +584,6 @@ impl FilteredPushRules { return false; } - if !self.msc3952_intentional_mentions && rule.rule_id.contains("org.matrix.msc3952") - { - return false; - } if !self.msc3958_suppress_edits_enabled && rule.rule_id == "global/override/.com.beeper.suppress_edits" { diff --git a/stubs/synapse/synapse_rust/push.pyi b/stubs/synapse/synapse_rust/push.pyi index 5d0ce4b1a4bd..d573a37b9aff 100644 --- a/stubs/synapse/synapse_rust/push.pyi +++ b/stubs/synapse/synapse_rust/push.pyi @@ -46,7 +46,6 @@ class FilteredPushRules: msc1767_enabled: bool, msc3381_polls_enabled: bool, msc3664_enabled: bool, - msc3952_intentional_mentions: bool, msc3958_suppress_edits_enabled: bool, ): ... def rules(self) -> Collection[Tuple[PushRule, bool]]: ... diff --git a/synapse/api/constants.py b/synapse/api/constants.py index cde9a2ecefb3..faf0770c663a 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -236,7 +236,7 @@ class EventContentFields: AUTHORISING_USER: Final = "join_authorised_via_users_server" # Use for mentioning users. - MSC3952_MENTIONS: Final = "org.matrix.msc3952.mentions" + MENTIONS: Final = "m.mentions" # an unspecced field added to to-device messages to identify them uniquely-ish TO_DEVICE_MSGID: Final = "org.matrix.msgid" diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index a9e002cf08f8..1d5b5ded455d 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -358,11 +358,6 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: # MSC3391: Removing account data. self.msc3391_enabled = experimental.get("msc3391_enabled", False) - # MSC3952: Intentional mentions, this depends on MSC3966. - self.msc3952_intentional_mentions = experimental.get( - "msc3952_intentional_mentions", False - ) - # MSC3959: Do not generate notifications for edits. self.msc3958_supress_edit_notifs = experimental.get( "msc3958_supress_edit_notifs", False diff --git a/synapse/events/validator.py b/synapse/events/validator.py index 47203209db2f..9278f1a1aa65 100644 --- a/synapse/events/validator.py +++ b/synapse/events/validator.py @@ -134,13 +134,8 @@ def validate_new(self, event: EventBase, config: HomeServerConfig) -> None: ) # If the event contains a mentions key, validate it. 
- if ( - EventContentFields.MSC3952_MENTIONS in event.content - and config.experimental.msc3952_intentional_mentions - ): - validate_json_object( - event.content[EventContentFields.MSC3952_MENTIONS], Mentions - ) + if EventContentFields.MENTIONS in event.content: + validate_json_object(event.content[EventContentFields.MENTIONS], Mentions) def _validate_retention(self, event: EventBase) -> None: """Checks that an event that defines the retention policy for a room respects the diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index 320084f5f58c..33002cc0f275 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -120,9 +120,6 @@ def __init__(self, hs: "HomeServer"): self.should_calculate_push_rules = self.hs.config.push.enable_push self._related_event_match_enabled = self.hs.config.experimental.msc3664_enabled - self._intentional_mentions_enabled = ( - self.hs.config.experimental.msc3952_intentional_mentions - ) self.room_push_rule_cache_metrics = register_cache( "cache", @@ -390,10 +387,7 @@ async def _action_for_event_by_user( del notification_levels[key] # Pull out any user and room mentions. - has_mentions = ( - self._intentional_mentions_enabled - and EventContentFields.MSC3952_MENTIONS in event.content - ) + has_mentions = EventContentFields.MENTIONS in event.content evaluator = PushRuleEvaluator( _flatten_dict(event), diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 547bf34df15e..191064875561 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -124,8 +124,6 @@ def on_GET(self, request: Request) -> Tuple[int, JsonDict]: is not None, # Adds support for relation-based redactions as per MSC3912. "org.matrix.msc3912": self.config.experimental.msc3912_enabled, - # Adds support for unstable "intentional mentions" behaviour. - "org.matrix.msc3952_intentional_mentions": self.config.experimental.msc3952_intentional_mentions, # Whether recursively provide relations is supported. "org.matrix.msc3981": self.config.experimental.msc3981_recurse_relations, # Adds support for deleting account data. diff --git a/synapse/storage/databases/main/push_rule.py b/synapse/storage/databases/main/push_rule.py index 9f862f00c1c5..e098ceea3cdd 100644 --- a/synapse/storage/databases/main/push_rule.py +++ b/synapse/storage/databases/main/push_rule.py @@ -88,7 +88,6 @@ def _load_rules( msc1767_enabled=experimental_config.msc1767_enabled, msc3664_enabled=experimental_config.msc3664_enabled, msc3381_polls_enabled=experimental_config.msc3381_polls_enabled, - msc3952_intentional_mentions=experimental_config.msc3952_intentional_mentions, msc3958_suppress_edits_enabled=experimental_config.msc3958_supress_edit_notifs, ) diff --git a/tests/push/test_bulk_push_rule_evaluator.py b/tests/push/test_bulk_push_rule_evaluator.py index 9501096a7732..1e06f8607159 100644 --- a/tests/push/test_bulk_push_rule_evaluator.py +++ b/tests/push/test_bulk_push_rule_evaluator.py @@ -228,7 +228,6 @@ def _create_and_process( ) return len(result) > 0 - @override_config({"experimental_features": {"msc3952_intentional_mentions": True}}) def test_user_mentions(self) -> None: """Test the behavior of an event which includes invalid user mentions.""" bulk_evaluator = BulkPushRuleEvaluator(self.hs) @@ -237,9 +236,7 @@ def test_user_mentions(self) -> None: self.assertFalse(self._create_and_process(bulk_evaluator)) # An empty mentions field should not notify. 
self.assertFalse( - self._create_and_process( - bulk_evaluator, {EventContentFields.MSC3952_MENTIONS: {}} - ) + self._create_and_process(bulk_evaluator, {EventContentFields.MENTIONS: {}}) ) # Non-dict mentions should be ignored. @@ -253,7 +250,7 @@ def test_user_mentions(self) -> None: for mentions in (None, True, False, 1, "foo", []): self.assertFalse( self._create_and_process( - bulk_evaluator, {EventContentFields.MSC3952_MENTIONS: mentions} + bulk_evaluator, {EventContentFields.MENTIONS: mentions} ) ) @@ -262,7 +259,7 @@ def test_user_mentions(self) -> None: self.assertFalse( self._create_and_process( bulk_evaluator, - {EventContentFields.MSC3952_MENTIONS: {"user_ids": mentions}}, + {EventContentFields.MENTIONS: {"user_ids": mentions}}, ) ) @@ -270,14 +267,14 @@ def test_user_mentions(self) -> None: self.assertTrue( self._create_and_process( bulk_evaluator, - {EventContentFields.MSC3952_MENTIONS: {"user_ids": [self.alice]}}, + {EventContentFields.MENTIONS: {"user_ids": [self.alice]}}, ) ) self.assertTrue( self._create_and_process( bulk_evaluator, { - EventContentFields.MSC3952_MENTIONS: { + EventContentFields.MENTIONS: { "user_ids": ["@another:test", self.alice] } }, @@ -288,11 +285,7 @@ def test_user_mentions(self) -> None: self.assertTrue( self._create_and_process( bulk_evaluator, - { - EventContentFields.MSC3952_MENTIONS: { - "user_ids": [self.alice, self.alice] - } - }, + {EventContentFields.MENTIONS: {"user_ids": [self.alice, self.alice]}}, ) ) @@ -307,7 +300,7 @@ def test_user_mentions(self) -> None: self._create_and_process( bulk_evaluator, { - EventContentFields.MSC3952_MENTIONS: { + EventContentFields.MENTIONS: { "user_ids": [None, True, False, {}, []] } }, @@ -317,7 +310,7 @@ def test_user_mentions(self) -> None: self._create_and_process( bulk_evaluator, { - EventContentFields.MSC3952_MENTIONS: { + EventContentFields.MENTIONS: { "user_ids": [None, True, False, {}, [], self.alice] } }, @@ -331,12 +324,11 @@ def test_user_mentions(self) -> None: { "body": self.alice, "msgtype": "m.text", - EventContentFields.MSC3952_MENTIONS: {}, + EventContentFields.MENTIONS: {}, }, ) ) - @override_config({"experimental_features": {"msc3952_intentional_mentions": True}}) def test_room_mentions(self) -> None: """Test the behavior of an event which includes invalid room mentions.""" bulk_evaluator = BulkPushRuleEvaluator(self.hs) @@ -344,7 +336,7 @@ def test_room_mentions(self) -> None: # Room mentions from those without power should not notify. 
self.assertFalse( self._create_and_process( - bulk_evaluator, {EventContentFields.MSC3952_MENTIONS: {"room": True}} + bulk_evaluator, {EventContentFields.MENTIONS: {"room": True}} ) ) @@ -358,7 +350,7 @@ def test_room_mentions(self) -> None: ) self.assertTrue( self._create_and_process( - bulk_evaluator, {EventContentFields.MSC3952_MENTIONS: {"room": True}} + bulk_evaluator, {EventContentFields.MENTIONS: {"room": True}} ) ) @@ -374,7 +366,7 @@ def test_room_mentions(self) -> None: self.assertFalse( self._create_and_process( bulk_evaluator, - {EventContentFields.MSC3952_MENTIONS: {"room": mentions}}, + {EventContentFields.MENTIONS: {"room": mentions}}, ) ) @@ -385,7 +377,7 @@ def test_room_mentions(self) -> None: { "body": "@room", "msgtype": "m.text", - EventContentFields.MSC3952_MENTIONS: {}, + EventContentFields.MENTIONS: {}, }, ) ) From ad690037de0708d932380e3759d57ef3cc981345 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 6 Jun 2023 10:58:32 +0100 Subject: [PATCH 044/153] Fix link in changelog --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index ea13b554baa2..905713b2afc7 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -8,7 +8,7 @@ No significant changes since 1.85.0rc2. The following issues are fixed in 1.85.0 (and RCs). -- [GHSA-26c5-ppr8-f33p](https://github.com/matrix-org/synapse/security/advisories/GHSA-26c5-ppr8-f33p) / [CVE-2023-32682](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-32683) — Low Severity +- [GHSA-26c5-ppr8-f33p](https://github.com/matrix-org/synapse/security/advisories/GHSA-26c5-ppr8-f33p) / [CVE-2023-32682](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-32682) — Low Severity It may be possible for a deactivated user to login when using uncommon configurations. From dfd77f426e3e4a66dd027db7078ed0345a4c74dd Mon Sep 17 00:00:00 2001 From: Sean Quah <8349537+squahtx@users.noreply.github.com> Date: Tue, 6 Jun 2023 12:32:29 +0100 Subject: [PATCH 045/153] Remove some unused `server_name` fields (#15723) Signed-off-by: Sean Quah --- changelog.d/15723.misc | 1 + synapse/handlers/presence.py | 1 - synapse/handlers/read_marker.py | 1 - synapse/handlers/room.py | 1 - synapse/handlers/stats.py | 1 - synapse/rest/media/upload_resource.py | 1 - 6 files changed, 1 insertion(+), 5 deletions(-) create mode 100644 changelog.d/15723.misc diff --git a/changelog.d/15723.misc b/changelog.d/15723.misc new file mode 100644 index 000000000000..ba331adca7f2 --- /dev/null +++ b/changelog.d/15723.misc @@ -0,0 +1 @@ +Removed some unused fields. 
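Looking back at the intentional-mentions patch above: the stabilised push rules key off `content["m.mentions"]` rather than the old `org.matrix.msc3952.mentions` field. A purely illustrative event body that those rules would match, with invented user IDs and text, is:

```python
# Illustration only; the field names follow the base rules in the mentions
# patch above ("content.m\\.mentions.user_ids" and "content.m\\.mentions.room").
event_content = {
    "msgtype": "m.text",
    "body": "hello @alice",
    "m.mentions": {
        # Matches the .m.is_user_mention rule for each listed user.
        "user_ids": ["@alice:example.org"],
        # Matches the .m.is_room_mention rule, subject to the sender's
        # notification power level.
        "room": True,
    },
}
```
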
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 4ad223357384..0a219b796271 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -648,7 +648,6 @@ class PresenceHandler(BasePresenceHandler): def __init__(self, hs: "HomeServer"): super().__init__(hs) self.hs = hs - self.server_name = hs.hostname self.wheel_timer: WheelTimer[str] = WheelTimer() self.notifier = hs.get_notifier() self._presence_enabled = hs.config.server.use_presence diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py index 49a497a86011..df5a4f3e22cf 100644 --- a/synapse/handlers/read_marker.py +++ b/synapse/handlers/read_marker.py @@ -27,7 +27,6 @@ class ReadMarkerHandler: def __init__(self, hs: "HomeServer"): - self.server_name = hs.config.server.server_name self.store = hs.get_datastores().main self.account_data_handler = hs.get_account_data_handler() self.read_marker_linearizer = Linearizer(name="read_marker") diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 5e1702d78a4b..cb957f2033a1 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -1490,7 +1490,6 @@ async def filter_evts(events: List[EventBase]) -> List[EventBase]: class TimestampLookupHandler: def __init__(self, hs: "HomeServer"): - self.server_name = hs.hostname self.store = hs.get_datastores().main self.state_handler = hs.get_state_handler() self.federation_client = hs.get_federation_client() diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py index 5c01482acfd7..7cabf7980af1 100644 --- a/synapse/handlers/stats.py +++ b/synapse/handlers/stats.py @@ -42,7 +42,6 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main self._storage_controllers = hs.get_storage_controllers() self.state = hs.get_state_handler() - self.server_name = hs.hostname self.clock = hs.get_clock() self.notifier = hs.get_notifier() self.is_mine_id = hs.is_mine_id diff --git a/synapse/rest/media/upload_resource.py b/synapse/rest/media/upload_resource.py index 697348613b52..043e8d6077ea 100644 --- a/synapse/rest/media/upload_resource.py +++ b/synapse/rest/media/upload_resource.py @@ -39,7 +39,6 @@ def __init__(self, hs: "HomeServer", media_repo: "MediaRepository"): self.filepaths = media_repo.filepaths self.store = hs.get_datastores().main self.clock = hs.get_clock() - self.server_name = hs.hostname self.auth = hs.get_auth() self.max_upload_size = hs.config.media.max_upload_size self.clock = hs.get_clock() From d43c72a6c85ab7cf7391f1b716dfd57f8fd0bf3d Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Tue, 6 Jun 2023 19:29:54 +0100 Subject: [PATCH 046/153] Prevent "twisted trunk" and "latest deps" workflows from running on forks (#15726) --- .github/workflows/latest_deps.yml | 23 +++++++++++++++++++++-- .github/workflows/twisted_trunk.yml | 24 ++++++++++++++++++++++-- changelog.d/15726.misc | 1 + 3 files changed, 44 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15726.misc diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index 452600ba1633..ec6391cf8fd4 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -22,7 +22,21 @@ concurrency: cancel-in-progress: true jobs: + check_repo: + # Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is + # only useful to the Synapse core team. 
+ # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest + # of the workflow will be skipped as well. + runs-on: ubuntu-latest + outputs: + should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }} + steps: + - id: check_condition + run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT" + mypy: + needs: check_repo + if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -47,6 +61,8 @@ jobs: run: sed '/warn_unused_ignores = True/d' -i mypy.ini - run: poetry run mypy trial: + needs: check_repo + if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest strategy: matrix: @@ -105,6 +121,8 @@ jobs: sytest: + needs: check_repo + if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest container: image: matrixdotorg/sytest-synapse:testing @@ -156,7 +174,8 @@ jobs: complement: - if: "${{ !failure() && !cancelled() }}" + needs: check_repo + if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'" runs-on: ubuntu-latest strategy: @@ -192,7 +211,7 @@ jobs: # Open an issue if the build fails, so we know about it. # Only do this if we're not experimenting with this action in a PR. open-issue: - if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'" + if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request' && needs.check_repo.outputs.should_run_workflow == 'true'" needs: # TODO: should mypy be included here? It feels more brittle than the others. - mypy diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index 14fc6a0389c3..55081f8133b2 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -18,7 +18,22 @@ concurrency: cancel-in-progress: true jobs: + check_repo: + # Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is + # only useful to the Synapse core team. + # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest + # of the workflow will be skipped as well. + if: github.repository == 'matrix-org/synapse' + runs-on: ubuntu-latest + outputs: + should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }} + steps: + - id: check_condition + run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT" + mypy: + needs: check_repo + if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest steps: @@ -41,6 +56,8 @@ jobs: - run: poetry run mypy trial: + needs: check_repo + if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest steps: @@ -75,6 +92,8 @@ jobs: || true sytest: + needs: check_repo + if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest container: image: matrixdotorg/sytest-synapse:buster @@ -119,7 +138,8 @@ jobs: /logs/**/*.log* complement: - if: "${{ !failure() && !cancelled() }}" + needs: check_repo + if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'" runs-on: ubuntu-latest strategy: @@ -166,7 +186,7 @@ jobs: # open an issue if the build fails, so we know about it. 
open-issue: - if: failure() + if: failure() && needs.check_repo.outputs.should_run_workflow == 'true' needs: - mypy - trial diff --git a/changelog.d/15726.misc b/changelog.d/15726.misc new file mode 100644 index 000000000000..941e541e7766 --- /dev/null +++ b/changelog.d/15726.misc @@ -0,0 +1 @@ +Prevent the `latest_deps` and `twisted_trunk` daily GitHub Actions workflows from running on forks of the codebase. \ No newline at end of file From 6ee96e936646d6ccc55dc076f62f8cf518c90d1e Mon Sep 17 00:00:00 2001 From: Shay Date: Tue, 6 Jun 2023 13:16:03 -0700 Subject: [PATCH 047/153] Improve performance of user directory search (#15729) --- changelog.d/15729.misc | 1 + synapse/storage/databases/main/user_directory.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15729.misc diff --git a/changelog.d/15729.misc b/changelog.d/15729.misc new file mode 100644 index 000000000000..394025430535 --- /dev/null +++ b/changelog.d/15729.misc @@ -0,0 +1 @@ +Improve performance of user directory search. diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py index a0319575f071..b0a06baf4f0c 100644 --- a/synapse/storage/databases/main/user_directory.py +++ b/synapse/storage/databases/main/user_directory.py @@ -1061,12 +1061,15 @@ async def search_user_dir( # The array of numbers are the weights for the various part of the # search: (domain, _, display name, localpart) sql = """ + WITH matching_users AS ( + SELECT user_id, vector FROM user_directory_search WHERE vector @@ to_tsquery('simple', ?) + LIMIT 10000 + ) SELECT d.user_id AS user_id, display_name, avatar_url - FROM user_directory_search as t + FROM matching_users as t INNER JOIN user_directory AS d USING (user_id) WHERE %(where_clause)s - AND vector @@ to_tsquery('simple', ?) ORDER BY (CASE WHEN d.user_id IS NOT NULL THEN 4.0 ELSE 1.0 END) * (CASE WHEN display_name IS NOT NULL THEN 1.2 ELSE 1.0 END) @@ -1095,8 +1098,9 @@ async def search_user_dir( "order_case_statements": " ".join(additional_ordering_statements), } args = ( - join_args - + (full_query, exact_query, prefix_query) + (full_query,) + + join_args + + (exact_query, prefix_query) + ordering_arguments + (limit + 1,) ) From 33c3550887f412f015cf651db82a9082bb12cd9e Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 6 Jun 2023 16:25:03 -0500 Subject: [PATCH 048/153] Add context for when/why to use the `long_retries` option when sending Federation requests (#15721) --- changelog.d/15721.misc | 1 + synapse/http/matrixfederationclient.py | 11 +++++++++-- 2 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 changelog.d/15721.misc diff --git a/changelog.d/15721.misc b/changelog.d/15721.misc new file mode 100644 index 000000000000..f4d892daf970 --- /dev/null +++ b/changelog.d/15721.misc @@ -0,0 +1 @@ +Add context for when/why to use the `long_retries` option when sending Federation requests. diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 9094dab0feb7..abb5ae581521 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -499,8 +499,15 @@ async def _send_request( Note that the above intervals are *in addition* to the time spent waiting for the request to complete (up to `timeout` ms). - NB: the long retry algorithm takes over 20 minutes to complete, with - a default timeout of 60s! + NB: the long retry algorithm takes over 20 minutes to complete, with a + default timeout of 60s! 
It's best not to use the `long_retries` option + for something that is blocking a client so we don't make them wait for + aaaaages, whereas some things like sending transactions (server to + server) we can be a lot more lenient but its very fuzzy / hand-wavey. + + In the future, we could be more intelligent about doing this sort of + thing by looking at things with the bigger picture in mind, + https://github.com/matrix-org/synapse/issues/8917 ignore_backoff: true to ignore the historical backoff data and try the request anyway. From 4e6390cb10676d3f621319663587f49baa57bedc Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 6 Jun 2023 16:26:12 -0500 Subject: [PATCH 049/153] Update error to more plainly explain we can only authorize our own events (#15725) --- changelog.d/15725.misc | 1 + synapse/federation/federation_server.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15725.misc diff --git a/changelog.d/15725.misc b/changelog.d/15725.misc new file mode 100644 index 000000000000..6c7a8a41d883 --- /dev/null +++ b/changelog.d/15725.misc @@ -0,0 +1 @@ +Update federation error to more plainly explain we can only authorize our own membership events. diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 149351dda025..9425b3250703 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -944,7 +944,7 @@ async def _on_send_membership_event( if not self._is_mine_server_name(authorising_server): raise SynapseError( 400, - f"Cannot authorise request from resident server: {authorising_server}", + f"Cannot authorise membership event for {authorising_server}. We can only authorise requests from our own homeserver", ) event.signatures.update( From 8bfded81f3378ab6333f174e182f2aae6ef01f49 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 6 Jun 2023 17:39:22 -0500 Subject: [PATCH 050/153] Trace functions which return `Awaitable` (#15650) --- changelog.d/15650.misc | 1 + synapse/logging/opentracing.py | 37 ++++++++++++++++++-------- tests/logging/test_opentracing.py | 43 +++++++++++++++++++++++-------- 3 files changed, 59 insertions(+), 22 deletions(-) create mode 100644 changelog.d/15650.misc diff --git a/changelog.d/15650.misc b/changelog.d/15650.misc new file mode 100644 index 000000000000..9bbad113e11e --- /dev/null +++ b/changelog.d/15650.misc @@ -0,0 +1 @@ +Add support for tracing functions which return `Awaitable`s. diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py index c70eee649c57..75217e3f45bb 100644 --- a/synapse/logging/opentracing.py +++ b/synapse/logging/opentracing.py @@ -171,6 +171,7 @@ def set_fates(clotho, lachesis, atropos, father="Zues", mother="Themis"): from typing import ( TYPE_CHECKING, Any, + Awaitable, Callable, Collection, ContextManager, @@ -903,6 +904,7 @@ def _wrapping_logic(func: Callable[P, R], *args: P.args, **kwargs: P.kwargs) -> """ if inspect.iscoroutinefunction(func): + # For this branch, we handle async functions like `async def func() -> RInner`. # In this branch, R = Awaitable[RInner], for some other type RInner @wraps(func) async def _wrapper( @@ -914,15 +916,16 @@ async def _wrapper( return await func(*args, **kwargs) # type: ignore[misc] else: - # The other case here handles both sync functions and those - # decorated with inlineDeferred. + # The other case here handles sync functions including those decorated with + # `@defer.inlineCallbacks` or that return a `Deferred` or other `Awaitable`. 
@wraps(func) - def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + def _wrapper(*args: P.args, **kwargs: P.kwargs) -> Any: scope = wrapping_logic(func, *args, **kwargs) scope.__enter__() try: result = func(*args, **kwargs) + if isinstance(result, defer.Deferred): def call_back(result: R) -> R: @@ -930,20 +933,32 @@ def call_back(result: R) -> R: return result def err_back(result: R) -> R: + # TODO: Pass the error details into `scope.__exit__(...)` for + # consistency with the other paths. scope.__exit__(None, None, None) return result result.addCallbacks(call_back, err_back) + elif inspect.isawaitable(result): + + async def wrap_awaitable() -> Any: + try: + assert isinstance(result, Awaitable) + awaited_result = await result + scope.__exit__(None, None, None) + return awaited_result + except Exception as e: + scope.__exit__(type(e), None, e.__traceback__) + raise + + # The original method returned an awaitable, eg. a coroutine, so we + # create another awaitable wrapping it that calls + # `scope.__exit__(...)`. + return wrap_awaitable() else: - if inspect.isawaitable(result): - logger.error( - "@trace may not have wrapped %s correctly! " - "The function is not async but returned a %s.", - func.__qualname__, - type(result).__name__, - ) - + # Just a simple sync function so we can just exit the scope and + # return the result without any fuss. scope.__exit__(None, None, None) return result diff --git a/tests/logging/test_opentracing.py b/tests/logging/test_opentracing.py index e28ba84cc2b7..1bc7d64ad9c4 100644 --- a/tests/logging/test_opentracing.py +++ b/tests/logging/test_opentracing.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import cast +from typing import Awaitable, cast from twisted.internet import defer from twisted.test.proto_helpers import MemoryReactorClock @@ -227,8 +227,6 @@ def test_trace_decorator_deferred(self) -> None: Test whether we can use `@trace_with_opname` (`@trace`) and `@tag_args` with functions that return deferreds """ - reactor = MemoryReactorClock() - with LoggingContext("root context"): @trace_with_opname("fixture_deferred_func", tracer=self._tracer) @@ -240,9 +238,6 @@ def fixture_deferred_func() -> "defer.Deferred[str]": result_d1 = fixture_deferred_func() - # let the tasks complete - reactor.pump((2,) * 8) - self.assertEqual(self.successResultOf(result_d1), "foo") # the span should have been reported @@ -256,8 +251,6 @@ def test_trace_decorator_async(self) -> None: Test whether we can use `@trace_with_opname` (`@trace`) and `@tag_args` with async functions """ - reactor = MemoryReactorClock() - with LoggingContext("root context"): @trace_with_opname("fixture_async_func", tracer=self._tracer) @@ -267,9 +260,6 @@ async def fixture_async_func() -> str: d1 = defer.ensureDeferred(fixture_async_func()) - # let the tasks complete - reactor.pump((2,) * 8) - self.assertEqual(self.successResultOf(d1), "foo") # the span should have been reported @@ -277,3 +267,34 @@ async def fixture_async_func() -> str: [span.operation_name for span in self._reporter.get_spans()], ["fixture_async_func"], ) + + def test_trace_decorator_awaitable_return(self) -> None: + """ + Test whether we can use `@trace_with_opname` (`@trace`) and `@tag_args` + with functions that return an awaitable (e.g. 
a coroutine) + """ + with LoggingContext("root context"): + # Something we can return without `await` to get a coroutine + async def fixture_async_func() -> str: + return "foo" + + # The actual kind of function we want to test that returns an awaitable + @trace_with_opname("fixture_awaitable_return_func", tracer=self._tracer) + @tag_args + def fixture_awaitable_return_func() -> Awaitable[str]: + return fixture_async_func() + + # Something we can run with `defer.ensureDeferred(runner())` and pump the + # whole async tasks through to completion. + async def runner() -> str: + return await fixture_awaitable_return_func() + + d1 = defer.ensureDeferred(runner()) + + self.assertEqual(self.successResultOf(d1), "foo") + + # the span should have been reported + self.assertEqual( + [span.operation_name for span in self._reporter.get_spans()], + ["fixture_awaitable_return_func"], + ) From 9d911b0da651893e0b67cb3506e18582cb0d95b5 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 6 Jun 2023 22:19:57 -0500 Subject: [PATCH 051/153] No need for the extra join since `membership` is built-in to `current_state_events` (#15731) This helps with the upstream `is_host_joined()` and `is_host_invited()` functions. `membership` was added to `current_state_events` in https://github.com/matrix-org/synapse/pull/5706 and forced in https://github.com/matrix-org/synapse/pull/13745 --- changelog.d/15731.misc | 1 + synapse/storage/databases/main/roommember.py | 7 +++---- 2 files changed, 4 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15731.misc diff --git a/changelog.d/15731.misc b/changelog.d/15731.misc new file mode 100644 index 000000000000..906bc2696254 --- /dev/null +++ b/changelog.d/15731.misc @@ -0,0 +1 @@ +Remove redundant table join with `room_memberships` when doing a `is_host_joined()`/`is_host_invited()` call (`membership` is already part of the `current_state_events`). diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index ae9c201b87e8..1b8ec67f5487 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -927,11 +927,10 @@ async def _check_host_room_membership( raise Exception("Invalid host name") sql = """ - SELECT state_key FROM current_state_events AS c - INNER JOIN room_memberships AS m USING (event_id) - WHERE m.membership = ? + SELECT state_key FROM current_state_events + WHERE membership = ? AND type = 'm.room.member' - AND c.room_id = ? + AND room_id = ? AND state_key LIKE ? LIMIT 1 """ From 5c24d7b9ebd8dec2c76dac5118cee22a1bb1032a Mon Sep 17 00:00:00 2001 From: Grant McLean Date: Thu, 8 Jun 2023 03:21:25 +1200 Subject: [PATCH 052/153] Check required power levels earlier in createRoom handler. (#15695) * Check required power levels earlier in createRoom handler. - If a server was configured to reject the creation of rooms with E2EE enabled (by specifying an unattainably high power level for "m.room.encryption" in default_power_level_content_override), the 403 error was not being triggered until after the room was created and before the "m.room.power_levels" was sent. This allowed a user to access the partially-configured room and complete the setup of E2EE and power levels manually. - This change causes the power level overrides to be checked earlier and the request to be rejected before the user gains access to the room. - A new `_validate_room_config` method is added to contain checks that should be run before a room is created. 
- The new test case confirms that a user request is rejected by the new validation method. Signed-off-by: Grant McLean * Add a changelog file. * Formatting fix for black. * Remove unneeded line from test. --------- Signed-off-by: Grant McLean --- changelog.d/15695.bugfix | 1 + synapse/handlers/room.py | 76 +++++++++++++++++++++++++++------ tests/rest/client/test_rooms.py | 37 ++++++++++++++++ 3 files changed, 100 insertions(+), 14 deletions(-) create mode 100644 changelog.d/15695.bugfix diff --git a/changelog.d/15695.bugfix b/changelog.d/15695.bugfix new file mode 100644 index 000000000000..99bf1fe05e34 --- /dev/null +++ b/changelog.d/15695.bugfix @@ -0,0 +1 @@ +Check permissions for enabling encryption earlier during room creation to avoid creating broken rooms. diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index cb957f2033a1..bf907b78815c 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -872,6 +872,8 @@ async def create_room( visibility = config.get("visibility", "private") is_public = visibility == "public" + self._validate_room_config(config, visibility) + room_id = await self._generate_and_create_room_id( creator_id=user_id, is_public=is_public, @@ -1111,20 +1113,7 @@ async def create_event( return new_event, new_unpersisted_context - visibility = room_config.get("visibility", "private") - preset_config = room_config.get( - "preset", - RoomCreationPreset.PRIVATE_CHAT - if visibility == "private" - else RoomCreationPreset.PUBLIC_CHAT, - ) - - try: - config = self._presets_dict[preset_config] - except KeyError: - raise SynapseError( - 400, f"'{preset_config}' is not a valid preset", errcode=Codes.BAD_JSON - ) + preset_config, config = self._room_preset_config(room_config) # MSC2175 removes the creator field from the create event. if not room_version.msc2175_implicit_room_creator: @@ -1306,6 +1295,65 @@ async def create_event( assert last_event.internal_metadata.stream_ordering is not None return last_event.internal_metadata.stream_ordering, last_event.event_id, depth + def _validate_room_config( + self, + config: JsonDict, + visibility: str, + ) -> None: + """Checks configuration parameters for a /createRoom request. + + If validation detects invalid parameters an exception may be raised to + cause room creation to be aborted and an error response to be returned + to the client. + + Args: + config: A dict of configuration options. Originally from the body of + the /createRoom request + visibility: One of "public" or "private" + """ + + # Validate the requested preset, raise a 400 error if not valid + preset_name, preset_config = self._room_preset_config(config) + + # If the user is trying to create an encrypted room and this is forbidden + # by the configured default_power_level_content_override, then reject the + # request before the room is created. + raw_initial_state = config.get("initial_state", []) + room_encryption_event = any( + s.get("type", "") == EventTypes.RoomEncryption for s in raw_initial_state + ) + + if preset_config["encrypted"] or room_encryption_event: + if self._default_power_level_content_override: + override = self._default_power_level_content_override.get(preset_name) + if override is not None: + event_levels = override.get("events", {}) + room_admin_level = event_levels.get(EventTypes.PowerLevels, 100) + encryption_level = event_levels.get(EventTypes.RoomEncryption, 100) + if encryption_level > room_admin_level: + raise SynapseError( + 403, + f"You cannot create an encrypted room. 
user_level ({room_admin_level}) < send_level ({encryption_level})", + ) + + def _room_preset_config(self, room_config: JsonDict) -> Tuple[str, dict]: + # The spec says rooms should default to private visibility if + # `visibility` is not specified. + visibility = room_config.get("visibility", "private") + preset_name = room_config.get( + "preset", + RoomCreationPreset.PRIVATE_CHAT + if visibility == "private" + else RoomCreationPreset.PUBLIC_CHAT, + ) + try: + preset_config = self._presets_dict[preset_name] + except KeyError: + raise SynapseError( + 400, f"'{preset_name}' is not a valid preset", errcode=Codes.BAD_JSON + ) + return preset_name, preset_config + def _generate_room_id(self) -> str: """Generates a random room ID. diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index 4d39c89f6f19..f1b4e1ad2fc1 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -1941,6 +1941,43 @@ def test_config_override_applies_only_to_specific_preset(self) -> None: channel.json_body["error"], ) + @unittest.override_config( + { + "default_power_level_content_override": { + "private_chat": { + "events": { + "m.room.avatar": 50, + "m.room.canonical_alias": 50, + "m.room.encryption": 999, + "m.room.history_visibility": 100, + "m.room.name": 50, + "m.room.power_levels": 100, + "m.room.server_acl": 100, + "m.room.tombstone": 100, + }, + "events_default": 0, + }, + } + }, + ) + def test_config_override_blocks_encrypted_room(self) -> None: + # Given the server has config for private_chats, + + # When I attempt to create an encrypted private_chat room + channel = self.make_request( + "POST", + "/createRoom", + '{"creation_content": {"m.federate": false},"name": "Secret Private Room","preset": "private_chat","initial_state": [{"type": "m.room.encryption","state_key": "","content": {"algorithm": "m.megolm.v1.aes-sha2"}}]}', + ) + + # Then I am not allowed because the required power level is unattainable + self.assertEqual(HTTPStatus.FORBIDDEN, channel.code, msg=channel.result["body"]) + self.assertEqual( + "You cannot create an encrypted room. " + + "user_level (100) < send_level (999)", + channel.json_body["error"], + ) + class RoomInitialSyncTestCase(RoomBase): """Tests /rooms/$room_id/initialSync.""" From 195b6a298d509518bf16d5a421d706ecb2ccdce6 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 7 Jun 2023 11:45:16 -0500 Subject: [PATCH 053/153] Remove redundant `room_memberships` join to find participating servers in a room (#15732) Spawning from https://github.com/matrix-org/synapse/pull/15731 --- changelog.d/15732.doc | 1 + docs/usage/administration/admin_faq.md | 5 ++--- 2 files changed, 3 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15732.doc diff --git a/changelog.d/15732.doc b/changelog.d/15732.doc new file mode 100644 index 000000000000..b0e8639df78d --- /dev/null +++ b/changelog.d/15732.doc @@ -0,0 +1 @@ +Simplify query to find participating servers in a room. diff --git a/docs/usage/administration/admin_faq.md b/docs/usage/administration/admin_faq.md index 28c3dd53a5f4..5c9ee7d0aa6f 100644 --- a/docs/usage/administration/admin_faq.md +++ b/docs/usage/administration/admin_faq.md @@ -27,9 +27,8 @@ What servers are currently participating in this room? 
Run this sql query on your db: ```sql SELECT DISTINCT split_part(state_key, ':', 2) - FROM current_state_events AS c - INNER JOIN room_memberships AS m USING (room_id, event_id) - WHERE room_id = '!cURbafjkfsMDVwdRDQ:matrix.org' AND membership = 'join'; +FROM current_state_events +WHERE room_id = '!cURbafjkfsMDVwdRDQ:matrix.org' AND membership = 'join'; ``` What users are registered on my server? From e536f02f68135a8494f80ded75d1a53b98cbcb8d Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 7 Jun 2023 11:47:01 -0500 Subject: [PATCH 054/153] Remove superfluous `room_memberships` join from background update (#15733) Spawning from https://github.com/matrix-org/synapse/pull/15731 --- changelog.d/15733.misc | 1 + synapse/storage/databases/main/roommember.py | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 changelog.d/15733.misc diff --git a/changelog.d/15733.misc b/changelog.d/15733.misc new file mode 100644 index 000000000000..3ae7be3c27d1 --- /dev/null +++ b/changelog.d/15733.misc @@ -0,0 +1 @@ +Remove superfluous `room_memberships` join from background update. diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index 1b8ec67f5487..582875c91a5b 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -1460,7 +1460,6 @@ def add_membership_profile_txn(txn: LoggingTransaction) -> int: SELECT stream_ordering, event_id, events.room_id, event_json.json FROM events INNER JOIN event_json USING (event_id) - INNER JOIN room_memberships USING (event_id) WHERE ? <= stream_ordering AND stream_ordering < ? AND type = 'm.room.member' ORDER BY stream_ordering DESC From d162aecaac52fb467822e319e4c3c5b216c33ca9 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Wed, 7 Jun 2023 18:12:23 +0100 Subject: [PATCH 055/153] Quick & dirty metric for background update status (#15740) * Quick & dirty metric for background update status * Changelog * Remove debug Co-authored-by: Mathieu Velten * Actually write to _aborted --------- Co-authored-by: Mathieu Velten --- changelog.d/15740.feature | 1 + synapse/metrics/__init__.py | 2 ++ synapse/storage/background_updates.py | 30 +++++++++++++++++++++++++++ synapse/storage/database.py | 8 ++++++- 4 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15740.feature diff --git a/changelog.d/15740.feature b/changelog.d/15740.feature new file mode 100644 index 000000000000..fed342ea55d8 --- /dev/null +++ b/changelog.d/15740.feature @@ -0,0 +1 @@ +Expose a metric reporting the database background update status. diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 8ce58872293a..39fc629937a8 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -77,6 +77,8 @@ def collect() -> Iterable[Metric]: @attr.s(slots=True, hash=True, auto_attribs=True) class LaterGauge(Collector): + """A Gauge which periodically calls a user-provided callback to produce metrics.""" + name: str desc: str labels: Optional[Sequence[str]] = attr.ib(hash=False) diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index ca085ef8000f..edc97a9d6105 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging +from enum import IntEnum from types import TracebackType from typing import ( TYPE_CHECKING, @@ -136,6 +137,15 @@ def total_items_per_ms(self) -> Optional[float]: return float(self.total_item_count) / float(self.total_duration_ms) +class UpdaterStatus(IntEnum): + # Use negative values for error conditions. + ABORTED = -1 + DISABLED = 0 + NOT_STARTED = 1 + RUNNING_UPDATE = 2 + COMPLETE = 3 + + class BackgroundUpdater: """Background updates are updates to the database that run in the background. Each update processes a batch of data at once. We attempt to @@ -158,11 +168,16 @@ def __init__(self, hs: "HomeServer", database: "DatabasePool"): self._background_update_performance: Dict[str, BackgroundUpdatePerformance] = {} self._background_update_handlers: Dict[str, _BackgroundUpdateHandler] = {} + # TODO: all these bool flags make me feel icky---can we combine into a status + # enum? self._all_done = False # Whether we're currently running updates self._running = False + # Marker to be set if we abort and halt all background updates. + self._aborted = False + # Whether background updates are enabled. This allows us to # enable/disable background updates via the admin API. self.enabled = True @@ -175,6 +190,20 @@ def __init__(self, hs: "HomeServer", database: "DatabasePool"): self.sleep_duration_ms = hs.config.background_updates.sleep_duration_ms self.sleep_enabled = hs.config.background_updates.sleep_enabled + def get_status(self) -> UpdaterStatus: + """An integer summarising the updater status. Used as a metric.""" + if self._aborted: + return UpdaterStatus.ABORTED + # TODO: a status for "have seen at least one failure, but haven't aborted yet". + if not self.enabled: + return UpdaterStatus.DISABLED + + if self._all_done: + return UpdaterStatus.COMPLETE + if self._running: + return UpdaterStatus.RUNNING_UPDATE + return UpdaterStatus.NOT_STARTED + def register_update_controller_callbacks( self, on_update: ON_UPDATE_CALLBACK, @@ -296,6 +325,7 @@ async def run_background_updates(self, sleep: bool) -> None: except Exception: back_to_back_failures += 1 if back_to_back_failures >= 5: + self._aborted = True raise RuntimeError( "5 back-to-back background update failures; aborting." 
) diff --git a/synapse/storage/database.py b/synapse/storage/database.py index bdaa508dbe12..10fa6c4802ee 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -54,7 +54,7 @@ current_context, make_deferred_yieldable, ) -from synapse.metrics import register_threadpool +from synapse.metrics import LaterGauge, register_threadpool from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage.background_updates import BackgroundUpdater from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine @@ -547,6 +547,12 @@ def __init__( self._db_pool = make_pool(hs.get_reactor(), database_config, engine) self.updates = BackgroundUpdater(hs, self) + LaterGauge( + "synapse_background_update_status", + "Background update status", + [], + self.updates.get_status, + ) self._previous_txn_total_time = 0.0 self._current_txn_total_time = 0.0 From c485ed1c5a4c62ae555531cfd001a5e5f8bc2e44 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 8 Jun 2023 13:14:40 +0100 Subject: [PATCH 056/153] Clear event caches when we purge history (#15609) This should help a little with #13476 --------- Co-authored-by: Patrick Cloke --- changelog.d/15609.bugfix | 1 + synapse/storage/_base.py | 31 ++++ synapse/storage/databases/main/cache.py | 134 ++++++++++++++++++ .../storage/databases/main/events_worker.py | 9 ++ .../storage/databases/main/purge_events.py | 8 +- synapse/util/caches/lrucache.py | 2 +- tests/handlers/test_sync.py | 2 +- tests/rest/client/test_read_marker.py | 3 - .../databases/main/test_events_worker.py | 8 +- 9 files changed, 184 insertions(+), 14 deletions(-) create mode 100644 changelog.d/15609.bugfix diff --git a/changelog.d/15609.bugfix b/changelog.d/15609.bugfix new file mode 100644 index 000000000000..b5a990cfec1e --- /dev/null +++ b/changelog.d/15609.bugfix @@ -0,0 +1 @@ +Correctly clear caches when we delete a room. diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 481fec72fe1b..fe4a76341137 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -86,9 +86,14 @@ def _invalidate_state_caches( room_id: Room where state changed members_changed: The user_ids of members that have changed """ + + # XXX: If you add something to this function make sure you add it to + # `_invalidate_state_caches_all` as well. + # If there were any membership changes, purge the appropriate caches. for host in {get_domain_from_id(u) for u in members_changed}: self._attempt_to_invalidate_cache("is_host_joined", (room_id, host)) + self._attempt_to_invalidate_cache("is_host_invited", (room_id, host)) if members_changed: self._attempt_to_invalidate_cache("get_users_in_room", (room_id,)) self._attempt_to_invalidate_cache("get_current_hosts_in_room", (room_id,)) @@ -117,6 +122,32 @@ def _invalidate_state_caches( self._attempt_to_invalidate_cache("get_room_summary", (room_id,)) self._attempt_to_invalidate_cache("get_partial_current_state_ids", (room_id,)) + def _invalidate_state_caches_all(self, room_id: str) -> None: + """Invalidates caches that are based on the current state, but does + not stream invalidations down replication. + + Same as `_invalidate_state_caches`, except that works when we don't know + which memberships have changed. 
+ + Args: + room_id: Room where state changed + """ + self._attempt_to_invalidate_cache("get_partial_current_state_ids", (room_id,)) + self._attempt_to_invalidate_cache("get_users_in_room", (room_id,)) + self._attempt_to_invalidate_cache("is_host_invited", None) + self._attempt_to_invalidate_cache("is_host_joined", None) + self._attempt_to_invalidate_cache("get_current_hosts_in_room", (room_id,)) + self._attempt_to_invalidate_cache("get_users_in_room_with_profiles", (room_id,)) + self._attempt_to_invalidate_cache("get_number_joined_users_in_room", (room_id,)) + self._attempt_to_invalidate_cache("get_local_users_in_room", (room_id,)) + self._attempt_to_invalidate_cache("does_pair_of_users_share_a_room", None) + self._attempt_to_invalidate_cache("get_user_in_room_with_profile", None) + self._attempt_to_invalidate_cache( + "get_rooms_for_user_with_stream_ordering", None + ) + self._attempt_to_invalidate_cache("get_rooms_for_user", None) + self._attempt_to_invalidate_cache("get_room_summary", (room_id,)) + def _attempt_to_invalidate_cache( self, cache_name: str, key: Optional[Collection[Any]] ) -> bool: diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py index 46fa0a73f9e4..6e1c7d681fe5 100644 --- a/synapse/storage/databases/main/cache.py +++ b/synapse/storage/databases/main/cache.py @@ -46,6 +46,12 @@ # based on the current state when notifying workers over replication. CURRENT_STATE_CACHE_NAME = "cs_cache_fake" +# As above, but for invalidating event caches on history deletion +PURGE_HISTORY_CACHE_NAME = "ph_cache_fake" + +# As above, but for invalidating room caches on room deletion +DELETE_ROOM_CACHE_NAME = "dr_cache_fake" + class CacheInvalidationWorkerStore(SQLBaseStore): def __init__( @@ -175,6 +181,23 @@ def process_replication_rows( room_id = row.keys[0] members_changed = set(row.keys[1:]) self._invalidate_state_caches(room_id, members_changed) + elif row.cache_func == PURGE_HISTORY_CACHE_NAME: + if row.keys is None: + raise Exception( + "Can't send an 'invalidate all' for 'purge history' cache" + ) + + room_id = row.keys[0] + self._invalidate_caches_for_room_events(room_id) + elif row.cache_func == DELETE_ROOM_CACHE_NAME: + if row.keys is None: + raise Exception( + "Can't send an 'invalidate all' for 'delete room' cache" + ) + + room_id = row.keys[0] + self._invalidate_caches_for_room_events(room_id) + self._invalidate_caches_for_room(room_id) else: self._attempt_to_invalidate_cache(row.cache_func, row.keys) @@ -226,6 +249,9 @@ def _invalidate_caches_for_event( relates_to: Optional[str], backfilled: bool, ) -> None: + # XXX: If you add something to this function make sure you add it to + # `_invalidate_caches_for_room_events` as well. + # This invalidates any local in-memory cached event objects, the original # process triggering the invalidation is responsible for clearing any external # cached objects. @@ -271,6 +297,106 @@ def _invalidate_caches_for_event( self._attempt_to_invalidate_cache("get_thread_participated", (relates_to,)) self._attempt_to_invalidate_cache("get_threads", (room_id,)) + def _invalidate_caches_for_room_events_and_stream( + self, txn: LoggingTransaction, room_id: str + ) -> None: + """Invalidate caches associated with events in a room, and stream to + replication. + + Used when we delete events a room, but don't know which events we've + deleted. 
+ """ + + self._send_invalidation_to_replication(txn, PURGE_HISTORY_CACHE_NAME, [room_id]) + txn.call_after(self._invalidate_caches_for_room_events, room_id) + + def _invalidate_caches_for_room_events(self, room_id: str) -> None: + """Invalidate caches associated with events in a room, and stream to + replication. + + Used when we delete events in a room, but don't know which events we've + deleted. + """ + + self._invalidate_local_get_event_cache_all() # type: ignore[attr-defined] + + self._attempt_to_invalidate_cache("have_seen_event", (room_id,)) + self._attempt_to_invalidate_cache("get_latest_event_ids_in_room", (room_id,)) + self._attempt_to_invalidate_cache( + "get_unread_event_push_actions_by_room_for_user", (room_id,) + ) + + self._attempt_to_invalidate_cache("_get_membership_from_event_id", None) + self._attempt_to_invalidate_cache("get_relations_for_event", None) + self._attempt_to_invalidate_cache("get_applicable_edit", None) + self._attempt_to_invalidate_cache("get_thread_id", None) + self._attempt_to_invalidate_cache("get_thread_id_for_receipts", None) + self._attempt_to_invalidate_cache("get_invited_rooms_for_local_user", None) + self._attempt_to_invalidate_cache( + "get_rooms_for_user_with_stream_ordering", None + ) + self._attempt_to_invalidate_cache("get_rooms_for_user", None) + self._attempt_to_invalidate_cache("get_references_for_event", None) + self._attempt_to_invalidate_cache("get_thread_summary", None) + self._attempt_to_invalidate_cache("get_thread_participated", None) + self._attempt_to_invalidate_cache("get_threads", (room_id,)) + + self._attempt_to_invalidate_cache("_get_state_group_for_event", None) + + self._attempt_to_invalidate_cache("get_event_ordering", None) + self._attempt_to_invalidate_cache("is_partial_state_event", None) + self._attempt_to_invalidate_cache("_get_joined_profile_from_event_id", None) + + def _invalidate_caches_for_room_and_stream( + self, txn: LoggingTransaction, room_id: str + ) -> None: + """Invalidate caches associated with rooms, and stream to replication. + + Used when we delete rooms. + """ + + self._send_invalidation_to_replication(txn, DELETE_ROOM_CACHE_NAME, [room_id]) + txn.call_after(self._invalidate_caches_for_room, room_id) + + def _invalidate_caches_for_room(self, room_id: str) -> None: + """Invalidate caches associated with rooms. + + Used when we delete rooms. + """ + + # If we've deleted the room then we also need to purge all event caches. 
+ self._invalidate_caches_for_room_events(room_id) + + self._attempt_to_invalidate_cache("get_account_data_for_room", None) + self._attempt_to_invalidate_cache("get_account_data_for_room_and_type", None) + self._attempt_to_invalidate_cache("get_aliases_for_room", (room_id,)) + self._attempt_to_invalidate_cache("get_latest_event_ids_in_room", (room_id,)) + self._attempt_to_invalidate_cache("_get_forward_extremeties_for_room", None) + self._attempt_to_invalidate_cache( + "get_unread_event_push_actions_by_room_for_user", (room_id,) + ) + self._attempt_to_invalidate_cache( + "_get_linearized_receipts_for_room", (room_id,) + ) + self._attempt_to_invalidate_cache("is_room_blocked", (room_id,)) + self._attempt_to_invalidate_cache("get_retention_policy_for_room", (room_id,)) + self._attempt_to_invalidate_cache( + "_get_partial_state_servers_at_join", (room_id,) + ) + self._attempt_to_invalidate_cache("is_partial_state_room", (room_id,)) + self._attempt_to_invalidate_cache("get_invited_rooms_for_local_user", None) + self._attempt_to_invalidate_cache( + "get_current_hosts_in_room_ordered", (room_id,) + ) + self._attempt_to_invalidate_cache("did_forget", None) + self._attempt_to_invalidate_cache("get_forgotten_rooms_for_user", None) + self._attempt_to_invalidate_cache("_get_membership_from_event_id", None) + self._attempt_to_invalidate_cache("get_room_version_id", (room_id,)) + + # And delete state caches. + + self._invalidate_state_caches_all(room_id) + async def invalidate_cache_and_stream( self, cache_name: str, keys: Tuple[Any, ...] ) -> None: @@ -377,6 +503,14 @@ def _send_invalidation_to_replication( "Can't stream invalidate all with magic current state cache" ) + if cache_name == PURGE_HISTORY_CACHE_NAME and keys is None: + raise Exception( + "Can't stream invalidate all with magic purge history cache" + ) + + if cache_name == DELETE_ROOM_CACHE_NAME and keys is None: + raise Exception("Can't stream invalidate all with magic delete room cache") + if isinstance(self.database_engine, PostgresEngine): assert self._cache_id_gen is not None diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index a39bc9097439..d93ffc4efa75 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -903,6 +903,15 @@ def _invalidate_local_get_event_cache(self, event_id: str) -> None: self._event_ref.pop(event_id, None) self._current_event_fetches.pop(event_id, None) + def _invalidate_local_get_event_cache_all(self) -> None: + """Clears the in-memory get event caches. + + Used when we purge room history. + """ + self._get_event_cache.clear() + self._event_ref.clear() + self._current_event_fetches.clear() + async def _get_events_from_cache( self, events: Iterable[str], update_metrics: bool = True ) -> Dict[str, EventCacheEntry]: diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py index efbd3e75d99e..9773c1fcd28a 100644 --- a/synapse/storage/databases/main/purge_events.py +++ b/synapse/storage/databases/main/purge_events.py @@ -308,6 +308,8 @@ def _purge_history_txn( logger.info("[purge] done") + self._invalidate_caches_for_room_events_and_stream(txn, room_id) + return referenced_state_groups async def purge_room(self, room_id: str) -> List[int]: @@ -485,10 +487,6 @@ def _purge_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[int]: # index on them. 
In any case we should be clearing out 'stream' tables # periodically anyway (#5888) - # TODO: we could probably usefully do a bunch more cache invalidation here - - # XXX: as with purge_history, this is racy, but no worse than other races - # that already exist. - self._invalidate_cache_and_stream(txn, self.have_seen_event, (room_id,)) + self._invalidate_caches_for_room_and_stream(txn, room_id) return state_groups diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index ed0da17227d3..6137c85e1051 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -862,5 +862,5 @@ def invalidate_local(self, key: KT) -> None: async def contains(self, key: KT) -> bool: return self._lru_cache.contains(key) - async def clear(self) -> None: + def clear(self) -> None: self._lru_cache.clear() diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index 0d9a3de92a5d..9f035a02dc69 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -163,7 +163,7 @@ def test_unknown_room_version(self) -> None: # Blow away caches (supported room versions can only change due to a restart). self.store.get_rooms_for_user_with_stream_ordering.invalidate_all() self.store.get_rooms_for_user.invalidate_all() - self.get_success(self.store._get_event_cache.clear()) + self.store._get_event_cache.clear() self.store._event_ref.clear() # The rooms should be excluded from the sync response. diff --git a/tests/rest/client/test_read_marker.py b/tests/rest/client/test_read_marker.py index 0eedcdb476b4..5cdd5694a04a 100644 --- a/tests/rest/client/test_read_marker.py +++ b/tests/rest/client/test_read_marker.py @@ -131,9 +131,6 @@ def send_message() -> str: event = self.get_success(self.store.get_event(event_id_1, allow_none=True)) assert event is None - # TODO See https://github.com/matrix-org/synapse/issues/13476 - self.store.get_event_ordering.invalidate_all() - # Test moving the read marker to a newer event event_id_2 = send_message() channel = self.make_request( diff --git a/tests/storage/databases/main/test_events_worker.py b/tests/storage/databases/main/test_events_worker.py index 9606ecc43b6b..788500e38f2d 100644 --- a/tests/storage/databases/main/test_events_worker.py +++ b/tests/storage/databases/main/test_events_worker.py @@ -188,7 +188,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.event_id = res["event_id"] # Reset the event cache so the tests start with it empty - self.get_success(self.store._get_event_cache.clear()) + self.store._get_event_cache.clear() def test_simple(self) -> None: """Test that we cache events that we pull from the DB.""" @@ -205,7 +205,7 @@ def test_event_ref(self) -> None: """ # Reset the event cache - self.get_success(self.store._get_event_cache.clear()) + self.store._get_event_cache.clear() with LoggingContext("test") as ctx: # We keep hold of the event event though we never use it. 
@@ -215,7 +215,7 @@ def test_event_ref(self) -> None: self.assertEqual(ctx.get_resource_usage().evt_db_fetch_count, 1) # Reset the event cache - self.get_success(self.store._get_event_cache.clear()) + self.store._get_event_cache.clear() with LoggingContext("test") as ctx: self.get_success(self.store.get_event(self.event_id)) @@ -390,7 +390,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.event_id = res["event_id"] # Reset the event cache so the tests start with it empty - self.get_success(self.store._get_event_cache.clear()) + self.store._get_event_cache.clear() @contextmanager def blocking_get_event_calls( From d84e66144dc12dacf71c987a2ba802dd59c0b68e Mon Sep 17 00:00:00 2001 From: Shay Date: Fri, 9 Jun 2023 00:00:46 -0700 Subject: [PATCH 057/153] Allow for the configuration of max request retries and min/max retry delays in the matrix federation client (#12504) Co-authored-by: Mathieu Velten Co-authored-by: Erik Johnston --- changelog.d/12504.misc | 1 + .../configuration/config_documentation.md | 26 +++++++++++++++++++ synapse/config/federation.py | 10 +++++++ synapse/http/matrixfederationclient.py | 21 ++++++++------- tests/http/test_matrixfederationclient.py | 20 +++++++++++++- 5 files changed, 68 insertions(+), 10 deletions(-) create mode 100644 changelog.d/12504.misc diff --git a/changelog.d/12504.misc b/changelog.d/12504.misc new file mode 100644 index 000000000000..0bebaa213d9e --- /dev/null +++ b/changelog.d/12504.misc @@ -0,0 +1 @@ +Allow for the configuration of max request retries and min/max retry delays in the matrix federation client. diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 0cf6e075ff11..8426de04179b 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -1196,6 +1196,32 @@ Example configuration: allow_device_name_lookup_over_federation: true ``` --- +### `federation` + +The federation section defines some sub-options related to federation. + +The following options are related to configuring timeout and retry logic for one request, +independently of the others. +Short retry algorithm is used when something or someone will wait for the request to have an +answer, while long retry is used for requests that happen in the background, +like sending a federation transaction. + +* `client_timeout`: timeout for the federation requests in seconds. Default to 60s. +* `max_short_retry_delay`: maximum delay to be used for the short retry algo in seconds. Default to 2s. +* `max_long_retry_delay`: maximum delay to be used for the short retry algo in seconds. Default to 60s. +* `max_short_retries`: maximum number of retries for the short retry algo. Default to 3 attempts. +* `max_long_retries`: maximum number of retries for the long retry algo. Default to 10 attempts. + +Example configuration: +```yaml +federation: + client_timeout: 180 + max_short_retry_delay: 7 + max_long_retry_delay: 100 + max_short_retries: 5 + max_long_retries: 20 +``` +--- ## Caching Options related to caching. 
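The long-retry options documented above drive a capped exponential backoff with jitter; the exact computation lives in the `matrixfederationclient.py` hunk further down in this patch. Below is a minimal, standalone sketch of how those delays grow under the documented defaults (`max_long_retries: 10`, `max_long_retry_delay: 60`). The helper name `long_retry_delays` and the `__main__` printout are illustrative only and are not part of Synapse.

```python
import random
from typing import List


def long_retry_delays(
    max_long_retries: int = 10, max_long_retry_delay: float = 60.0
) -> List[float]:
    """Sketch of the long-retry backoff shape: the base delay quadruples per
    attempt, is capped at the configured maximum, then is multiplied by a
    random factor between 0.8 and 1.4 (mirroring the client hunk below)."""
    delays = []
    for attempt in range(1, max_long_retries + 1):
        delay = 4.0 ** attempt                    # 4, 16, 64, ... seconds
        delay = min(delay, max_long_retry_delay)  # capped by the config option
        delay *= random.uniform(0.8, 1.4)         # jitter to spread retries out
        delays.append(delay)
    return delays


if __name__ == "__main__":
    # Roughly [4, 16, 60, 60, ...] once the cap kicks in, give or take jitter.
    print([round(d, 1) for d in long_retry_delays()])
```
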
diff --git a/synapse/config/federation.py b/synapse/config/federation.py index 336fca578aa1..d21f7fd02a5e 100644 --- a/synapse/config/federation.py +++ b/synapse/config/federation.py @@ -22,6 +22,8 @@ class FederationConfig(Config): section = "federation" def read_config(self, config: JsonDict, **kwargs: Any) -> None: + federation_config = config.setdefault("federation", {}) + # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist: Optional[dict] = None federation_domain_whitelist = config.get("federation_domain_whitelist", None) @@ -49,5 +51,13 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: "allow_device_name_lookup_over_federation", False ) + # Allow for the configuration of timeout, max request retries + # and min/max retry delays in the matrix federation client. + self.client_timeout = federation_config.get("client_timeout", 60) + self.max_long_retry_delay = federation_config.get("max_long_retry_delay", 60) + self.max_short_retry_delay = federation_config.get("max_short_retry_delay", 2) + self.max_long_retries = federation_config.get("max_long_retries", 10) + self.max_short_retries = federation_config.get("max_short_retries", 3) + _METRICS_FOR_DOMAINS_SCHEMA = {"type": "array", "items": {"type": "string"}} diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index abb5ae581521..ed36825b671c 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -95,8 +95,6 @@ ) -MAX_LONG_RETRIES = 10 -MAX_SHORT_RETRIES = 3 MAXINT = sys.maxsize @@ -406,7 +404,12 @@ def __init__( self.clock = hs.get_clock() self._store = hs.get_datastores().main self.version_string_bytes = hs.version_string.encode("ascii") - self.default_timeout = 60 + self.default_timeout = hs.config.federation.client_timeout + + self.max_long_retry_delay = hs.config.federation.max_long_retry_delay + self.max_short_retry_delay = hs.config.federation.max_short_retry_delay + self.max_long_retries = hs.config.federation.max_long_retries + self.max_short_retries = hs.config.federation.max_short_retries self._cooperator = Cooperator(scheduler=_make_scheduler(self.reactor)) @@ -583,9 +586,9 @@ async def _send_request( # XXX: Would be much nicer to retry only at the transaction-layer # (once we have reliable transactions in place) if long_retries: - retries_left = MAX_LONG_RETRIES + retries_left = self.max_long_retries else: - retries_left = MAX_SHORT_RETRIES + retries_left = self.max_short_retries url_bytes = request.uri url_str = url_bytes.decode("ascii") @@ -730,12 +733,12 @@ async def _send_request( if retries_left and not timeout: if long_retries: - delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left) - delay = min(delay, 60) + delay = 4 ** (self.max_long_retries + 1 - retries_left) + delay = min(delay, self.max_long_retry_delay) delay *= random.uniform(0.8, 1.4) else: - delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left) - delay = min(delay, 2) + delay = 0.5 * 2 ** (self.max_short_retries - retries_left) + delay = min(delay, self.max_short_retry_delay) delay *= random.uniform(0.8, 1.4) logger.debug( diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py index 0dfc03ce50f4..8565f8ac64ad 100644 --- a/tests/http/test_matrixfederationclient.py +++ b/tests/http/test_matrixfederationclient.py @@ -40,7 +40,7 @@ from synapse.util import Clock from tests.server import FakeTransport -from tests.unittest import HomeserverTestCase +from tests.unittest import 
HomeserverTestCase, override_config def check_logcontext(context: LoggingContextOrSentinel) -> None: @@ -640,3 +640,21 @@ def test_build_auth_headers_rejects_falsey_destinations(self) -> None: self.cl.build_auth_headers( b"", b"GET", b"https://example.com", destination_is=b"" ) + + @override_config( + { + "federation": { + "client_timeout": 180, + "max_long_retry_delay": 100, + "max_short_retry_delay": 7, + "max_long_retries": 20, + "max_short_retries": 5, + } + } + ) + def test_configurable_retry_and_delay_values(self) -> None: + self.assertEqual(self.cl.default_timeout, 180) + self.assertEqual(self.cl.max_long_retry_delay, 100) + self.assertEqual(self.cl.max_short_retry_delay, 7) + self.assertEqual(self.cl.max_long_retries, 20) + self.assertEqual(self.cl.max_short_retries, 5) From 373c0c7ff7cf55b5f46aba43f4c4f9bba5c79c0e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 9 Jun 2023 15:00:30 +0100 Subject: [PATCH 058/153] Speed up typechecking CI (#15752) By restoring the rust cache before installing the project. --- .github/workflows/tests.yml | 8 ++++---- changelog.d/15752.misc | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15752.misc diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index cf1899b580e8..02a4be3a24a1 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -92,6 +92,10 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 + - name: Install Rust + uses: dtolnay/rust-toolchain@1.58.1 + - uses: Swatinem/rust-cache@v2 + - name: Setup Poetry uses: matrix-org/setup-python-poetry@v1 with: @@ -103,10 +107,6 @@ jobs: # To make CI green, err towards caution and install the project. install-project: "true" - - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 - - uses: Swatinem/rust-cache@v2 - # Cribbed from # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17 - name: Restore/persist mypy's cache diff --git a/changelog.d/15752.misc b/changelog.d/15752.misc new file mode 100644 index 000000000000..7e373b12750b --- /dev/null +++ b/changelog.d/15752.misc @@ -0,0 +1 @@ +Speed up typechecking CI. From fcc3ca37e1b404981d9a0d6f2708e14407775b97 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 9 Jun 2023 15:39:49 -0500 Subject: [PATCH 059/153] Backfill in the background if we're doing it "just because" (#15710) Fix https://github.com/matrix-org/synapse/issues/15702 --- changelog.d/15710.feature | 1 + synapse/handlers/federation.py | 18 ++++++++++++++---- 2 files changed, 15 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15710.feature diff --git a/changelog.d/15710.feature b/changelog.d/15710.feature new file mode 100644 index 000000000000..fe77a2fef6e2 --- /dev/null +++ b/changelog.d/15710.feature @@ -0,0 +1 @@ +Speed up `/messages` by backfilling in the background when there are no backward extremities where we are directly paginating. diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 57d6b70cff48..b7b5e2102036 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -320,14 +320,21 @@ async def _maybe_backfill_inner( str(len(sorted_backfill_points)), ) - # If we have no backfill points lower than the `current_depth` then - # either we can a) bail or b) still attempt to backfill. We opt to try - # backfilling anyway just in case we do get relevant events. 
+ # If we have no backfill points lower than the `current_depth` then either we + # can a) bail or b) still attempt to backfill. We opt to try backfilling anyway + # just in case we do get relevant events. This is good for eventual consistency + # sake but we don't need to block the client for something that is just as + # likely not to return anything relevant so we backfill in the background. The + # only way, this could return something relevant is if we discover a new branch + # of history that extends all the way back to where we are currently paginating + # and it's within the 100 events that are returned from `/backfill`. if not sorted_backfill_points and current_depth != MAX_DEPTH: logger.debug( "_maybe_backfill_inner: all backfill points are *after* current depth. Trying again with later backfill points." ) - return await self._maybe_backfill_inner( + run_as_background_process( + "_maybe_backfill_inner_anyway_with_max_depth", + self._maybe_backfill_inner, room_id=room_id, # We use `MAX_DEPTH` so that we find all backfill points next # time (all events are below the `MAX_DEPTH`) @@ -338,6 +345,9 @@ async def _maybe_backfill_inner( # overall otherwise the smaller one will throw off the results. processing_start_time=None, ) + # We return `False` because we're backfilling in the background and there is + # no new events immediately for the caller to know about yet. + return False # Even after recursing with `MAX_DEPTH`, we didn't find any # backward extremities to backfill from. From 4f2bd6be695c83007ebd6f817b74c5a97cf01e4a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:17:04 +0100 Subject: [PATCH 060/153] Bump types-pyopenssl from 23.1.0.2 to 23.2.0.0 (#15766) Bumps [types-pyopenssl](https://github.com/python/typeshed) from 23.1.0.2 to 23.2.0.0. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pyopenssl dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 166 +++------------------------------------------------- 1 file changed, 7 insertions(+), 159 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1f5cb3a3a85e..228fccac9cc7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -16,7 +15,6 @@ files = [ name = "astroid" version = "2.15.0" description = "An abstract syntax tree for Python with inference support." -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -36,7 +34,6 @@ wrapt = [ name = "attrs" version = "22.2.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -55,7 +52,6 @@ tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy name = "authlib" version = "1.2.0" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." 
-category = "main" optional = true python-versions = "*" files = [ @@ -70,7 +66,6 @@ cryptography = ">=3.2" name = "automat" version = "22.10.0" description = "Self-service finite-state machines for the programmer on the go." -category = "main" optional = false python-versions = "*" files = [ @@ -89,7 +84,6 @@ visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] name = "babel" version = "2.12.1" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -104,7 +98,6 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -139,7 +132,6 @@ typecheck = ["mypy"] name = "beautifulsoup4" version = "4.12.0" description = "Screen-scraping library" -category = "dev" optional = false python-versions = ">=3.6.0" files = [ @@ -158,7 +150,6 @@ lxml = ["lxml"] name = "black" version = "23.3.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -209,7 +200,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.0.0" description = "An easy safelist-based HTML-sanitizing tool." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -228,7 +218,6 @@ css = ["tinycss2 (>=1.1.0,<1.2)"] name = "canonicaljson" version = "2.0.0" description = "Canonical JSON" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -240,7 +229,6 @@ files = [ name = "certifi" version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -252,7 +240,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = "*" files = [ @@ -329,7 +316,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -414,7 +400,6 @@ files = [ name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -430,7 +415,6 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} name = "click-default-group" version = "1.2.2" description = "Extends click.Group to invoke a command without explicit subcommand name" -category = "dev" optional = false python-versions = "*" files = [ @@ -444,7 +428,6 @@ click = "*" name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -456,7 +439,6 @@ files = [ name = "commonmark" version = "0.9.1" description = "Python parser for the CommonMark Markdown spec" -category = "dev" optional = false python-versions = "*" files = [ @@ -471,7 +453,6 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] name = "constantly" version = "15.1.0" description = "Symbolic constants in Python" -category = "main" optional = false python-versions = "*" files = [ @@ -483,7 +464,6 @@ files = [ name = "cryptography" version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -525,7 +505,6 @@ tox = ["tox"] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" -category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -537,7 +516,6 @@ files = [ name = "deprecated" version = "1.2.13" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -555,7 +533,6 @@ dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version name = "docutils" version = "0.19" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -567,7 +544,6 @@ files = [ name = "elementpath" version = "4.1.0" description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and lxml" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -582,7 +558,6 @@ dev = ["Sphinx", "coverage", "flake8", "lxml", "lxml-stubs", "memory-profiler", name = "furo" version = "2023.5.20" description = "A clean customisable Sphinx documentation theme." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -600,7 +575,6 @@ sphinx-basic-ng = "*" name = "gitdb" version = "4.0.10" description = "Git Object Database" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -615,7 +589,6 @@ smmap = ">=3.0.1,<6" name = "gitpython" version = "3.1.31" description = "GitPython is a Python library used to interact with Git repositories" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -631,7 +604,6 @@ typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\"" name = "hiredis" version = "2.2.3" description = "Python wrapper for hiredis" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -730,7 +702,6 @@ files = [ name = "hyperlink" version = "21.0.0" description = "A featureful, immutable, and correct URL for Python." 
-category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -745,7 +716,6 @@ idna = ">=2.5" name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -757,7 +727,6 @@ files = [ name = "ijson" version = "3.2.0.post0" description = "Iterative JSON parser with standard Python iterator interfaces" -category = "main" optional = false python-versions = "*" files = [ @@ -845,7 +814,6 @@ files = [ name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -857,7 +825,6 @@ files = [ name = "immutabledict" version = "2.2.4" description = "Immutable wrapper around dictionaries (a fork of frozendict)" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -869,7 +836,6 @@ files = [ name = "importlib-metadata" version = "6.6.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -890,7 +856,6 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "importlib-resources" version = "5.12.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -909,7 +874,6 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "incremental" version = "22.10.0" description = "\"A small library that versions your Python projects.\"" -category = "main" optional = false python-versions = "*" files = [ @@ -925,7 +889,6 @@ scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] name = "isort" version = "5.11.5" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -943,7 +906,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "jaeger-client" version = "4.8.0" description = "Jaeger Python OpenTracing Tracer implementation" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -963,7 +925,6 @@ tests = ["codecov", "coverage", "flake8", "flake8-quotes", "flake8-typing-import name = "jaraco-classes" version = "3.2.3" description = "Utility functions for Python class constructs" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -982,7 +943,6 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "jeepney" version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -998,7 +958,6 @@ trio = ["async_generator", "trio"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1016,7 +975,6 @@ i18n = ["Babel (>=2.7)"] name = "jsonschema" version = "4.17.3" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1040,7 +998,6 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "keyring" version = "23.13.1" description = "Store and access your passwords safely." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1065,7 +1022,6 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1111,7 +1067,6 @@ files = [ name = "ldap3" version = "2.9.1" description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library" -category = "main" optional = true python-versions = "*" files = [ @@ -1126,7 +1081,6 @@ pyasn1 = ">=0.4.6" name = "lxml" version = "4.9.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ @@ -1219,7 +1173,6 @@ source = ["Cython (>=0.29.7)"] name = "lxml-stubs" version = "0.4.0" description = "Type annotations for the lxml package" -category = "dev" optional = false python-versions = "*" files = [ @@ -1234,7 +1187,6 @@ test = ["coverage[toml] (==5.2)", "pytest (>=6.0.0)", "pytest-mypy-plugins (==1. name = "markdown-it-py" version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1260,7 +1212,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1320,7 +1271,6 @@ files = [ name = "matrix-common" version = "1.3.0" description = "Common utilities for Synapse, Sydent and Sygnal" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1340,7 +1290,6 @@ test = ["aiounittest", "tox", "twisted"] name = "matrix-synapse-ldap3" version = "0.2.2" description = "An LDAP3 auth provider for Synapse" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1360,7 +1309,6 @@ dev = ["black (==22.3.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "ldaptor", "ma name = "mdit-py-plugins" version = "0.3.5" description = "Collection of plugins for markdown-it-py" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1380,7 +1328,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1392,7 +1339,6 @@ files = [ name = "more-itertools" version = "9.1.0" description = "More routines for operating on iterables, beyond itertools" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1404,7 +1350,6 @@ files = [ name = "msgpack" version = "1.0.5" description = "MessagePack serializer" -category = "main" optional = false python-versions = "*" files = [ @@ -1477,7 +1422,6 @@ files = [ name = "mypy" version = "1.0.1" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1525,7 +1469,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1537,7 +1480,6 @@ files = [ name = "mypy-zope" version = "0.9.1" description = "Plugin for mypy to support zope interfaces" -category = "dev" optional = false python-versions = "*" files = [ @@ -1557,7 +1499,6 @@ test = ["lxml", "pytest (>=4.6)", "pytest-cov"] name = "myst-parser" version = "1.0.0" description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1584,7 +1525,6 @@ testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4, name = "netaddr" version = "0.8.0" description = "A network address manipulation library for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1596,7 +1536,6 @@ files = [ name = "opentracing" version = "2.4.0" description = "OpenTracing API for Python. See documentation at http://opentracing.io" -category = "main" optional = true python-versions = "*" files = [ @@ -1610,7 +1549,6 @@ tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pyte name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1622,7 +1560,6 @@ files = [ name = "parameterized" version = "0.9.0" description = "Parameterized testing with any Python test framework" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1637,7 +1574,6 @@ dev = ["jinja2"] name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1649,7 +1585,6 @@ files = [ name = "phonenumbers" version = "8.13.11" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." -category = "main" optional = false python-versions = "*" files = [ @@ -1661,7 +1596,6 @@ files = [ name = "pillow" version = "9.4.0" description = "Python Imaging Library (Fork)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1752,7 +1686,6 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "pkginfo" version = "1.9.6" description = "Query metadata from sdists / bdists / installed packages." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1767,7 +1700,6 @@ testing = ["pytest", "pytest-cov"] name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1779,7 +1711,6 @@ files = [ name = "platformdirs" version = "3.1.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1798,7 +1729,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytes name = "prometheus-client" version = "0.17.0" description = "Python client for the Prometheus monitoring system." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1813,7 +1743,6 @@ twisted = ["twisted"] name = "psycopg2" version = "2.9.6" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -1836,7 +1765,6 @@ files = [ name = "psycopg2cffi" version = "2.9.0" description = ".. image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=master" -category = "main" optional = true python-versions = "*" files = [ @@ -1851,7 +1779,6 @@ six = "*" name = "psycopg2cffi-compat" version = "1.1" description = "A Simple library to enable psycopg2 compatability" -category = "main" optional = true python-versions = "*" files = [ @@ -1865,7 +1792,6 @@ psycopg2 = "*" name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1877,7 +1803,6 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1892,7 +1817,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1904,7 +1828,6 @@ files = [ name = "pydantic" version = "1.10.8" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1957,7 +1880,6 @@ email = ["email-validator (>=1.0.3)"] name = "pygithub" version = "1.58.2" description = "Use the full Github API v3" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1975,7 +1897,6 @@ requests = ">=2.14.0" name = "pygments" version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1990,7 +1911,6 @@ plugins = ["importlib-metadata"] name = "pyicu" version = "2.11" description = "Python extension wrapping the ICU C++ API" -category = "main" optional = true python-versions = "*" files = [ @@ -2001,7 +1921,6 @@ files = [ name = "pyjwt" version = "2.6.0" description = "JSON Web Token implementation in Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2022,7 +1941,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pymacaroons" version = "0.13.0" description = "Macaroon library for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -2038,7 +1956,6 @@ six = ">=1.8.0" name = "pympler" version = "1.0.1" description = "A development tool to measure, monitor and analyze the memory behavior of Python objects." 
-category = "main" optional = true python-versions = ">=3.6" files = [ @@ -2050,7 +1967,6 @@ files = [ name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2077,7 +1993,6 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pyopenssl" version = "23.1.1" description = "Python wrapper module around the OpenSSL library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2096,7 +2011,6 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] name = "pyrsistent" version = "0.19.3" description = "Persistent/Functional/Immutable data structures" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2133,7 +2047,6 @@ files = [ name = "pysaml2" version = "7.3.1" description = "Python implementation of SAML Version 2 Standard" -category = "main" optional = true python-versions = ">=3.6.2,<4.0.0" files = [ @@ -2159,7 +2072,6 @@ s2repoze = ["paste", "repoze.who", "zope.interface"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2174,7 +2086,6 @@ six = ">=1.5" name = "pytz" version = "2022.7.1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -2186,7 +2097,6 @@ files = [ name = "pywin32-ctypes" version = "0.2.0" description = "" -category = "dev" optional = false python-versions = "*" files = [ @@ -2198,7 +2108,6 @@ files = [ name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2248,7 +2157,6 @@ files = [ name = "readme-renderer" version = "37.3" description = "readme_renderer is a library for rendering \"readme\" descriptions for Warehouse" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2268,7 +2176,6 @@ md = ["cmarkgfm (>=0.8.0)"] name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2290,7 +2197,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-toolbelt" version = "0.10.1" description = "A utility belt for advanced users of python-requests" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2305,7 +2211,6 @@ requests = ">=2.0.1,<3.0.0" name = "rfc3986" version = "2.0.0" description = "Validating URI References per RFC 3986" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2320,7 +2225,6 @@ idna2008 = ["idna"] name = "rich" version = "13.3.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -2340,7 +2244,6 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "ruff" version = "0.0.265" description = "An extremely fast Python linter, written in Rust." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2367,7 +2270,6 @@ files = [ name = "secretstorage" version = "3.3.3" description = "Python bindings to FreeDesktop.org Secret Service API" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2383,7 +2285,6 @@ jeepney = ">=0.6" name = "semantic-version" version = "2.10.0" description = "A library implementing the 'SemVer' scheme." -category = "main" optional = false python-versions = ">=2.7" files = [ @@ -2399,7 +2300,6 @@ doc = ["Sphinx", "sphinx-rtd-theme"] name = "sentry-sdk" version = "1.25.0" description = "Python client for Sentry (https://sentry.io)" -category = "main" optional = true python-versions = "*" files = [ @@ -2442,7 +2342,6 @@ tornado = ["tornado (>=5)"] name = "service-identity" version = "21.1.0" description = "Service identity verification for pyOpenSSL & cryptography." -category = "main" optional = false python-versions = "*" files = [ @@ -2467,7 +2366,6 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] name = "setuptools" version = "67.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2484,7 +2382,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "setuptools-rust" version = "1.6.0" description = "Setuptools Rust extension plugin" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2501,7 +2398,6 @@ typing-extensions = ">=3.7.4.3" name = "signedjson" version = "1.1.4" description = "Sign JSON with Ed25519 signatures" -category = "main" optional = false python-versions = "*" files = [ @@ -2523,7 +2419,6 @@ dev = ["typing-extensions (>=3.5)"] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2535,7 +2430,6 @@ files = [ name = "smmap" version = "5.0.0" description = "A pure Python implementation of a sliding window memory map manager" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2547,7 +2441,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" optional = false python-versions = "*" files = [ @@ -2559,7 +2452,6 @@ files = [ name = "sortedcontainers" version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -category = "main" optional = false python-versions = "*" files = [ @@ -2571,7 +2463,6 @@ files = [ name = "soupsieve" version = "2.4" description = "A modern CSS selector implementation for Beautiful Soup." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2583,7 +2474,6 @@ files = [ name = "sphinx" version = "6.2.1" description = "Python documentation generator" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2619,7 +2509,6 @@ test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] name = "sphinx-autodoc2" version = "0.4.2" description = "Analyse a python project and create documentation for it." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2642,7 +2531,6 @@ testing = ["pytest", "pytest-cov", "pytest-regressions", "sphinx (>=4.0.0)"] name = "sphinx-basic-ng" version = "1.0.0b1" description = "A modern skeleton for Sphinx themes." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2660,7 +2548,6 @@ docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-ta name = "sphinxcontrib-applehelp" version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2676,7 +2563,6 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2692,7 +2578,6 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2708,7 +2593,6 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2723,7 +2607,6 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2739,7 +2622,6 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2755,7 +2637,6 @@ test = ["pytest"] name = "systemd-python" version = "235" description = "Python interface for libsystemd" -category = "main" optional = true python-versions = "*" files = [ @@ -2766,7 +2647,6 @@ files = [ name = "threadloop" version = "1.0.2" description = "Tornado IOLoop Backed Concurrent Futures" -category = "main" optional = true python-versions = "*" files = [ @@ -2781,7 +2661,6 @@ tornado = "*" name = "thrift" version = "0.16.0" description = "Python bindings for the Apache Thrift RPC system" -category = "main" optional = true python-versions = "*" files = [ @@ -2800,7 +2679,6 @@ twisted = ["twisted"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2812,7 +2690,6 @@ files = [ name = "tornado" version = "6.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -category = "main" optional = true python-versions = ">= 3.7" files = [ @@ -2833,7 +2710,6 @@ files = [ name = "towncrier" version = "22.12.0" description = "Building newsfiles for your project." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2856,7 +2732,6 @@ dev = ["furo", "packaging", "sphinx (>=5)", "twisted"] name = "treq" version = "22.2.0" description = "High-level Twisted HTTP Client API" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2879,7 +2754,6 @@ docs = ["sphinx (>=1.4.8)"] name = "twine" version = "4.0.2" description = "Collection of utilities for publishing packages on PyPI" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2902,7 +2776,6 @@ urllib3 = ">=1.26.0" name = "twisted" version = "22.10.0" description = "An asynchronous networking framework written in Python" -category = "main" optional = false python-versions = ">=3.7.1" files = [ @@ -2944,7 +2817,6 @@ windows-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0. name = "twisted-iocpsupport" version = "1.0.2" description = "An extension for use in the twisted I/O Completion Ports reactor." -category = "main" optional = false python-versions = "*" files = [ @@ -2966,7 +2838,6 @@ files = [ name = "txredisapi" version = "1.4.9" description = "non-blocking redis client for python" -category = "main" optional = true python-versions = "*" files = [ @@ -2982,7 +2853,6 @@ twisted = "*" name = "typed-ast" version = "1.5.4" description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3016,7 +2886,6 @@ files = [ name = "types-bleach" version = "6.0.0.3" description = "Typing stubs for bleach" -category = "dev" optional = false python-versions = "*" files = [ @@ -3028,7 +2897,6 @@ files = [ name = "types-commonmark" version = "0.9.2.3" description = "Typing stubs for commonmark" -category = "dev" optional = false python-versions = "*" files = [ @@ -3040,7 +2908,6 @@ files = [ name = "types-jsonschema" version = "4.17.0.8" description = "Typing stubs for jsonschema" -category = "dev" optional = false python-versions = "*" files = [ @@ -3052,7 +2919,6 @@ files = [ name = "types-netaddr" version = "0.8.0.8" description = "Typing stubs for netaddr" -category = "dev" optional = false python-versions = "*" files = [ @@ -3064,7 +2930,6 @@ files = [ name = "types-opentracing" version = "2.4.10.4" description = "Typing stubs for opentracing" -category = "dev" optional = false python-versions = "*" files = [ @@ -3076,7 +2941,6 @@ files = [ name = "types-pillow" version = "9.5.0.4" description = "Typing stubs for Pillow" -category = "dev" optional = false python-versions = "*" files = [ @@ -3088,7 +2952,6 @@ files = [ name = "types-psycopg2" version = "2.9.21.10" description = "Typing stubs for psycopg2" -category = "dev" optional = false python-versions = "*" files = [ @@ -3098,14 +2961,13 @@ files = [ [[package]] name = "types-pyopenssl" -version = "23.1.0.2" +version = "23.2.0.0" description = "Typing stubs for pyOpenSSL" -category = "dev" optional = false python-versions = "*" files = [ - {file = "types-pyOpenSSL-23.1.0.2.tar.gz", hash = "sha256:20b80971b86240e8432a1832bd8124cea49c3088c7bfc77dfd23be27ffe4a517"}, - {file = "types_pyOpenSSL-23.1.0.2-py3-none-any.whl", hash = "sha256:b050641aeff6dfebf231ad719bdac12d53b8ee818d4afb67b886333484629957"}, + {file = "types-pyOpenSSL-23.2.0.0.tar.gz", hash = "sha256:43e307e8dfb3a7a8208a19874ca060305f460c529d4eaca8a2669ea89499f244"}, + {file = "types_pyOpenSSL-23.2.0.0-py3-none-any.whl", hash = "sha256:ba803a99440b0c2e9ab4e197084aeefc55bdfe8a580d367b2aa4210810a21240"}, ] [package.dependencies] 
@@ -3115,7 +2977,6 @@ cryptography = ">=35.0.0" name = "types-pyyaml" version = "6.0.12.10" description = "Typing stubs for PyYAML" -category = "dev" optional = false python-versions = "*" files = [ @@ -3127,7 +2988,6 @@ files = [ name = "types-requests" version = "2.31.0.1" description = "Typing stubs for requests" -category = "dev" optional = false python-versions = "*" files = [ @@ -3142,7 +3002,6 @@ types-urllib3 = "*" name = "types-setuptools" version = "67.8.0.0" description = "Typing stubs for setuptools" -category = "dev" optional = false python-versions = "*" files = [ @@ -3154,7 +3013,6 @@ files = [ name = "types-urllib3" version = "1.26.25.8" description = "Typing stubs for urllib3" -category = "dev" optional = false python-versions = "*" files = [ @@ -3166,7 +3024,6 @@ files = [ name = "typing-extensions" version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3178,7 +3035,6 @@ files = [ name = "unpaddedbase64" version = "2.1.0" description = "Encode and decode Base64 without \"=\" padding" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -3190,7 +3046,6 @@ files = [ name = "urllib3" version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -3207,7 +3062,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" -category = "main" optional = false python-versions = "*" files = [ @@ -3219,7 +3073,6 @@ files = [ name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
-category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3304,7 +3157,6 @@ files = [ name = "xmlschema" version = "2.2.2" description = "An XML Schema validator and decoder" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -3324,7 +3176,6 @@ docs = ["Sphinx", "elementpath (>=4.0.0,<5.0.0)", "jinja2", "sphinx-rtd-theme"] name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3340,7 +3191,6 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more name = "zope-event" version = "4.6" description = "Very basic event publishing system" -category = "dev" optional = false python-versions = "*" files = [ @@ -3359,7 +3209,6 @@ test = ["zope.testrunner"] name = "zope-interface" version = "6.0" description = "Interfaces for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3407,7 +3256,6 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] name = "zope-schema" version = "7.0.1" description = "zope.interface extension for defining data schemas" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3425,18 +3273,18 @@ docs = ["Sphinx", "repoze.sphinx.autointerface"] test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"] [extras] -all = ["matrix-synapse-ldap3", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pysaml2", "authlib", "lxml", "sentry-sdk", "jaeger-client", "opentracing", "txredisapi", "hiredis", "Pympler", "pyicu"] +all = ["Pympler", "authlib", "hiredis", "jaeger-client", "lxml", "matrix-synapse-ldap3", "opentracing", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pyicu", "pysaml2", "sentry-sdk", "txredisapi"] cache-memory = ["Pympler"] jwt = ["authlib"] matrix-synapse-ldap3 = ["matrix-synapse-ldap3"] oidc = ["authlib"] opentracing = ["jaeger-client", "opentracing"] postgres = ["psycopg2", "psycopg2cffi", "psycopg2cffi-compat"] -redis = ["txredisapi", "hiredis"] +redis = ["hiredis", "txredisapi"] saml2 = ["pysaml2"] sentry = ["sentry-sdk"] systemd = ["systemd-python"] -test = ["parameterized", "idna"] +test = ["idna", "parameterized"] url-preview = ["lxml"] user-search = ["pyicu"] From 046e7e494a11f9a23c0dcd2defae595b35a37579 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:17:40 +0100 Subject: [PATCH 061/153] Bump phonenumbers from 8.13.11 to 8.13.13 (#15763) Bumps [phonenumbers](https://github.com/daviddrysdale/python-phonenumbers) from 8.13.11 to 8.13.13. - [Commits](https://github.com/daviddrysdale/python-phonenumbers/compare/v8.13.11...v8.13.13) --- updated-dependencies: - dependency-name: phonenumbers dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 228fccac9cc7..d726407c58ff 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1583,13 +1583,13 @@ files = [ [[package]] name = "phonenumbers" -version = "8.13.11" +version = "8.13.13" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." 
optional = false python-versions = "*" files = [ - {file = "phonenumbers-8.13.11-py2.py3-none-any.whl", hash = "sha256:107469114fd297258a485bdf8238d0522cb392db1257faf2bf23384ecbdb0e8a"}, - {file = "phonenumbers-8.13.11.tar.gz", hash = "sha256:3e3274d88cab3609b55ff5b93417075dbca2d13064f103fbf562e0ea1dda0f9a"}, + {file = "phonenumbers-8.13.13-py2.py3-none-any.whl", hash = "sha256:55657adb607484aba6d56270b8a1f9b302f35496076e6c02051d06ed366374d9"}, + {file = "phonenumbers-8.13.13.tar.gz", hash = "sha256:4bdf8c989aff0cdb105aef170ad2c21f14b4537bcb32cf349f1f710df992a40a"}, ] [[package]] From aad7e2d0c18ee7ba87bef3750da3c962acc2fd95 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:19:01 +0100 Subject: [PATCH 062/153] Bump sentry-sdk from 1.25.0 to 1.25.1 (#15764) Bumps [sentry-sdk](https://github.com/getsentry/sentry-python) from 1.25.0 to 1.25.1. - [Release notes](https://github.com/getsentry/sentry-python/releases) - [Changelog](https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md) - [Commits](https://github.com/getsentry/sentry-python/compare/1.25.0...1.25.1) --- updated-dependencies: - dependency-name: sentry-sdk dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index d726407c58ff..f2221680a8f5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2298,13 +2298,13 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "sentry-sdk" -version = "1.25.0" +version = "1.25.1" description = "Python client for Sentry (https://sentry.io)" optional = true python-versions = "*" files = [ - {file = "sentry-sdk-1.25.0.tar.gz", hash = "sha256:5be3296fc574fa8a4d9b213b4dcf8c8d0246c08f8bd78315c6286f386c37555a"}, - {file = "sentry_sdk-1.25.0-py2.py3-none-any.whl", hash = "sha256:fe85cf5d0b3d0aa3480df689f9f6dc487de783defb0a95043368375dc893645e"}, + {file = "sentry-sdk-1.25.1.tar.gz", hash = "sha256:aa796423eb6a2f4a8cd7a5b02ba6558cb10aab4ccdc0537f63a47b038c520c38"}, + {file = "sentry_sdk-1.25.1-py2.py3-none-any.whl", hash = "sha256:79afb7c896014038e358401ad1d36889f97a129dfa8031c49b3f238cd1aa3935"}, ] [package.dependencies] From 0aa731cb6f1e145cf399a948e14f77d4e3720190 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:19:43 +0100 Subject: [PATCH 063/153] Bump pydantic from 1.10.8 to 1.10.9 (#15762) Bumps [pydantic](https://github.com/pydantic/pydantic) from 1.10.8 to 1.10.9. - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v1.10.8...v1.10.9) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 74 ++++++++++++++++++++++++++--------------------------- 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/poetry.lock b/poetry.lock index f2221680a8f5..5e4168289433 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1826,47 +1826,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.8" +version = "1.10.9" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"}, - {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"}, - {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"}, - {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"}, - {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"}, - {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"}, - {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"}, - {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"}, - {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"}, - {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"}, + {file = "pydantic-1.10.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e692dec4a40bfb40ca530e07805b1208c1de071a18d26af4a2a0d79015b352ca"}, + {file = "pydantic-1.10.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c52eb595db83e189419bf337b59154bdcca642ee4b2a09e5d7797e41ace783f"}, + {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939328fd539b8d0edf244327398a667b6b140afd3bf7e347cf9813c736211896"}, + {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b48d3d634bca23b172f47f2335c617d3fcb4b3ba18481c96b7943a4c634f5c8d"}, + {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f0b7628fb8efe60fe66fd4adadd7ad2304014770cdc1f4934db41fe46cc8825f"}, + {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:e1aa5c2410769ca28aa9a7841b80d9d9a1c5f223928ca8bec7e7c9a34d26b1d4"}, + {file = "pydantic-1.10.9-cp310-cp310-win_amd64.whl", hash = "sha256:eec39224b2b2e861259d6f3c8b6290d4e0fbdce147adb797484a42278a1a486f"}, + {file = "pydantic-1.10.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d111a21bbbfd85c17248130deac02bbd9b5e20b303338e0dbe0faa78330e37e0"}, + {file = "pydantic-1.10.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e9aec8627a1a6823fc62fb96480abe3eb10168fd0d859ee3d3b395105ae19a7"}, + {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07293ab08e7b4d3c9d7de4949a0ea571f11e4557d19ea24dd3ae0c524c0c334d"}, + {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee829b86ce984261d99ff2fd6e88f2230068d96c2a582f29583ed602ef3fc2c"}, + {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b466a23009ff5cdd7076eb56aca537c745ca491293cc38e72bf1e0e00de5b91"}, + {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7847ca62e581e6088d9000f3c497267868ca2fa89432714e21a4fb33a04d52e8"}, + {file = "pydantic-1.10.9-cp311-cp311-win_amd64.whl", hash = "sha256:7845b31959468bc5b78d7b95ec52fe5be32b55d0d09983a877cca6aedc51068f"}, + {file = "pydantic-1.10.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:517a681919bf880ce1dac7e5bc0c3af1e58ba118fd774da2ffcd93c5f96eaece"}, + {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67195274fd27780f15c4c372f4ba9a5c02dad6d50647b917b6a92bf00b3d301a"}, + {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2196c06484da2b3fded1ab6dbe182bdabeb09f6318b7fdc412609ee2b564c49a"}, + {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6257bb45ad78abacda13f15bde5886efd6bf549dd71085e64b8dcf9919c38b60"}, + {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3283b574b01e8dbc982080d8287c968489d25329a463b29a90d4157de4f2baaf"}, + {file = "pydantic-1.10.9-cp37-cp37m-win_amd64.whl", hash = "sha256:5f8bbaf4013b9a50e8100333cc4e3fa2f81214033e05ac5aa44fa24a98670a29"}, + {file = "pydantic-1.10.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9cd67fb763248cbe38f0593cd8611bfe4b8ad82acb3bdf2b0898c23415a1f82"}, + {file = "pydantic-1.10.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f50e1764ce9353be67267e7fd0da08349397c7db17a562ad036aa7c8f4adfdb6"}, + {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73ef93e5e1d3c8e83f1ff2e7fdd026d9e063c7e089394869a6e2985696693766"}, + {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:128d9453d92e6e81e881dd7e2484e08d8b164da5507f62d06ceecf84bf2e21d3"}, + {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ad428e92ab68798d9326bb3e5515bc927444a3d71a93b4a2ca02a8a5d795c572"}, + {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fab81a92f42d6d525dd47ced310b0c3e10c416bbfae5d59523e63ea22f82b31e"}, + {file = "pydantic-1.10.9-cp38-cp38-win_amd64.whl", hash = "sha256:963671eda0b6ba6926d8fc759e3e10335e1dc1b71ff2a43ed2efd6996634dafb"}, + {file = "pydantic-1.10.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:970b1bdc6243ef663ba5c7e36ac9ab1f2bfecb8ad297c9824b542d41a750b298"}, + {file = "pydantic-1.10.9-cp39-cp39-macosx_11_0_arm64.whl", 
hash = "sha256:7e1d5290044f620f80cf1c969c542a5468f3656de47b41aa78100c5baa2b8276"}, + {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fcff3c7df7adff880622a98022626f4f6dbce6639a88a15a3ce0f96466cb60"}, + {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0da48717dc9495d3a8f215e0d012599db6b8092db02acac5e0d58a65248ec5bc"}, + {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0a2aabdc73c2a5960e87c3ffebca6ccde88665616d1fd6d3db3178ef427b267a"}, + {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9863b9420d99dfa9c064042304868e8ba08e89081428a1c471858aa2af6f57c4"}, + {file = "pydantic-1.10.9-cp39-cp39-win_amd64.whl", hash = "sha256:e7c9900b43ac14110efa977be3da28931ffc74c27e96ee89fbcaaf0b0fe338e1"}, + {file = "pydantic-1.10.9-py3-none-any.whl", hash = "sha256:6cafde02f6699ce4ff643417d1a9223716ec25e228ddc3b436fe7e2d25a1f305"}, + {file = "pydantic-1.10.9.tar.gz", hash = "sha256:95c70da2cd3b6ddf3b9645ecaa8d98f3d80c606624b6d245558d202cd23ea3be"}, ] [package.dependencies] From 9e321e0098d069711674371c8c3a3cdc80df0c16 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:20:55 +0100 Subject: [PATCH 064/153] Bump pyopenssl from 23.1.1 to 23.2.0 (#15765) Bumps [pyopenssl](https://github.com/pyca/pyopenssl) from 23.1.1 to 23.2.0. - [Changelog](https://github.com/pyca/pyopenssl/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/pyopenssl/compare/23.1.1...23.2.0) --- updated-dependencies: - dependency-name: pyopenssl dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5e4168289433..cf4a89c85ad1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1991,17 +1991,17 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyopenssl" -version = "23.1.1" +version = "23.2.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.6" files = [ - {file = "pyOpenSSL-23.1.1-py3-none-any.whl", hash = "sha256:9e0c526404a210df9d2b18cd33364beadb0dc858a739b885677bc65e105d4a4c"}, - {file = "pyOpenSSL-23.1.1.tar.gz", hash = "sha256:841498b9bec61623b1b6c47ebbc02367c07d60e0e195f19790817f10cc8db0b7"}, + {file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"}, + {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, ] [package.dependencies] -cryptography = ">=38.0.0,<41" +cryptography = ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42" [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] From 42eb4fea1c671bd7a3eacf329c9afc6644081e4f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:21:20 +0100 Subject: [PATCH 065/153] Bump serde from 1.0.163 to 1.0.164 (#15760) Bumps [serde](https://github.com/serde-rs/serde) from 1.0.163 to 1.0.164. 
- [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.163...v1.0.164) --- updated-dependencies: - dependency-name: serde dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 08331385c07b..f34a72c26917 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -320,18 +320,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "serde" -version = "1.0.163" +version = "1.0.164" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2" +checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.163" +version = "1.0.164" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e" +checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" dependencies = [ "proc-macro2", "quote", From 0b104364f9f118be0ec722894650fad9583bf59c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:22:21 +0100 Subject: [PATCH 066/153] Bump pyo3-log from 0.8.1 to 0.8.2 (#15759) Bumps [pyo3-log](https://github.com/vorner/pyo3-log) from 0.8.1 to 0.8.2. - [Changelog](https://github.com/vorner/pyo3-log/blob/main/CHANGELOG.md) - [Commits](https://github.com/vorner/pyo3-log/compare/v0.8.1...v0.8.2) --- updated-dependencies: - dependency-name: pyo3-log dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f34a72c26917..4f75452b3e34 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -229,9 +229,9 @@ dependencies = [ [[package]] name = "pyo3-log" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9c8b57fe71fb5dcf38970ebedc2b1531cf1c14b1b9b4c560a182a57e115575c" +checksum = "c94ff6535a6bae58d7d0b85e60d4c53f7f84d0d0aa35d6a28c3f3e70bfe51444" dependencies = [ "arc-swap", "log", From ba97b39881e296f4775b8f6dd18edb98a3dc733f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 12 Jun 2023 14:27:11 +0100 Subject: [PATCH 067/153] Bump minimum supported Rust version (#15768) Important crates such as `log` and `regex` have bumped theirs to 1.60.0 as well. 
--- .github/workflows/tests.yml | 18 +++++++++--------- changelog.d/15768.misc | 1 + docs/upgrade.md | 8 ++++++++ rust/Cargo.toml | 2 +- 4 files changed, 19 insertions(+), 10 deletions(-) create mode 100644 changelog.d/15768.misc diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 02a4be3a24a1..a0d1c24e9075 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -35,7 +35,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 with: @@ -93,7 +93,7 @@ jobs: uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - name: Setup Poetry @@ -150,7 +150,7 @@ jobs: with: ref: ${{ github.event.pull_request.head.sha }} - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 with: @@ -167,7 +167,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 with: components: clippy - uses: Swatinem/rust-cache@v2 @@ -268,7 +268,7 @@ jobs: postgres:${{ matrix.job.postgres-version }} - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 @@ -308,7 +308,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 # There aren't wheels for some of the older deps, so we need to install @@ -416,7 +416,7 @@ jobs: run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - name: Run SyTest @@ -556,7 +556,7 @@ jobs: path: synapse - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - uses: actions/setup-go@v4 @@ -584,7 +584,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - run: cargo test diff --git a/changelog.d/15768.misc b/changelog.d/15768.misc new file mode 100644 index 000000000000..bc4b86323cd0 --- /dev/null +++ b/changelog.d/15768.misc @@ -0,0 +1 @@ +Bump minimum supported Rust version to 1.60.0. diff --git a/docs/upgrade.md b/docs/upgrade.md index 49ab00c05760..4cd38b13932a 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -87,6 +87,14 @@ process, for example: wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb ``` +# Upgrading to v1.86.0 + +## Minimum supported Rust version + +The minimum supported Rust version has been increased from v1.58.1 to v1.60.0. +Users building from source will need to ensure their `rustc` version is up to +date. 
+ # Upgrading to v1.85.0 diff --git a/rust/Cargo.toml b/rust/Cargo.toml index 533a8cc6776f..3ead01c0521e 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -7,7 +7,7 @@ name = "synapse" version = "0.1.0" edition = "2021" -rust-version = "1.58.1" +rust-version = "1.60.0" [lib] name = "synapse" From 8afc9a4cda9b884bde1f6c87f7cb3087d04418a5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Jun 2023 10:05:13 +0100 Subject: [PATCH 068/153] Bump log from 0.4.18 to 0.4.19 (#15761) Bumps [log](https://github.com/rust-lang/log) from 0.4.18 to 0.4.19. - [Release notes](https://github.com/rust-lang/log/releases) - [Changelog](https://github.com/rust-lang/log/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/log/compare/0.4.18...0.4.19) --- updated-dependencies: - dependency-name: log dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4f75452b3e34..9724af5dca3e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -132,9 +132,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.18" +version = "0.4.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "518ef76f2f87365916b142844c16d8fefd85039bc5699050210a7778ee1cd1de" +checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" [[package]] name = "memchr" From 99c850f79821e12ad1895b9505f8612752deea52 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Jun 2023 10:05:29 +0100 Subject: [PATCH 069/153] Bump regex from 1.7.3 to 1.8.4 (#15769) Bumps [regex](https://github.com/rust-lang/regex) from 1.7.3 to 1.8.4. - [Release notes](https://github.com/rust-lang/regex/releases) - [Changelog](https://github.com/rust-lang/regex/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/regex/compare/1.7.3...1.8.4) --- updated-dependencies: - dependency-name: regex dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9724af5dca3e..9bb8225226f7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "aho-corasick" -version = "0.7.19" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e" +checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" dependencies = [ "memchr", ] @@ -291,9 +291,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.7.3" +version = "1.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" +checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f" dependencies = [ "aho-corasick", "memchr", @@ -302,9 +302,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.29" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" +checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78" [[package]] name = "ryu" From 9966eb10a3671958992b57d723fab27b57b6faff Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Tue, 13 Jun 2023 14:30:51 +0200 Subject: [PATCH 070/153] 1.86.0rc1 --- CHANGES.md | 69 +++++++++++++++++++++++++++++++++++++++ changelog.d/12504.misc | 1 - changelog.d/14213.misc | 1 - changelog.d/15388.feature | 1 - changelog.d/15450.feature | 1 - changelog.d/15520.feature | 1 - changelog.d/15582.feature | 1 - changelog.d/15609.bugfix | 1 - changelog.d/15649.misc | 1 - changelog.d/15650.misc | 1 - changelog.d/15674.feature | 1 - changelog.d/15675.misc | 1 - changelog.d/15689.misc | 1 - changelog.d/15690.misc | 1 - changelog.d/15694.misc | 1 - changelog.d/15695.bugfix | 1 - changelog.d/15697.misc | 1 - changelog.d/15705.feature | 1 - changelog.d/15709.misc | 1 - changelog.d/15710.feature | 1 - changelog.d/15721.misc | 1 - changelog.d/15723.misc | 1 - changelog.d/15725.misc | 1 - changelog.d/15726.misc | 1 - changelog.d/15729.misc | 1 - changelog.d/15731.misc | 1 - changelog.d/15732.doc | 1 - changelog.d/15733.misc | 1 - changelog.d/15740.feature | 1 - changelog.d/15752.misc | 1 - changelog.d/15768.misc | 1 - debian/changelog | 6 ++++ pyproject.toml | 2 +- 33 files changed, 76 insertions(+), 31 deletions(-) delete mode 100644 changelog.d/12504.misc delete mode 100644 changelog.d/14213.misc delete mode 100644 changelog.d/15388.feature delete mode 100644 changelog.d/15450.feature delete mode 100644 changelog.d/15520.feature delete mode 100644 changelog.d/15582.feature delete mode 100644 changelog.d/15609.bugfix delete mode 100644 changelog.d/15649.misc delete mode 100644 changelog.d/15650.misc delete mode 100644 changelog.d/15674.feature delete mode 100644 changelog.d/15675.misc delete mode 100644 changelog.d/15689.misc delete mode 100644 changelog.d/15690.misc delete mode 100644 changelog.d/15694.misc delete mode 100644 changelog.d/15695.bugfix delete mode 100644 changelog.d/15697.misc delete mode 100644 changelog.d/15705.feature delete mode 100644 changelog.d/15709.misc delete mode 100644 changelog.d/15710.feature delete mode 100644 changelog.d/15721.misc delete mode 100644 changelog.d/15723.misc delete mode 100644 
changelog.d/15725.misc delete mode 100644 changelog.d/15726.misc delete mode 100644 changelog.d/15729.misc delete mode 100644 changelog.d/15731.misc delete mode 100644 changelog.d/15732.doc delete mode 100644 changelog.d/15733.misc delete mode 100644 changelog.d/15740.feature delete mode 100644 changelog.d/15752.misc delete mode 100644 changelog.d/15768.misc diff --git a/CHANGES.md b/CHANGES.md index 5debbc35b6b0..99c246a3bc2d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,72 @@ +Synapse 1.86.0rc1 (2023-06-13) +============================== + +Features +-------- + +- Stable support for [MSC3882](https://github.com/matrix-org/matrix-spec-proposals/pull/3882) to allow an existing device/session to generate a login token for use on a new device/session. ([\#15388](https://github.com/matrix-org/synapse/issues/15388)) +- Support resolving a room's [canonical alias](https://spec.matrix.org/v1.7/client-server-api/#mroomcanonical_alias) via the module API. ([\#15450](https://github.com/matrix-org/synapse/issues/15450)) +- Enable support for [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952): intentional mentions. ([\#15520](https://github.com/matrix-org/synapse/issues/15520)) +- Experimental [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) support: delegate auth to an OIDC provider. ([\#15582](https://github.com/matrix-org/synapse/issues/15582)) +- Add Syanpse version deploy annotations to Grafana dashboard which enables easy correlation between behavior changes witnessed in a graph to a certain Synapse version and nail down regressions. ([\#15674](https://github.com/matrix-org/synapse/issues/15674)) +- Add a catch-all * to the supported relation types when redacting an event and its related events. This is an update to [MSC3912](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) implementation. ([\#15705](https://github.com/matrix-org/synapse/issues/15705)) +- Speed up `/messages` by backfilling in the background when there are no backward extremities where we are directly paginating. ([\#15710](https://github.com/matrix-org/synapse/issues/15710)) +- Expose a metric reporting the database background update status. ([\#15740](https://github.com/matrix-org/synapse/issues/15740)) + + +Bugfixes +-------- + +- Correctly clear caches when we delete a room. ([\#15609](https://github.com/matrix-org/synapse/issues/15609)) +- Check permissions for enabling encryption earlier during room creation to avoid creating broken rooms. ([\#15695](https://github.com/matrix-org/synapse/issues/15695)) + + +Improved Documentation +---------------------- + +- Simplify query to find participating servers in a room. ([\#15732](https://github.com/matrix-org/synapse/issues/15732)) + + +Internal Changes +---------------- + +- Allow for the configuration of max request retries and min/max retry delays in the matrix federation client. ([\#12504](https://github.com/matrix-org/synapse/issues/12504)) +- Log when events are (maybe unexpectedly) filtered out of responses in tests. ([\#14213](https://github.com/matrix-org/synapse/issues/14213)) +- Read from column `full_user_id` rather than `user_id` of tables `profiles` and `user_filters`. ([\#15649](https://github.com/matrix-org/synapse/issues/15649)) +- Add support for tracing functions which return `Awaitable`s. ([\#15650](https://github.com/matrix-org/synapse/issues/15650)) +- Cache requests for user's devices over federation. 
([\#15675](https://github.com/matrix-org/synapse/issues/15675)) +- Add fully qualified docker image names to Dockerfiles. ([\#15689](https://github.com/matrix-org/synapse/issues/15689)) +- Remove some unused code. ([\#15690](https://github.com/matrix-org/synapse/issues/15690)) +- Improve type hints. ([\#15694](https://github.com/matrix-org/synapse/issues/15694), [\#15697](https://github.com/matrix-org/synapse/issues/15697)) +- Update docstring and traces on `maybe_backfill()` functions. ([\#15709](https://github.com/matrix-org/synapse/issues/15709)) +- Add context for when/why to use the `long_retries` option when sending Federation requests. ([\#15721](https://github.com/matrix-org/synapse/issues/15721)) +- Removed some unused fields. ([\#15723](https://github.com/matrix-org/synapse/issues/15723)) +- Update federation error to more plainly explain we can only authorize our own membership events. ([\#15725](https://github.com/matrix-org/synapse/issues/15725)) +- Prevent the `latest_deps` and `twisted_trunk` daily GitHub Actions workflows from running on forks of the codebase. ([\#15726](https://github.com/matrix-org/synapse/issues/15726)) +- Improve performance of user directory search. ([\#15729](https://github.com/matrix-org/synapse/issues/15729)) +- Remove redundant table join with `room_memberships` when doing a `is_host_joined()`/`is_host_invited()` call (`membership` is already part of the `current_state_events`). ([\#15731](https://github.com/matrix-org/synapse/issues/15731)) +- Remove superfluous `room_memberships` join from background update. ([\#15733](https://github.com/matrix-org/synapse/issues/15733)) +- Speed up typechecking CI. ([\#15752](https://github.com/matrix-org/synapse/issues/15752)) +- Bump minimum supported Rust version to 1.60.0. ([\#15768](https://github.com/matrix-org/synapse/issues/15768)) + +### Updates to locked dependencies + +* Bump importlib-metadata from 6.1.0 to 6.6.0. ([\#15711](https://github.com/matrix-org/synapse/issues/15711)) +* Bump library/redis from 6-bullseye to 7-bullseye in /docker. ([\#15712](https://github.com/matrix-org/synapse/issues/15712)) +* Bump log from 0.4.18 to 0.4.19. ([\#15761](https://github.com/matrix-org/synapse/issues/15761)) +* Bump phonenumbers from 8.13.11 to 8.13.13. ([\#15763](https://github.com/matrix-org/synapse/issues/15763)) +* Bump pyasn1 from 0.4.8 to 0.5.0. ([\#15713](https://github.com/matrix-org/synapse/issues/15713)) +* Bump pydantic from 1.10.8 to 1.10.9. ([\#15762](https://github.com/matrix-org/synapse/issues/15762)) +* Bump pyo3-log from 0.8.1 to 0.8.2. ([\#15759](https://github.com/matrix-org/synapse/issues/15759)) +* Bump pyopenssl from 23.1.1 to 23.2.0. ([\#15765](https://github.com/matrix-org/synapse/issues/15765)) +* Bump regex from 1.7.3 to 1.8.4. ([\#15769](https://github.com/matrix-org/synapse/issues/15769)) +* Bump sentry-sdk from 1.22.1 to 1.25.0. ([\#15714](https://github.com/matrix-org/synapse/issues/15714)) +* Bump sentry-sdk from 1.25.0 to 1.25.1. ([\#15764](https://github.com/matrix-org/synapse/issues/15764)) +* Bump serde from 1.0.163 to 1.0.164. ([\#15760](https://github.com/matrix-org/synapse/issues/15760)) +* Bump types-jsonschema from 4.17.0.7 to 4.17.0.8. ([\#15716](https://github.com/matrix-org/synapse/issues/15716)) +* Bump types-pyopenssl from 23.1.0.2 to 23.2.0.0. ([\#15766](https://github.com/matrix-org/synapse/issues/15766)) +* Bump types-requests from 2.31.0.0 to 2.31.0.1. 
([\#15715](https://github.com/matrix-org/synapse/issues/15715)) + Synapse 1.85.2 (2023-06-08) =========================== diff --git a/changelog.d/12504.misc b/changelog.d/12504.misc deleted file mode 100644 index 0bebaa213d9e..000000000000 --- a/changelog.d/12504.misc +++ /dev/null @@ -1 +0,0 @@ -Allow for the configuration of max request retries and min/max retry delays in the matrix federation client. diff --git a/changelog.d/14213.misc b/changelog.d/14213.misc deleted file mode 100644 index b0689f3d1574..000000000000 --- a/changelog.d/14213.misc +++ /dev/null @@ -1 +0,0 @@ -Log when events are (maybe unexpectedly) filtered out of responses in tests. diff --git a/changelog.d/15388.feature b/changelog.d/15388.feature deleted file mode 100644 index 6cc55cafa2e1..000000000000 --- a/changelog.d/15388.feature +++ /dev/null @@ -1 +0,0 @@ -Stable support for [MSC3882](https://github.com/matrix-org/matrix-spec-proposals/pull/3882) to allow an existing device/session to generate a login token for use on a new device/session. \ No newline at end of file diff --git a/changelog.d/15450.feature b/changelog.d/15450.feature deleted file mode 100644 index 2102381143ba..000000000000 --- a/changelog.d/15450.feature +++ /dev/null @@ -1 +0,0 @@ -Support resolving a room's [canonical alias](https://spec.matrix.org/v1.7/client-server-api/#mroomcanonical_alias) via the module API. \ No newline at end of file diff --git a/changelog.d/15520.feature b/changelog.d/15520.feature deleted file mode 100644 index f4fd40ab9451..000000000000 --- a/changelog.d/15520.feature +++ /dev/null @@ -1 +0,0 @@ -Enable support for [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952): intentional mentions. diff --git a/changelog.d/15582.feature b/changelog.d/15582.feature deleted file mode 100644 index 00959500a54e..000000000000 --- a/changelog.d/15582.feature +++ /dev/null @@ -1 +0,0 @@ -Experimental [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) support: delegate auth to an OIDC provider. diff --git a/changelog.d/15609.bugfix b/changelog.d/15609.bugfix deleted file mode 100644 index b5a990cfec1e..000000000000 --- a/changelog.d/15609.bugfix +++ /dev/null @@ -1 +0,0 @@ -Correctly clear caches when we delete a room. diff --git a/changelog.d/15649.misc b/changelog.d/15649.misc deleted file mode 100644 index fca38abe0f60..000000000000 --- a/changelog.d/15649.misc +++ /dev/null @@ -1 +0,0 @@ -Read from column `full_user_id` rather than `user_id` of tables `profiles` and `user_filters`. diff --git a/changelog.d/15650.misc b/changelog.d/15650.misc deleted file mode 100644 index 9bbad113e11e..000000000000 --- a/changelog.d/15650.misc +++ /dev/null @@ -1 +0,0 @@ -Add support for tracing functions which return `Awaitable`s. diff --git a/changelog.d/15674.feature b/changelog.d/15674.feature deleted file mode 100644 index 68cf207dc0f6..000000000000 --- a/changelog.d/15674.feature +++ /dev/null @@ -1 +0,0 @@ -Add Syanpse version deploy annotations to Grafana dashboard which enables easy correlation between behavior changes witnessed in a graph to a certain Synapse version and nail down regressions. diff --git a/changelog.d/15675.misc b/changelog.d/15675.misc deleted file mode 100644 index 05538fdbeff9..000000000000 --- a/changelog.d/15675.misc +++ /dev/null @@ -1 +0,0 @@ -Cache requests for user's devices over federation. 
diff --git a/changelog.d/15689.misc b/changelog.d/15689.misc deleted file mode 100644 index 4262cc951507..000000000000 --- a/changelog.d/15689.misc +++ /dev/null @@ -1 +0,0 @@ -Add fully qualified docker image names to Dockerfiles. diff --git a/changelog.d/15690.misc b/changelog.d/15690.misc deleted file mode 100644 index c6c259eb7d14..000000000000 --- a/changelog.d/15690.misc +++ /dev/null @@ -1 +0,0 @@ -Remove some unused code. diff --git a/changelog.d/15694.misc b/changelog.d/15694.misc deleted file mode 100644 index 93ceaeafc9b9..000000000000 --- a/changelog.d/15694.misc +++ /dev/null @@ -1 +0,0 @@ -Improve type hints. diff --git a/changelog.d/15695.bugfix b/changelog.d/15695.bugfix deleted file mode 100644 index 99bf1fe05e34..000000000000 --- a/changelog.d/15695.bugfix +++ /dev/null @@ -1 +0,0 @@ -Check permissions for enabling encryption earlier during room creation to avoid creating broken rooms. diff --git a/changelog.d/15697.misc b/changelog.d/15697.misc deleted file mode 100644 index 93ceaeafc9b9..000000000000 --- a/changelog.d/15697.misc +++ /dev/null @@ -1 +0,0 @@ -Improve type hints. diff --git a/changelog.d/15705.feature b/changelog.d/15705.feature deleted file mode 100644 index e3cbb5a12e28..000000000000 --- a/changelog.d/15705.feature +++ /dev/null @@ -1 +0,0 @@ -Add a catch-all * to the supported relation types when redacting an event and its related events. This is an update to [MSC3912](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) implementation. diff --git a/changelog.d/15709.misc b/changelog.d/15709.misc deleted file mode 100644 index e9ce84a94021..000000000000 --- a/changelog.d/15709.misc +++ /dev/null @@ -1 +0,0 @@ -Update docstring and traces on `maybe_backfill()` functions. diff --git a/changelog.d/15710.feature b/changelog.d/15710.feature deleted file mode 100644 index fe77a2fef6e2..000000000000 --- a/changelog.d/15710.feature +++ /dev/null @@ -1 +0,0 @@ -Speed up `/messages` by backfilling in the background when there are no backward extremities where we are directly paginating. diff --git a/changelog.d/15721.misc b/changelog.d/15721.misc deleted file mode 100644 index f4d892daf970..000000000000 --- a/changelog.d/15721.misc +++ /dev/null @@ -1 +0,0 @@ -Add context for when/why to use the `long_retries` option when sending Federation requests. diff --git a/changelog.d/15723.misc b/changelog.d/15723.misc deleted file mode 100644 index ba331adca7f2..000000000000 --- a/changelog.d/15723.misc +++ /dev/null @@ -1 +0,0 @@ -Removed some unused fields. diff --git a/changelog.d/15725.misc b/changelog.d/15725.misc deleted file mode 100644 index 6c7a8a41d883..000000000000 --- a/changelog.d/15725.misc +++ /dev/null @@ -1 +0,0 @@ -Update federation error to more plainly explain we can only authorize our own membership events. diff --git a/changelog.d/15726.misc b/changelog.d/15726.misc deleted file mode 100644 index 941e541e7766..000000000000 --- a/changelog.d/15726.misc +++ /dev/null @@ -1 +0,0 @@ -Prevent the `latest_deps` and `twisted_trunk` daily GitHub Actions workflows from running on forks of the codebase. \ No newline at end of file diff --git a/changelog.d/15729.misc b/changelog.d/15729.misc deleted file mode 100644 index 394025430535..000000000000 --- a/changelog.d/15729.misc +++ /dev/null @@ -1 +0,0 @@ -Improve performance of user directory search. 
diff --git a/changelog.d/15731.misc b/changelog.d/15731.misc deleted file mode 100644 index 906bc2696254..000000000000 --- a/changelog.d/15731.misc +++ /dev/null @@ -1 +0,0 @@ -Remove redundant table join with `room_memberships` when doing a `is_host_joined()`/`is_host_invited()` call (`membership` is already part of the `current_state_events`). diff --git a/changelog.d/15732.doc b/changelog.d/15732.doc deleted file mode 100644 index b0e8639df78d..000000000000 --- a/changelog.d/15732.doc +++ /dev/null @@ -1 +0,0 @@ -Simplify query to find participating servers in a room. diff --git a/changelog.d/15733.misc b/changelog.d/15733.misc deleted file mode 100644 index 3ae7be3c27d1..000000000000 --- a/changelog.d/15733.misc +++ /dev/null @@ -1 +0,0 @@ -Remove superfluous `room_memberships` join from background update. diff --git a/changelog.d/15740.feature b/changelog.d/15740.feature deleted file mode 100644 index fed342ea55d8..000000000000 --- a/changelog.d/15740.feature +++ /dev/null @@ -1 +0,0 @@ -Expose a metric reporting the database background update status. diff --git a/changelog.d/15752.misc b/changelog.d/15752.misc deleted file mode 100644 index 7e373b12750b..000000000000 --- a/changelog.d/15752.misc +++ /dev/null @@ -1 +0,0 @@ -Speed up typechecking CI. diff --git a/changelog.d/15768.misc b/changelog.d/15768.misc deleted file mode 100644 index bc4b86323cd0..000000000000 --- a/changelog.d/15768.misc +++ /dev/null @@ -1 +0,0 @@ -Bump minimum supported Rust version to 1.60.0. diff --git a/debian/changelog b/debian/changelog index a7503ea60a7a..1c13433c4775 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.86.0~rc1) stable; urgency=medium + + * New Synapse release 1.86.0rc1. + + -- Synapse Packaging team Tue, 13 Jun 2023 14:30:45 +0200 + matrix-synapse-py3 (1.85.2) stable; urgency=medium * New Synapse release 1.85.2. diff --git a/pyproject.toml b/pyproject.toml index d42d7644d80e..6bbbf9500199 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.85.2" +version = "1.86.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0" From 629115836f9d32aad8e2afcf98196753877d70fd Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Tue, 13 Jun 2023 14:38:53 +0200 Subject: [PATCH 071/153] Fix changelog typo --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 99c246a3bc2d..5412581eef25 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -8,7 +8,7 @@ Features - Support resolving a room's [canonical alias](https://spec.matrix.org/v1.7/client-server-api/#mroomcanonical_alias) via the module API. ([\#15450](https://github.com/matrix-org/synapse/issues/15450)) - Enable support for [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952): intentional mentions. ([\#15520](https://github.com/matrix-org/synapse/issues/15520)) - Experimental [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) support: delegate auth to an OIDC provider. ([\#15582](https://github.com/matrix-org/synapse/issues/15582)) -- Add Syanpse version deploy annotations to Grafana dashboard which enables easy correlation between behavior changes witnessed in a graph to a certain Synapse version and nail down regressions. 
([\#15674](https://github.com/matrix-org/synapse/issues/15674)) +- Add Synapse version deploy annotations to Grafana dashboard which enables easy correlation between behavior changes witnessed in a graph to a certain Synapse version and nail down regressions. ([\#15674](https://github.com/matrix-org/synapse/issues/15674)) - Add a catch-all * to the supported relation types when redacting an event and its related events. This is an update to [MSC3912](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) implementation. ([\#15705](https://github.com/matrix-org/synapse/issues/15705)) - Speed up `/messages` by backfilling in the background when there are no backward extremities where we are directly paginating. ([\#15710](https://github.com/matrix-org/synapse/issues/15710)) - Expose a metric reporting the database background update status. ([\#15740](https://github.com/matrix-org/synapse/issues/15740)) From df945e0d7cd3cc78e54002115ae5e5793ed0a116 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 13 Jun 2023 12:07:55 -0400 Subject: [PATCH 072/153] Fix MSC3983 support: Use the unstable /keys/claim federation endpoint if multiple keys are requested (#15755) --- changelog.d/15755.misc | 1 + synapse/federation/federation_client.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15755.misc diff --git a/changelog.d/15755.misc b/changelog.d/15755.misc new file mode 100644 index 000000000000..a65340d380e3 --- /dev/null +++ b/changelog.d/15755.misc @@ -0,0 +1 @@ +Fix requesting multiple keys at once over federation, related to [MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983). diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index a2cf3a96c611..e5359ca5588b 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -260,7 +260,9 @@ async def claim_client_keys( use_unstable = False for user_id, one_time_keys in query.items(): for device_id, algorithms in one_time_keys.items(): - if any(count > 1 for count in algorithms.values()): + # If more than one algorithm is requested, attempt to use the unstable + # endpoint. + if sum(algorithms.values()) > 1: use_unstable = True if algorithms: # For the stable query, choose only the first algorithm. @@ -296,6 +298,7 @@ async def claim_client_keys( else: logger.debug("Skipping unstable claim client keys API") + # TODO Potentially attempt multiple queries and combine the results? return await self.transport_layer.claim_client_keys( user, destination, content, timeout ) From 0757d59ec4f3275e30907825b4dfb0fdbdce9006 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 13 Jun 2023 12:31:08 -0500 Subject: [PATCH 073/153] Avoid backfill when we already have messages to return (#15737) We now only block the client to backfill when we see a large gap in the events (more than 2 events missing in a row according to `depth`), more than 3 single-event holes, or not enough messages to fill the response. Otherwise, we return the messages directly to the client and backfill in the background for eventual consistency sake. 
Fix https://github.com/matrix-org/synapse/issues/15696 --- changelog.d/15737.feature | 1 + synapse/handlers/pagination.py | 137 +++++++++++++++++++++++++++------ 2 files changed, 116 insertions(+), 22 deletions(-) create mode 100644 changelog.d/15737.feature diff --git a/changelog.d/15737.feature b/changelog.d/15737.feature new file mode 100644 index 000000000000..9a547b5ebdd0 --- /dev/null +++ b/changelog.d/15737.feature @@ -0,0 +1 @@ +Improve `/messages` response time by avoiding backfill when we already have messages to return. diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index d5257acb7da3..19b8728db9b9 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -40,6 +40,11 @@ logger = logging.getLogger(__name__) +# How many single event gaps we tolerate returning in a `/messages` response before we +# backfill and try to fill in the history. This is an arbitrarily picked number so feel +# free to tune it in the future. +BACKFILL_BECAUSE_TOO_MANY_GAPS_THRESHOLD = 3 + @attr.s(slots=True, auto_attribs=True) class PurgeStatus: @@ -486,35 +491,35 @@ async def get_messages( room_id, room_token.stream ) - if not use_admin_priviledge and membership == Membership.LEAVE: - # If they have left the room then clamp the token to be before - # they left the room, to save the effort of loading from the - # database. - - # This is only None if the room is world_readable, in which - # case "JOIN" would have been returned. - assert member_event_id + # If they have left the room then clamp the token to be before + # they left the room, to save the effort of loading from the + # database. + if ( + pagin_config.direction == Direction.BACKWARDS + and not use_admin_priviledge + and membership == Membership.LEAVE + ): + # This is only None if the room is world_readable, in which case + # "Membership.JOIN" would have been returned and we should never hit + # this branch. + assert member_event_id + + leave_token = await self.store.get_topological_token_for_event( + member_event_id + ) + assert leave_token.topological is not None - leave_token = await self.store.get_topological_token_for_event( - member_event_id + if leave_token.topological < curr_topo: + from_token = from_token.copy_and_replace( + StreamKeyType.ROOM, leave_token ) - assert leave_token.topological is not None - - if leave_token.topological < curr_topo: - from_token = from_token.copy_and_replace( - StreamKeyType.ROOM, leave_token - ) - - await self.hs.get_federation_handler().maybe_backfill( - room_id, - curr_topo, - limit=pagin_config.limit, - ) to_room_key = None if pagin_config.to_token: to_room_key = pagin_config.to_token.room_key + # Initially fetch the events from the database. With any luck, we can return + # these without blocking on backfill (handled below). events, next_key = await self.store.paginate_room_events( room_id=room_id, from_key=from_token.room_key, @@ -524,6 +529,94 @@ async def get_messages( event_filter=event_filter, ) + if pagin_config.direction == Direction.BACKWARDS: + # We use a `Set` because there can be multiple events at a given depth + # and we only care about looking at the unique continum of depths to + # find gaps. 
+ event_depths: Set[int] = {event.depth for event in events} + sorted_event_depths = sorted(event_depths) + + # Inspect the depths of the returned events to see if there are any gaps + found_big_gap = False + number_of_gaps = 0 + previous_event_depth = ( + sorted_event_depths[0] if len(sorted_event_depths) > 0 else 0 + ) + for event_depth in sorted_event_depths: + # We don't expect a negative depth but we'll just deal with it in + # any case by taking the absolute value to get the true gap between + # any two integers. + depth_gap = abs(event_depth - previous_event_depth) + # A `depth_gap` of 1 is a normal continuous chain to the next event + # (1 <-- 2 <-- 3) so anything larger indicates a missing event (it's + # also possible there is no event at a given depth but we can't ever + # know that for sure) + if depth_gap > 1: + number_of_gaps += 1 + + # We only tolerate a small number single-event long gaps in the + # returned events because those are most likely just events we've + # failed to pull in the past. Anything longer than that is probably + # a sign that we're missing a decent chunk of history and we should + # try to backfill it. + # + # XXX: It's possible we could tolerate longer gaps if we checked + # that a given events `prev_events` is one that has failed pull + # attempts and we could just treat it like a dead branch of history + # for now or at least something that we don't need the block the + # client on to try pulling. + # + # XXX: If we had something like MSC3871 to indicate gaps in the + # timeline to the client, we could also get away with any sized gap + # and just have the client refetch the holes as they see fit. + if depth_gap > 2: + found_big_gap = True + break + previous_event_depth = event_depth + + # Backfill in the foreground if we found a big gap, have too many holes, + # or we don't have enough events to fill the limit that the client asked + # for. + missing_too_many_events = ( + number_of_gaps > BACKFILL_BECAUSE_TOO_MANY_GAPS_THRESHOLD + ) + not_enough_events_to_fill_response = len(events) < pagin_config.limit + if ( + found_big_gap + or missing_too_many_events + or not_enough_events_to_fill_response + ): + did_backfill = ( + await self.hs.get_federation_handler().maybe_backfill( + room_id, + curr_topo, + limit=pagin_config.limit, + ) + ) + + # If we did backfill something, refetch the events from the database to + # catch anything new that might have been added since we last fetched. + if did_backfill: + events, next_key = await self.store.paginate_room_events( + room_id=room_id, + from_key=from_token.room_key, + to_key=to_room_key, + direction=pagin_config.direction, + limit=pagin_config.limit, + event_filter=event_filter, + ) + else: + # Otherwise, we can backfill in the background for eventual + # consistency's sake but we don't need to block the client waiting + # for a costly federation call and processing. 
+ run_as_background_process( + "maybe_backfill_in_the_background", + self.hs.get_federation_handler().maybe_backfill, + room_id, + curr_topo, + limit=pagin_config.limit, + ) + next_token = from_token.copy_and_replace(StreamKeyType.ROOM, next_key) # if no events are returned from pagination, that implies From 59ec4a0dc1404991935e3c29abe548affa0446bf Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Tue, 13 Jun 2023 19:51:47 +0200 Subject: [PATCH 074/153] Fix MSC3983 support: only one OTK per device was returned through federation (#15770) --- changelog.d/15770.bugfix | 1 + synapse/federation/federation_server.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15770.bugfix diff --git a/changelog.d/15770.bugfix b/changelog.d/15770.bugfix new file mode 100644 index 000000000000..a65340d380e3 --- /dev/null +++ b/changelog.d/15770.bugfix @@ -0,0 +1 @@ +Fix requesting multiple keys at once over federation, related to [MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983). diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 9425b3250703..61fa3b30af5a 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -1016,7 +1016,9 @@ async def on_claim_client_keys( for user_id, device_keys in result.items(): for device_id, keys in device_keys.items(): for key_id, key in keys.items(): - json_result.setdefault(user_id, {})[device_id] = {key_id: key} + json_result.setdefault(user_id, {}).setdefault(device_id, {})[ + key_id + ] = key logger.info( "Claimed one-time-keys: %s", From 553f2f53e7930d721d2070ffa45af6397f5ecb60 Mon Sep 17 00:00:00 2001 From: Shay Date: Tue, 13 Jun 2023 13:22:06 -0700 Subject: [PATCH 075/153] Replace `EventContext` fields `prev_group` and `delta_ids` with field `state_group_deltas` (#15233) --- changelog.d/15233.misc | 1 + synapse/events/snapshot.py | 159 ++++++++++++------ synapse/storage/controllers/persist_events.py | 5 +- tests/events/test_snapshot.py | 3 +- tests/storage/test_event_chain.py | 5 +- tests/test_state.py | 11 +- 6 files changed, 126 insertions(+), 58 deletions(-) create mode 100644 changelog.d/15233.misc diff --git a/changelog.d/15233.misc b/changelog.d/15233.misc new file mode 100644 index 000000000000..1dff00bf3c8c --- /dev/null +++ b/changelog.d/15233.misc @@ -0,0 +1 @@ +Replace `EventContext` fields `prev_group` and `delta_ids` with field `state_group_deltas`. diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py index e7e8225b8e06..a43498ed4d51 100644 --- a/synapse/events/snapshot.py +++ b/synapse/events/snapshot.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Dict, List, Optional, Tuple import attr from immutabledict import immutabledict @@ -107,33 +107,32 @@ class EventContext(UnpersistedEventContextBase): state_delta_due_to_event: If `state_group` and `state_group_before_event` are not None then this is the delta of the state between the two groups. - prev_group: If it is known, ``state_group``'s prev_group. Note that this being - None does not necessarily mean that ``state_group`` does not have - a prev_group! + state_group_deltas: If not empty, this is a dict collecting a mapping of the state + difference between state groups. 
- If the event is a state event, this is normally the same as - ``state_group_before_event``. + The keys are a tuple of two integers: the initial group and final state group. + The corresponding value is a state map representing the state delta between + these state groups. - If ``state_group`` is None (ie, the event is an outlier), ``prev_group`` - will always also be ``None``. + The dictionary is expected to have at most two entries with state groups of: - Note that this *not* (necessarily) the state group associated with - ``_prev_state_ids``. + 1. The state group before the event and after the event. + 2. The state group preceding the state group before the event and the + state group before the event. - delta_ids: If ``prev_group`` is not None, the state delta between ``prev_group`` - and ``state_group``. + This information is collected and stored as part of an optimization for persisting + events. partial_state: if True, we may be storing this event with a temporary, incomplete state. """ _storage: "StorageControllers" + state_group_deltas: Dict[Tuple[int, int], StateMap[str]] rejected: Optional[str] = None _state_group: Optional[int] = None state_group_before_event: Optional[int] = None _state_delta_due_to_event: Optional[StateMap[str]] = None - prev_group: Optional[int] = None - delta_ids: Optional[StateMap[str]] = None app_service: Optional[ApplicationService] = None partial_state: bool = False @@ -145,16 +144,14 @@ def with_state( state_group_before_event: Optional[int], state_delta_due_to_event: Optional[StateMap[str]], partial_state: bool, - prev_group: Optional[int] = None, - delta_ids: Optional[StateMap[str]] = None, + state_group_deltas: Dict[Tuple[int, int], StateMap[str]], ) -> "EventContext": return EventContext( storage=storage, state_group=state_group, state_group_before_event=state_group_before_event, state_delta_due_to_event=state_delta_due_to_event, - prev_group=prev_group, - delta_ids=delta_ids, + state_group_deltas=state_group_deltas, partial_state=partial_state, ) @@ -163,7 +160,7 @@ def for_outlier( storage: "StorageControllers", ) -> "EventContext": """Return an EventContext instance suitable for persisting an outlier event""" - return EventContext(storage=storage) + return EventContext(storage=storage, state_group_deltas={}) async def persist(self, event: EventBase) -> "EventContext": return self @@ -183,13 +180,15 @@ async def serialize(self, event: EventBase, store: "DataStore") -> JsonDict: "state_group": self._state_group, "state_group_before_event": self.state_group_before_event, "rejected": self.rejected, - "prev_group": self.prev_group, + "state_group_deltas": _encode_state_group_delta(self.state_group_deltas), "state_delta_due_to_event": _encode_state_dict( self._state_delta_due_to_event ), - "delta_ids": _encode_state_dict(self.delta_ids), "app_service_id": self.app_service.id if self.app_service else None, "partial_state": self.partial_state, + # add dummy delta_ids and prev_group for backwards compatibility + "delta_ids": None, + "prev_group": None, } @staticmethod @@ -204,17 +203,24 @@ def deserialize(storage: "StorageControllers", input: JsonDict) -> "EventContext Returns: The event context. 
""" + # workaround for backwards/forwards compatibility: if the input doesn't have a value + # for "state_group_deltas" just assign an empty dict + state_group_deltas = input.get("state_group_deltas", None) + if state_group_deltas: + state_group_deltas = _decode_state_group_delta(state_group_deltas) + else: + state_group_deltas = {} + context = EventContext( # We use the state_group and prev_state_id stuff to pull the # current_state_ids out of the DB and construct prev_state_ids. storage=storage, state_group=input["state_group"], state_group_before_event=input["state_group_before_event"], - prev_group=input["prev_group"], + state_group_deltas=state_group_deltas, state_delta_due_to_event=_decode_state_dict( input["state_delta_due_to_event"] ), - delta_ids=_decode_state_dict(input["delta_ids"]), rejected=input["rejected"], partial_state=input.get("partial_state", False), ) @@ -349,7 +355,7 @@ class UnpersistedEventContext(UnpersistedEventContextBase): _storage: "StorageControllers" state_group_before_event: Optional[int] state_group_after_event: Optional[int] - state_delta_due_to_event: Optional[dict] + state_delta_due_to_event: Optional[StateMap[str]] prev_group_for_state_group_before_event: Optional[int] delta_ids_to_state_group_before_event: Optional[StateMap[str]] partial_state: bool @@ -380,26 +386,16 @@ async def batch_persist_unpersisted_contexts( events_and_persisted_context = [] for event, unpersisted_context in amended_events_and_context: - if event.is_state(): - context = EventContext( - storage=unpersisted_context._storage, - state_group=unpersisted_context.state_group_after_event, - state_group_before_event=unpersisted_context.state_group_before_event, - state_delta_due_to_event=unpersisted_context.state_delta_due_to_event, - partial_state=unpersisted_context.partial_state, - prev_group=unpersisted_context.state_group_before_event, - delta_ids=unpersisted_context.state_delta_due_to_event, - ) - else: - context = EventContext( - storage=unpersisted_context._storage, - state_group=unpersisted_context.state_group_after_event, - state_group_before_event=unpersisted_context.state_group_before_event, - state_delta_due_to_event=unpersisted_context.state_delta_due_to_event, - partial_state=unpersisted_context.partial_state, - prev_group=unpersisted_context.prev_group_for_state_group_before_event, - delta_ids=unpersisted_context.delta_ids_to_state_group_before_event, - ) + state_group_deltas = unpersisted_context._build_state_group_deltas() + + context = EventContext( + storage=unpersisted_context._storage, + state_group=unpersisted_context.state_group_after_event, + state_group_before_event=unpersisted_context.state_group_before_event, + state_delta_due_to_event=unpersisted_context.state_delta_due_to_event, + partial_state=unpersisted_context.partial_state, + state_group_deltas=state_group_deltas, + ) events_and_persisted_context.append((event, context)) return events_and_persisted_context @@ -452,11 +448,11 @@ async def persist(self, event: EventBase) -> EventContext: # if the event isn't a state event the state group doesn't change if not self.state_delta_due_to_event: - state_group_after_event = self.state_group_before_event + self.state_group_after_event = self.state_group_before_event # otherwise if it is a state event we need to get a state group for it else: - state_group_after_event = await self._storage.state.store_state_group( + self.state_group_after_event = await self._storage.state.store_state_group( event.event_id, event.room_id, prev_group=self.state_group_before_event, 
@@ -464,16 +460,81 @@ async def persist(self, event: EventBase) -> EventContext: current_state_ids=None, ) + state_group_deltas = self._build_state_group_deltas() + return EventContext.with_state( storage=self._storage, - state_group=state_group_after_event, + state_group=self.state_group_after_event, state_group_before_event=self.state_group_before_event, state_delta_due_to_event=self.state_delta_due_to_event, + state_group_deltas=state_group_deltas, partial_state=self.partial_state, - prev_group=self.state_group_before_event, - delta_ids=self.state_delta_due_to_event, ) + def _build_state_group_deltas(self) -> Dict[Tuple[int, int], StateMap]: + """ + Collect deltas between the state groups associated with this context + """ + state_group_deltas = {} + + # if we know the state group before the event and after the event, add them and the + # state delta between them to state_group_deltas + if self.state_group_before_event and self.state_group_after_event: + # if we have the state groups we should have the delta + assert self.state_delta_due_to_event is not None + state_group_deltas[ + ( + self.state_group_before_event, + self.state_group_after_event, + ) + ] = self.state_delta_due_to_event + + # the state group before the event may also have a state group which precedes it, if + # we have that and the state group before the event, add them and the state + # delta between them to state_group_deltas + if ( + self.prev_group_for_state_group_before_event + and self.state_group_before_event + ): + # if we have both state groups we should have the delta between them + assert self.delta_ids_to_state_group_before_event is not None + state_group_deltas[ + ( + self.prev_group_for_state_group_before_event, + self.state_group_before_event, + ) + ] = self.delta_ids_to_state_group_before_event + + return state_group_deltas + + +def _encode_state_group_delta( + state_group_delta: Dict[Tuple[int, int], StateMap[str]] +) -> List[Tuple[int, int, Optional[List[Tuple[str, str, str]]]]]: + if not state_group_delta: + return [] + + state_group_delta_encoded = [] + for key, value in state_group_delta.items(): + state_group_delta_encoded.append((key[0], key[1], _encode_state_dict(value))) + + return state_group_delta_encoded + + +def _decode_state_group_delta( + input: List[Tuple[int, int, List[Tuple[str, str, str]]]] +) -> Dict[Tuple[int, int], StateMap[str]]: + if not input: + return {} + + state_group_deltas = {} + for state_group_1, state_group_2, state_dict in input: + state_map = _decode_state_dict(state_dict) + assert state_map is not None + state_group_deltas[(state_group_1, state_group_2)] = state_map + + return state_group_deltas + def _encode_state_dict( state_dict: Optional[StateMap[str]], diff --git a/synapse/storage/controllers/persist_events.py b/synapse/storage/controllers/persist_events.py index f1d2c71c9116..35c068036599 100644 --- a/synapse/storage/controllers/persist_events.py +++ b/synapse/storage/controllers/persist_events.py @@ -839,9 +839,8 @@ async def _get_new_state_after_events( "group" % (ev.event_id,) ) continue - - if ctx.prev_group: - state_group_deltas[(ctx.prev_group, ctx.state_group)] = ctx.delta_ids + if ctx.state_group_deltas: + state_group_deltas.update(ctx.state_group_deltas) # We need to map the event_ids to their state groups. 
First, let's # check if the event is one we're persisting, in which case we can diff --git a/tests/events/test_snapshot.py b/tests/events/test_snapshot.py index 6687c28e8fea..b5e42f9600e7 100644 --- a/tests/events/test_snapshot.py +++ b/tests/events/test_snapshot.py @@ -101,8 +101,7 @@ def _check_serialize_deserialize( self.assertEqual( context.state_group_before_event, d_context.state_group_before_event ) - self.assertEqual(context.prev_group, d_context.prev_group) - self.assertEqual(context.delta_ids, d_context.delta_ids) + self.assertEqual(context.state_group_deltas, d_context.state_group_deltas) self.assertEqual(context.app_service, d_context.app_service) self.assertEqual( diff --git a/tests/storage/test_event_chain.py b/tests/storage/test_event_chain.py index e39b63edac42..48ebfadaab31 100644 --- a/tests/storage/test_event_chain.py +++ b/tests/storage/test_event_chain.py @@ -401,7 +401,10 @@ def _persist(txn: LoggingTransaction) -> None: assert persist_events_store is not None persist_events_store._store_event_txn( txn, - [(e, EventContext(self.hs.get_storage_controllers())) for e in events], + [ + (e, EventContext(self.hs.get_storage_controllers(), {})) + for e in events + ], ) # Actually call the function that calculates the auth chain stuff. diff --git a/tests/test_state.py b/tests/test_state.py index 7a49b8795320..eded38c7669d 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -555,10 +555,15 @@ def test_annotate_with_old_state( (e.event_id for e in old_state + [event]), current_state_ids.values() ) - self.assertIsNotNone(context.state_group_before_event) + assert context.state_group_before_event is not None + assert context.state_group is not None + self.assertEqual( + context.state_group_deltas.get( + (context.state_group_before_event, context.state_group) + ), + {(event.type, event.state_key): event.event_id}, + ) self.assertNotEqual(context.state_group_before_event, context.state_group) - self.assertEqual(context.state_group_before_event, context.prev_group) - self.assertEqual({("state", ""): event.event_id}, context.delta_ids) @defer.inlineCallbacks def test_trivial_annotate_message( From 8ddb2de55387d54bac53138f374f55c7608991ce Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 13 Jun 2023 16:34:54 -0500 Subject: [PATCH 076/153] Document `looping_call()` functionality that will wait for the given function to finish before scheduling another (#15772) Thanks to @erikjohnston for clarifying, https://github.com/matrix-org/synapse/pull/15743#discussion_r1226544457 We don't have to worry about calls stacking up if the given function takes longer than the scheduled time. --- changelog.d/15772.doc | 1 + synapse/util/__init__.py | 5 +++++ 2 files changed, 6 insertions(+) create mode 100644 changelog.d/15772.doc diff --git a/changelog.d/15772.doc b/changelog.d/15772.doc new file mode 100644 index 000000000000..4d6c933c713b --- /dev/null +++ b/changelog.d/15772.doc @@ -0,0 +1 @@ +Document `looping_call()` functionality that will wait for the given function to finish before scheduling another. diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index 7ea0c4c36bcc..9f3b8741c1db 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -116,6 +116,11 @@ def looping_call( Waits `msec` initially before calling `f` for the first time. + If the function given to `looping_call` returns an awaitable/deferred, the next + call isn't scheduled until after the returned awaitable has finished. 
We get + this functionality thanks to this function being a thin wrapper around + `twisted.internet.task.LoopingCall`. + Note that the function will be called with no logcontext, so if it is anything other than trivial, you probably want to wrap it in run_as_background_process. From 21fea6b7493533985f7fa14924949514b5a356e2 Mon Sep 17 00:00:00 2001 From: Jason Little Date: Wed, 14 Jun 2023 03:42:18 -0500 Subject: [PATCH 077/153] Prefill events after invalidate not before when persisting events (#15758) Fixes #15757 --- changelog.d/15758.bugfix | 1 + synapse/storage/databases/main/events.py | 15 ++++-- .../storage/databases/main/events_worker.py | 2 +- synapse/util/caches/lrucache.py | 8 ++- .../databases/main/test_events_worker.py | 49 +++++++++++++++++++ 5 files changed, 70 insertions(+), 5 deletions(-) create mode 100644 changelog.d/15758.bugfix diff --git a/changelog.d/15758.bugfix b/changelog.d/15758.bugfix new file mode 100644 index 000000000000..cabe25ca24ec --- /dev/null +++ b/changelog.d/15758.bugfix @@ -0,0 +1 @@ +Avoid invalidating a cache that was just prefilled. diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index e2e6eb479f61..44af3357af47 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -1729,13 +1729,22 @@ def _add_to_cache( if not row["rejects"] and not row["redacts"]: to_prefill.append(EventCacheEntry(event=event, redacted_event=None)) - async def prefill() -> None: + async def external_prefill() -> None: for cache_entry in to_prefill: - await self.store._get_event_cache.set( + await self.store._get_event_cache.set_external( (cache_entry.event.event_id,), cache_entry ) - txn.async_call_after(prefill) + def local_prefill() -> None: + for cache_entry in to_prefill: + self.store._get_event_cache.set_local( + (cache_entry.event.event_id,), cache_entry + ) + + # The order these are called here is not as important as knowing that after the + # transaction is finished, the async_call_after will run before the call_after. + txn.async_call_after(external_prefill) + txn.call_after(local_prefill) def _store_redaction(self, txn: LoggingTransaction, event: EventBase) -> None: assert event.redacts is not None diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index d93ffc4efa75..7e7648c95112 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -883,7 +883,7 @@ def invalidate_get_event_cache_after_txn( async def _invalidate_async_get_event_cache(self, event_id: str) -> None: """ - Invalidates an event in the asyncronous get event cache, which may be remote. + Invalidates an event in the asynchronous get event cache, which may be remote. Arguments: event_id: the event ID to invalidate diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index 6137c85e1051..be6554319a31 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -842,7 +842,13 @@ def get_local( return self._lru_cache.get(key, update_metrics=update_metrics) async def set(self, key: KT, value: VT) -> None: - self._lru_cache.set(key, value) + # This will add the entries in the correct order, local first external second + self.set_local(key, value) + await self.set_external(key, value) + + async def set_external(self, key: KT, value: VT) -> None: + # This method should add an entry to any configured external cache, in this case noop. 
+ pass def set_local(self, key: KT, value: VT) -> None: self._lru_cache.set(key, value) diff --git a/tests/storage/databases/main/test_events_worker.py b/tests/storage/databases/main/test_events_worker.py index 788500e38f2d..b223dc750bfc 100644 --- a/tests/storage/databases/main/test_events_worker.py +++ b/tests/storage/databases/main/test_events_worker.py @@ -139,6 +139,55 @@ def test_persisting_event_invalidates_cache(self) -> None: # That should result in a single db query to lookup self.assertEqual(ctx.get_resource_usage().db_txn_count, 1) + def test_persisting_event_prefills_get_event_cache(self) -> None: + """ + Test to make sure that the `_get_event_cache` is prefilled after we persist an + event and returns the updated value. + """ + event, event_context = self.get_success( + create_event( + self.hs, + room_id=self.room_id, + sender=self.user, + type="test_event_type", + content={"body": "conflabulation"}, + ) + ) + + # First, check `_get_event_cache` for the event we just made + # to verify it's not in the cache. + res = self.store._get_event_cache.get_local((event.event_id,)) + self.assertEqual(res, None, "Event was cached when it should not have been.") + + with LoggingContext(name="test") as ctx: + # Persist the event which should invalidate then prefill the + # `_get_event_cache` so we don't return stale values. + # Side Note: Apparently, persisting an event isn't a transaction in the + # sense that it is recorded in the LoggingContext + persistence = self.hs.get_storage_controllers().persistence + assert persistence is not None + self.get_success( + persistence.persist_event( + event, + event_context, + ) + ) + + # Check `_get_event_cache` again and we should see the updated fact + # that we now have the event cached after persisting it. + res = self.store._get_event_cache.get_local((event.event_id,)) + self.assertEqual(res.event, event, "Event not cached as expected.") # type: ignore + + # Try and fetch the event from the database. + self.get_success(self.store.get_event(event.event_id)) + + # Verify that the database hit was avoided. + self.assertEqual( + ctx.get_resource_usage().evt_db_fetch_count, + 0, + "Database was hit, which would not happen if event was cached.", + ) + def test_invalidate_cache_by_room_id(self) -> None: """ Test to make sure that all events associated with the given `(room_id,)` From 14f9d9b4520099118f009ae4f4c6b11b779af499 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Wed, 14 Jun 2023 11:53:55 +0200 Subject: [PATCH 078/153] Fix empty scope when having version mismatch between workers (#15774) --- changelog.d/15774.bugfix | 1 + synapse/types/__init__.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15774.bugfix diff --git a/changelog.d/15774.bugfix b/changelog.d/15774.bugfix new file mode 100644 index 000000000000..c24d6c25e4fc --- /dev/null +++ b/changelog.d/15774.bugfix @@ -0,0 +1 @@ +Fix an error when having workers of different versions running. 
diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index dfc95e8ebb8a..095be070e0c5 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -177,7 +177,7 @@ def deserialize( user=UserID.from_string(input["user_id"]), access_token_id=input["access_token_id"], is_guest=input["is_guest"], - scope=set(input["scope"]), + scope=set(input.get("scope", [])), shadow_banned=input["shadow_banned"], device_id=input["device_id"], app_service=appservice, From ef0d3d7bd941b497ad8291c58bcc53700e08b999 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Wed, 14 Jun 2023 11:55:09 +0200 Subject: [PATCH 079/153] Revert "Allow for the configuration of max request retries and min/max retry delays in the matrix federation client (#12504)" This reverts commit d84e66144dc12dacf71c987a2ba802dd59c0b68e. --- CHANGES.md | 1 - .../configuration/config_documentation.md | 26 ------------------- synapse/config/federation.py | 10 ------- synapse/http/matrixfederationclient.py | 21 +++++++-------- tests/http/test_matrixfederationclient.py | 20 +------------- 5 files changed, 10 insertions(+), 68 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 5412581eef25..d89859366479 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -30,7 +30,6 @@ Improved Documentation Internal Changes ---------------- -- Allow for the configuration of max request retries and min/max retry delays in the matrix federation client. ([\#12504](https://github.com/matrix-org/synapse/issues/12504)) - Log when events are (maybe unexpectedly) filtered out of responses in tests. ([\#14213](https://github.com/matrix-org/synapse/issues/14213)) - Read from column `full_user_id` rather than `user_id` of tables `profiles` and `user_filters`. ([\#15649](https://github.com/matrix-org/synapse/issues/15649)) - Add support for tracing functions which return `Awaitable`s. ([\#15650](https://github.com/matrix-org/synapse/issues/15650)) diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 8426de04179b..0cf6e075ff11 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -1196,32 +1196,6 @@ Example configuration: allow_device_name_lookup_over_federation: true ``` --- -### `federation` - -The federation section defines some sub-options related to federation. - -The following options are related to configuring timeout and retry logic for one request, -independently of the others. -Short retry algorithm is used when something or someone will wait for the request to have an -answer, while long retry is used for requests that happen in the background, -like sending a federation transaction. - -* `client_timeout`: timeout for the federation requests in seconds. Default to 60s. -* `max_short_retry_delay`: maximum delay to be used for the short retry algo in seconds. Default to 2s. -* `max_long_retry_delay`: maximum delay to be used for the short retry algo in seconds. Default to 60s. -* `max_short_retries`: maximum number of retries for the short retry algo. Default to 3 attempts. -* `max_long_retries`: maximum number of retries for the long retry algo. Default to 10 attempts. - -Example configuration: -```yaml -federation: - client_timeout: 180 - max_short_retry_delay: 7 - max_long_retry_delay: 100 - max_short_retries: 5 - max_long_retries: 20 -``` ---- ## Caching Options related to caching. 
diff --git a/synapse/config/federation.py b/synapse/config/federation.py index d21f7fd02a5e..336fca578aa1 100644 --- a/synapse/config/federation.py +++ b/synapse/config/federation.py @@ -22,8 +22,6 @@ class FederationConfig(Config): section = "federation" def read_config(self, config: JsonDict, **kwargs: Any) -> None: - federation_config = config.setdefault("federation", {}) - # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist: Optional[dict] = None federation_domain_whitelist = config.get("federation_domain_whitelist", None) @@ -51,13 +49,5 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: "allow_device_name_lookup_over_federation", False ) - # Allow for the configuration of timeout, max request retries - # and min/max retry delays in the matrix federation client. - self.client_timeout = federation_config.get("client_timeout", 60) - self.max_long_retry_delay = federation_config.get("max_long_retry_delay", 60) - self.max_short_retry_delay = federation_config.get("max_short_retry_delay", 2) - self.max_long_retries = federation_config.get("max_long_retries", 10) - self.max_short_retries = federation_config.get("max_short_retries", 3) - _METRICS_FOR_DOMAINS_SCHEMA = {"type": "array", "items": {"type": "string"}} diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index ed36825b671c..abb5ae581521 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -95,6 +95,8 @@ ) +MAX_LONG_RETRIES = 10 +MAX_SHORT_RETRIES = 3 MAXINT = sys.maxsize @@ -404,12 +406,7 @@ def __init__( self.clock = hs.get_clock() self._store = hs.get_datastores().main self.version_string_bytes = hs.version_string.encode("ascii") - self.default_timeout = hs.config.federation.client_timeout - - self.max_long_retry_delay = hs.config.federation.max_long_retry_delay - self.max_short_retry_delay = hs.config.federation.max_short_retry_delay - self.max_long_retries = hs.config.federation.max_long_retries - self.max_short_retries = hs.config.federation.max_short_retries + self.default_timeout = 60 self._cooperator = Cooperator(scheduler=_make_scheduler(self.reactor)) @@ -586,9 +583,9 @@ async def _send_request( # XXX: Would be much nicer to retry only at the transaction-layer # (once we have reliable transactions in place) if long_retries: - retries_left = self.max_long_retries + retries_left = MAX_LONG_RETRIES else: - retries_left = self.max_short_retries + retries_left = MAX_SHORT_RETRIES url_bytes = request.uri url_str = url_bytes.decode("ascii") @@ -733,12 +730,12 @@ async def _send_request( if retries_left and not timeout: if long_retries: - delay = 4 ** (self.max_long_retries + 1 - retries_left) - delay = min(delay, self.max_long_retry_delay) + delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left) + delay = min(delay, 60) delay *= random.uniform(0.8, 1.4) else: - delay = 0.5 * 2 ** (self.max_short_retries - retries_left) - delay = min(delay, self.max_short_retry_delay) + delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left) + delay = min(delay, 2) delay *= random.uniform(0.8, 1.4) logger.debug( diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py index 8565f8ac64ad..0dfc03ce50f4 100644 --- a/tests/http/test_matrixfederationclient.py +++ b/tests/http/test_matrixfederationclient.py @@ -40,7 +40,7 @@ from synapse.util import Clock from tests.server import FakeTransport -from tests.unittest import HomeserverTestCase, override_config +from tests.unittest import 
HomeserverTestCase def check_logcontext(context: LoggingContextOrSentinel) -> None: @@ -640,21 +640,3 @@ def test_build_auth_headers_rejects_falsey_destinations(self) -> None: self.cl.build_auth_headers( b"", b"GET", b"https://example.com", destination_is=b"" ) - - @override_config( - { - "federation": { - "client_timeout": 180, - "max_long_retry_delay": 100, - "max_short_retry_delay": 7, - "max_long_retries": 20, - "max_short_retries": 5, - } - } - ) - def test_configurable_retry_and_delay_values(self) -> None: - self.assertEqual(self.cl.default_timeout, 180) - self.assertEqual(self.cl.max_long_retry_delay, 100) - self.assertEqual(self.cl.max_short_retry_delay, 7) - self.assertEqual(self.cl.max_long_retries, 20) - self.assertEqual(self.cl.max_short_retries, 5) From 825c5909de642c9c6494ef464684e29630d197b5 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Wed, 14 Jun 2023 12:16:41 +0200 Subject: [PATCH 080/153] 1.86.0rc2 --- CHANGES.md | 11 +++++++++++ changelog.d/15774.bugfix | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 4 files changed, 18 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/15774.bugfix diff --git a/CHANGES.md b/CHANGES.md index d89859366479..f2f39c3b6eb3 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,6 +1,17 @@ +Synapse 1.86.0rc2 (2023-06-14) +============================== + +Bugfixes +-------- + +- Fix an error when having workers of different versions running. ([\#15774](https://github.com/matrix-org/synapse/issues/15774)) + + Synapse 1.86.0rc1 (2023-06-13) ============================== +This version was tagged but never released. + Features -------- diff --git a/changelog.d/15774.bugfix b/changelog.d/15774.bugfix deleted file mode 100644 index c24d6c25e4fc..000000000000 --- a/changelog.d/15774.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix an error when having workers of different versions running. diff --git a/debian/changelog b/debian/changelog index 1c13433c4775..81b71ba342a1 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.86.0~rc2) stable; urgency=medium + + * New Synapse release 1.86.0rc2. + + -- Synapse Packaging team Wed, 14 Jun 2023 12:16:27 +0200 + matrix-synapse-py3 (1.86.0~rc1) stable; urgency=medium * New Synapse release 1.86.0rc1. diff --git a/pyproject.toml b/pyproject.toml index 6bbbf9500199..097bd039433f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.86.0rc1" +version = "1.86.0rc2" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0" From 1404f68a03e684434dea6b2a9a5c2739c719549d Mon Sep 17 00:00:00 2001 From: Tulir Asokan Date: Wed, 14 Jun 2023 17:42:33 +0300 Subject: [PATCH 081/153] Fix joining rooms through aliases where the alias server isn't a real homeserver (#15776) --- changelog.d/15776.bugfix | 1 + synapse/handlers/room_member.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15776.bugfix diff --git a/changelog.d/15776.bugfix b/changelog.d/15776.bugfix new file mode 100644 index 000000000000..f146a85f1a22 --- /dev/null +++ b/changelog.d/15776.bugfix @@ -0,0 +1 @@ +Fix joining rooms through aliases where the alias server isn't a real homeserver. Contributed by @tulir @ Beeper. 
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index af0ca5c26d33..55df34bd06a9 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -1498,7 +1498,7 @@ async def lookup_room_alias( # put the server which owns the alias at the front of the server list. if room_alias.domain in servers: servers.remove(room_alias.domain) - servers.insert(0, room_alias.domain) + servers.insert(0, room_alias.domain) return RoomID.from_string(room_id), servers From d93912042191d30ff1f7aa41d9f0779a609caca8 Mon Sep 17 00:00:00 2001 From: Josh Qou <97894002+joshqou@users.noreply.github.com> Date: Thu, 15 Jun 2023 14:23:27 +0100 Subject: [PATCH 082/153] Fix unsafe hotserving behaviour for non-multimedia uploads. (#15680) * Fix unsafe hotserving behaviour for non-multimedia uploads. * invert disposition assert * test_media_storage.py: run lint * test_base.py: /inline/attachment/s * Only return attachment for disposition type, update tests * Update synapse/media/_base.py Co-authored-by: Patrick Cloke * Update changelog.d/15680.bugfix Co-authored-by: Patrick Cloke * add attribution * Update changelog. --------- Co-authored-by: Patrick Cloke --- changelog.d/15680.bugfix | 1 + synapse/media/_base.py | 15 ++++++++++++--- tests/media/test_base.py | 12 ++++++------ tests/media/test_media_storage.py | 20 ++++++++++---------- 4 files changed, 29 insertions(+), 19 deletions(-) create mode 100644 changelog.d/15680.bugfix diff --git a/changelog.d/15680.bugfix b/changelog.d/15680.bugfix new file mode 100644 index 000000000000..04ac19b4ec3a --- /dev/null +++ b/changelog.d/15680.bugfix @@ -0,0 +1 @@ +Fix a long-standing bug where media files were served in an unsafe manner. Contributed by @joshqou. diff --git a/synapse/media/_base.py b/synapse/media/_base.py index ef8334ae2586..20cb8b9010bb 100644 --- a/synapse/media/_base.py +++ b/synapse/media/_base.py @@ -152,6 +152,9 @@ def _quote(x: str) -> str: content_type = media_type request.setHeader(b"Content-Type", content_type.encode("UTF-8")) + + # Use a Content-Disposition of attachment to force download of media. + disposition = "attachment" if upload_name: # RFC6266 section 4.1 [1] defines both `filename` and `filename*`. # @@ -173,11 +176,17 @@ def _quote(x: str) -> str: # correctly interpret those as of 0.99.2 and (b) they are a bit of a pain and we # may as well just do the filename* version. if _can_encode_filename_as_token(upload_name): - disposition = "inline; filename=%s" % (upload_name,) + disposition = "%s; filename=%s" % ( + disposition, + upload_name, + ) else: - disposition = "inline; filename*=utf-8''%s" % (_quote(upload_name),) + disposition = "%s; filename*=utf-8''%s" % ( + disposition, + _quote(upload_name), + ) - request.setHeader(b"Content-Disposition", disposition.encode("ascii")) + request.setHeader(b"Content-Disposition", disposition.encode("ascii")) # cache for at least a day. 
# XXX: we might want to turn this off for data we don't want to diff --git a/tests/media/test_base.py b/tests/media/test_base.py index 66498c744d6d..4728c80969a8 100644 --- a/tests/media/test_base.py +++ b/tests/media/test_base.py @@ -20,12 +20,12 @@ class GetFileNameFromHeadersTests(unittest.TestCase): # input -> expected result TEST_CASES = { - b"inline; filename=abc.txt": "abc.txt", - b'inline; filename="azerty"': "azerty", - b'inline; filename="aze%20rty"': "aze%20rty", - b'inline; filename="aze"rty"': 'aze"rty', - b'inline; filename="azer;ty"': "azer;ty", - b"inline; filename*=utf-8''foo%C2%A3bar": "foo£bar", + b"attachment; filename=abc.txt": "abc.txt", + b'attachment; filename="azerty"': "azerty", + b'attachment; filename="aze%20rty"': "aze%20rty", + b'attachment; filename="aze"rty"': 'aze"rty', + b'attachment; filename="azer;ty"': "azer;ty", + b"attachment; filename*=utf-8''foo%C2%A3bar": "foo£bar", } def tests(self) -> None: diff --git a/tests/media/test_media_storage.py b/tests/media/test_media_storage.py index f0f2da65db0b..ea0051dde42f 100644 --- a/tests/media/test_media_storage.py +++ b/tests/media/test_media_storage.py @@ -317,7 +317,7 @@ def _req( def test_handle_missing_content_type(self) -> None: channel = self._req( - b"inline; filename=out" + self.test_image.extension, + b"attachment; filename=out" + self.test_image.extension, include_content_type=False, ) headers = channel.headers @@ -331,7 +331,7 @@ def test_disposition_filename_ascii(self) -> None: If the filename is filename= then Synapse will decode it as an ASCII string, and use filename= in the response. """ - channel = self._req(b"inline; filename=out" + self.test_image.extension) + channel = self._req(b"attachment; filename=out" + self.test_image.extension) headers = channel.headers self.assertEqual( @@ -339,7 +339,7 @@ def test_disposition_filename_ascii(self) -> None: ) self.assertEqual( headers.getRawHeaders(b"Content-Disposition"), - [b"inline; filename=out" + self.test_image.extension], + [b"attachment; filename=out" + self.test_image.extension], ) def test_disposition_filenamestar_utf8escaped(self) -> None: @@ -350,7 +350,7 @@ def test_disposition_filenamestar_utf8escaped(self) -> None: """ filename = parse.quote("\u2603".encode()).encode("ascii") channel = self._req( - b"inline; filename*=utf-8''" + filename + self.test_image.extension + b"attachment; filename*=utf-8''" + filename + self.test_image.extension ) headers = channel.headers @@ -359,13 +359,13 @@ def test_disposition_filenamestar_utf8escaped(self) -> None: ) self.assertEqual( headers.getRawHeaders(b"Content-Disposition"), - [b"inline; filename*=utf-8''" + filename + self.test_image.extension], + [b"attachment; filename*=utf-8''" + filename + self.test_image.extension], ) def test_disposition_none(self) -> None: """ - If there is no filename, one isn't passed on in the Content-Disposition - of the request. + If there is no filename, Content-Disposition should only + be a disposition type. 
""" channel = self._req(None) @@ -373,7 +373,7 @@ def test_disposition_none(self) -> None: self.assertEqual( headers.getRawHeaders(b"Content-Type"), [self.test_image.content_type] ) - self.assertEqual(headers.getRawHeaders(b"Content-Disposition"), None) + self.assertEqual(headers.getRawHeaders(b"Content-Disposition"), [b"attachment"]) def test_thumbnail_crop(self) -> None: """Test that a cropped remote thumbnail is available.""" @@ -612,7 +612,7 @@ def test_x_robots_tag_header(self) -> None: Tests that the `X-Robots-Tag` header is present, which informs web crawlers to not index, archive, or follow links in media. """ - channel = self._req(b"inline; filename=out" + self.test_image.extension) + channel = self._req(b"attachment; filename=out" + self.test_image.extension) headers = channel.headers self.assertEqual( @@ -625,7 +625,7 @@ def test_cross_origin_resource_policy_header(self) -> None: Test that the Cross-Origin-Resource-Policy header is set to "cross-origin" allowing web clients to embed media from the downloads API. """ - channel = self._req(b"inline; filename=out" + self.test_image.extension) + channel = self._req(b"attachment; filename=out" + self.test_image.extension) headers = channel.headers From f63d4a3a65e95d3845c43a9dd2893605b06f164a Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Fri, 16 Jun 2023 12:15:12 +0200 Subject: [PATCH 083/153] Regularly try to wake up dests instead of waiting for next PDU/EDU (#15743) --- changelog.d/15743.misc | 1 + synapse/federation/sender/__init__.py | 34 +++++++++----------- tests/federation/test_federation_catch_up.py | 22 ++++++------- 3 files changed, 26 insertions(+), 31 deletions(-) create mode 100644 changelog.d/15743.misc diff --git a/changelog.d/15743.misc b/changelog.d/15743.misc new file mode 100644 index 000000000000..b95eed929e4b --- /dev/null +++ b/changelog.d/15743.misc @@ -0,0 +1 @@ +Regularly try to send transactions to other servers after they failed instead of waiting for a new event to be available before trying. diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py index f3bdc5a4d220..97abbdee18e5 100644 --- a/synapse/federation/sender/__init__.py +++ b/synapse/federation/sender/__init__.py @@ -109,10 +109,8 @@ If a remote server is unreachable over federation, we back off from that server, with an exponentially-increasing retry interval. -Whilst we don't automatically retry after the interval, we prevent making new attempts -until such time as the back-off has cleared. -Once the back-off is cleared and a new PDU or EDU arrives for transmission, the transmission -loop resumes and empties the queue by making federation requests. +We automatically retry after the retry interval expires (roughly, the logic to do so +being triggered every minute). If the backoff grows too large (> 1 hour), the in-memory queue is emptied (to prevent unbounded growth) and Catch-Up Mode is entered. @@ -145,7 +143,6 @@ from typing_extensions import Literal from twisted.internet import defer -from twisted.internet.interfaces import IDelayedCall import synapse.metrics from synapse.api.presence import UserPresenceState @@ -184,14 +181,18 @@ "Total number of PDUs queued for sending across all destinations", ) -# Time (in s) after Synapse's startup that we will begin to wake up destinations -# that have catch-up outstanding. -CATCH_UP_STARTUP_DELAY_SEC = 15 +# Time (in s) to wait before trying to wake up destinations that have +# catch-up outstanding. 
This will also be the delay applied at startup +# before trying the same. +# Please note that rate limiting still applies, so while the loop is +# executed every X seconds the destinations may not be wake up because +# they are being rate limited following previous attempt failures. +WAKEUP_RETRY_PERIOD_SEC = 60 # Time (in s) to wait in between waking up each destination, i.e. one destination -# will be woken up every seconds after Synapse's startup until we have woken -# every destination has outstanding catch-up. -CATCH_UP_STARTUP_INTERVAL_SEC = 5 +# will be woken up every seconds until we have woken every destination +# has outstanding catch-up. +WAKEUP_INTERVAL_BETWEEN_DESTINATIONS_SEC = 5 class AbstractFederationSender(metaclass=abc.ABCMeta): @@ -415,12 +416,10 @@ def __init__(self, hs: "HomeServer"): / hs.config.ratelimiting.federation_rr_transactions_per_room_per_second ) - # wake up destinations that have outstanding PDUs to be caught up - self._catchup_after_startup_timer: Optional[ - IDelayedCall - ] = self.clock.call_later( - CATCH_UP_STARTUP_DELAY_SEC, + # Regularly wake up destinations that have outstanding PDUs to be caught up + self.clock.looping_call( run_as_background_process, + WAKEUP_RETRY_PERIOD_SEC * 1000.0, "wake_destinations_needing_catchup", self._wake_destinations_needing_catchup, ) @@ -966,7 +965,6 @@ async def _wake_destinations_needing_catchup(self) -> None: if not destinations_to_wake: # finished waking all destinations! - self._catchup_after_startup_timer = None break last_processed = destinations_to_wake[-1] @@ -983,4 +981,4 @@ async def _wake_destinations_needing_catchup(self) -> None: last_processed, ) self.wake_destination(destination) - await self.clock.sleep(CATCH_UP_STARTUP_INTERVAL_SEC) + await self.clock.sleep(WAKEUP_INTERVAL_BETWEEN_DESTINATIONS_SEC) diff --git a/tests/federation/test_federation_catch_up.py b/tests/federation/test_federation_catch_up.py index 391ae5170729..b290b020a274 100644 --- a/tests/federation/test_federation_catch_up.py +++ b/tests/federation/test_federation_catch_up.py @@ -431,28 +431,24 @@ def test_catch_up_on_synapse_startup(self) -> None: # ACT: call _wake_destinations_needing_catchup # patch wake_destination to just count the destinations instead - woken = [] + woken = set() def wake_destination_track(destination: str) -> None: - woken.append(destination) + woken.add(destination) self.federation_sender.wake_destination = wake_destination_track # type: ignore[assignment] - # cancel the pre-existing timer for _wake_destinations_needing_catchup - # this is because we are calling it manually rather than waiting for it - # to be called automatically - assert self.federation_sender._catchup_after_startup_timer is not None - self.federation_sender._catchup_after_startup_timer.cancel() - - self.get_success( - self.federation_sender._wake_destinations_needing_catchup(), by=5.0 - ) + # We wait quite long so that all dests can be woken up, since there is a delay + # between them. + self.pump(by=5.0) # ASSERT (_wake_destinations_needing_catchup): # - all remotes are woken up, save for zzzerver self.assertNotIn("zzzerver", woken) - # - all destinations are woken exactly once; they appear once in woken. - self.assertCountEqual(woken, server_names[:-1]) + # - all destinations are woken, potentially more than once, since the + # wake up is called regularly and we don't ack in this test that a transaction + # has been successfully sent. 
+ self.assertCountEqual(woken, set(server_names[:-1])) def test_not_latest_event(self) -> None: """Test that we send the latest event in the room even if its not ours.""" From 0618bf94cdc56631e670b4e93e4dfaeae2162e73 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Fri, 16 Jun 2023 14:17:02 +0200 Subject: [PATCH 084/153] push rules: fix internal conversion from _type to value (#15781) Also fix wrong rule names for `is_user_mention` and `is_room_mention`. --- changelog.d/15781.bugfix | 1 + rust/src/push/base_rules.rs | 4 +- synapse/push/clientformat.py | 26 ++++----- tests/rest/client/test_push_rule_attrs.py | 67 +++++++++++++++++++++++ 4 files changed, 81 insertions(+), 17 deletions(-) create mode 100644 changelog.d/15781.bugfix diff --git a/changelog.d/15781.bugfix b/changelog.d/15781.bugfix new file mode 100644 index 000000000000..5faf59afee0c --- /dev/null +++ b/changelog.d/15781.bugfix @@ -0,0 +1 @@ +Fix a bug in push rules handling leading to an invalid (per spec) `is_user_mention` rule sent to clients. Also fix wrong rule names for `is_user_mention` and `is_room_mention`. \ No newline at end of file diff --git a/rust/src/push/base_rules.rs b/rust/src/push/base_rules.rs index 9d6c304d9285..7eea9313f038 100644 --- a/rust/src/push/base_rules.rs +++ b/rust/src/push/base_rules.rs @@ -142,7 +142,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[ default_enabled: true, }, PushRule { - rule_id: Cow::Borrowed("global/override/.m.is_user_mention"), + rule_id: Cow::Borrowed("global/override/.m.rule.is_user_mention"), priority_class: 5, conditions: Cow::Borrowed(&[Condition::Known( KnownCondition::ExactEventPropertyContainsType(EventPropertyIsTypeCondition { @@ -163,7 +163,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[ default_enabled: true, }, PushRule { - rule_id: Cow::Borrowed("global/override/.m.is_room_mention"), + rule_id: Cow::Borrowed("global/override/.m.rule.is_room_mention"), priority_class: 5, conditions: Cow::Borrowed(&[ Condition::Known(KnownCondition::EventPropertyIs(EventPropertyIsCondition { diff --git a/synapse/push/clientformat.py b/synapse/push/clientformat.py index 88b52c26a008..735cef0aed76 100644 --- a/synapse/push/clientformat.py +++ b/synapse/push/clientformat.py @@ -41,12 +41,7 @@ def format_push_rules_for_user( rulearray.append(template_rule) - for type_key in ("pattern", "value"): - type_value = template_rule.pop(f"{type_key}_type", None) - if type_value == "user_id": - template_rule[type_key] = user.to_string() - elif type_value == "user_localpart": - template_rule[type_key] = user.localpart + _convert_type_to_value(template_rule, user) template_rule["enabled"] = enabled @@ -63,19 +58,20 @@ def format_push_rules_for_user( for c in template_rule["conditions"]: c.pop("_cache_key", None) - pattern_type = c.pop("pattern_type", None) - if pattern_type == "user_id": - c["pattern"] = user.to_string() - elif pattern_type == "user_localpart": - c["pattern"] = user.localpart - - sender_type = c.pop("sender_type", None) - if sender_type == "user_id": - c["sender"] = user.to_string() + _convert_type_to_value(c, user) return rules +def _convert_type_to_value(rule_or_cond: Dict[str, Any], user: UserID) -> None: + for type_key in ("pattern", "value"): + type_value = rule_or_cond.pop(f"{type_key}_type", None) + if type_value == "user_id": + rule_or_cond[type_key] = user.to_string() + elif type_value == "user_localpart": + rule_or_cond[type_key] = user.localpart + + def _add_empty_priority_class_arrays(d: Dict[str, list]) -> Dict[str, list]: for pc in 
PRIORITY_CLASS_MAP.keys(): d[pc] = [] diff --git a/tests/rest/client/test_push_rule_attrs.py b/tests/rest/client/test_push_rule_attrs.py index 4f875b92898e..5aca74475f6f 100644 --- a/tests/rest/client/test_push_rule_attrs.py +++ b/tests/rest/client/test_push_rule_attrs.py @@ -412,3 +412,70 @@ def test_actions_404_when_put_non_existent_server_rule(self) -> None: ) self.assertEqual(channel.code, 404) self.assertEqual(channel.json_body["errcode"], Codes.NOT_FOUND) + + def test_contains_user_name(self) -> None: + """ + Tests that `contains_user_name` rule is present and have proper value in `pattern`. + """ + username = "bob" + self.register_user(username, "pass") + token = self.login(username, "pass") + + channel = self.make_request( + "GET", + "/pushrules/global/content/.m.rule.contains_user_name", + access_token=token, + ) + + self.assertEqual(channel.code, 200) + + self.assertEqual( + { + "rule_id": ".m.rule.contains_user_name", + "default": True, + "enabled": True, + "pattern": username, + "actions": [ + "notify", + {"set_tweak": "highlight"}, + {"set_tweak": "sound", "value": "default"}, + ], + }, + channel.json_body, + ) + + def test_is_user_mention(self) -> None: + """ + Tests that `is_user_mention` rule is present and have proper value in `value`. + """ + user = self.register_user("bob", "pass") + token = self.login("bob", "pass") + + channel = self.make_request( + "GET", + "/pushrules/global/override/.m.rule.is_user_mention", + access_token=token, + ) + + self.assertEqual(channel.code, 200) + + self.assertEqual( + { + "rule_id": ".m.rule.is_user_mention", + "default": True, + "enabled": True, + "conditions": [ + { + "kind": "event_property_contains", + "key": "content.m\\.mentions.user_ids", + "value": user, + } + ], + "actions": [ + "notify", + {"set_tweak": "highlight"}, + {"set_tweak": "sound", "value": "default"}, + ], + }, + channel.json_body, + ) From 2ac6c3bbb535677bd62b3df425dd1755dba79b66 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 16 Jun 2023 15:25:44 +0100 Subject: [PATCH 085/153] Don't always lock "user_ips" table when performing non-native upsert (#15788) --- changelog.d/15788.bugfix | 1 + synapse/storage/database.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15788.bugfix diff --git a/changelog.d/15788.bugfix b/changelog.d/15788.bugfix new file mode 100644 index 000000000000..d22aae7baf89 --- /dev/null +++ b/changelog.d/15788.bugfix @@ -0,0 +1 @@ +Fix a bug introduced in 1.57.0 where the wrong table would be locked on updating database rows when using SQLite as the database backend. \ No newline at end of file diff --git a/synapse/storage/database.py b/synapse/storage/database.py index 10fa6c4802ee..7e49ae11bc0e 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -1529,7 +1529,7 @@ def simple_upsert_many_txn_emulated( # Lock the table just once, to prevent it being done once per row. # Note that, according to Postgres' documentation, once obtained, # the lock is held for the remainder of the current transaction. 
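Stepping back to the push-rule change above: the new `_convert_type_to_value` helper in `synapse/push/clientformat.py` applies the `pattern_type`/`value_type` placeholder substitution uniformly to both template rules and their conditions. A standalone sketch of that logic; `FakeUser` is a stand-in for `synapse.types.UserID`, used here only so the example runs on its own:

from typing import Any, Dict

class FakeUser:
    # Minimal stand-in for synapse.types.UserID, for illustration only.
    def __init__(self, localpart: str, domain: str) -> None:
        self.localpart = localpart
        self._domain = domain

    def to_string(self) -> str:
        return f"@{self.localpart}:{self._domain}"

def convert_type_to_value(rule_or_cond: Dict[str, Any], user: FakeUser) -> None:
    # Replace "<key>_type" placeholders with the concrete value for this user,
    # mirroring the helper added in the hunk above.
    for type_key in ("pattern", "value"):
        type_value = rule_or_cond.pop(f"{type_key}_type", None)
        if type_value == "user_id":
            rule_or_cond[type_key] = user.to_string()
        elif type_value == "user_localpart":
            rule_or_cond[type_key] = user.localpart

condition = {
    "kind": "event_property_contains",
    "key": "content.m\\.mentions.user_ids",
    "value_type": "user_id",
}
convert_type_to_value(condition, FakeUser("bob", "example.org"))
assert condition["value"] == "@bob:example.org" and "value_type" not in condition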
- self.engine.lock_table(txn, "user_ips") + self.engine.lock_table(txn, table) for keyv, valv in zip(key_values, value_values): _keys = dict(zip(key_names, keyv)) From 0f02f0b4da92229e88e27a92ea3bfa523457bfc1 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 16 Jun 2023 14:12:24 -0500 Subject: [PATCH 086/153] Remove experimental MSC2716 implementation to incrementally import history into existing rooms (#15748) Context for why we're removing the implementation: - https://github.com/matrix-org/matrix-spec-proposals/pull/2716#issuecomment-1487441010 - https://github.com/matrix-org/matrix-spec-proposals/pull/2716#issuecomment-1504262734 Anyone wanting to continue MSC2716, should also address these leftover tasks: https://github.com/matrix-org/synapse/issues/10737 Closes https://github.com/matrix-org/synapse/issues/10737 in the fact that it is not longer necessary to track those things. --- changelog.d/15748.removal | 1 + .../conf/workers-shared-extra.yaml.j2 | 2 - docker/configure_workers_and_start.py | 1 - docs/workers.md | 1 - scripts-dev/complement.sh | 4 - synapse/api/constants.py | 14 - synapse/api/room_versions.py | 61 --- synapse/app/generic_worker.py | 2 - synapse/config/experimental.py | 3 - synapse/event_auth.py | 40 -- synapse/events/__init__.py | 9 - synapse/events/utils.py | 9 - synapse/handlers/federation.py | 33 +- synapse/handlers/federation_event.py | 109 ---- synapse/handlers/message.py | 168 +------ synapse/handlers/room_batch.py | 466 ------------------ synapse/handlers/room_member.py | 54 +- synapse/push/bulk_push_rule_evaluator.py | 1 - synapse/rest/__init__.py | 2 - synapse/rest/client/room_batch.py | 254 ---------- synapse/rest/client/versions.py | 2 - synapse/server.py | 5 - synapse/storage/databases/main/__init__.py | 2 - .../databases/main/event_federation.py | 211 +------- synapse/storage/databases/main/events.py | 125 ----- synapse/storage/databases/main/room_batch.py | 47 -- tests/rest/client/test_room_batch.py | 302 ------------ tests/storage/test_event_federation.py | 211 -------- 28 files changed, 36 insertions(+), 2103 deletions(-) create mode 100644 changelog.d/15748.removal delete mode 100644 synapse/handlers/room_batch.py delete mode 100644 synapse/rest/client/room_batch.py delete mode 100644 synapse/storage/databases/main/room_batch.py delete mode 100644 tests/rest/client/test_room_batch.py diff --git a/changelog.d/15748.removal b/changelog.d/15748.removal new file mode 100644 index 000000000000..dcb978017839 --- /dev/null +++ b/changelog.d/15748.removal @@ -0,0 +1 @@ +Remove experimental [MSC2716](https://github.com/matrix-org/matrix-spec-proposals/pull/2716) implementation to incrementally import history into existing rooms. 
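Before the MSC2716 removal below, a note on the one-line `database.py` fix above: the emulated many-row upsert must lock the table it is actually writing, whereas it previously always locked `user_ips`. A simplified sketch of that shape; the function and variable names here are invented and the per-row work is elided:

def emulated_upsert_many_sketch(
    txn, engine, table, key_names, key_values, value_names, value_values
) -> None:
    # Take the table-level lock once, up front, so concurrent transactions
    # cannot race the per-row SELECT-then-INSERT/UPDATE that follows.
    # The fix is simply to lock `table` rather than a hard-coded "user_ips".
    engine.lock_table(txn, table)
    for keyv, valv in zip(key_values, value_values):
        row_keys = dict(zip(key_names, keyv))
        row_values = dict(zip(value_names, valv))
        # ... look the row up by row_keys, then UPDATE it or INSERT row_values ...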
diff --git a/docker/complement/conf/workers-shared-extra.yaml.j2 b/docker/complement/conf/workers-shared-extra.yaml.j2 index 63acf86a4619..2b11b487f639 100644 --- a/docker/complement/conf/workers-shared-extra.yaml.j2 +++ b/docker/complement/conf/workers-shared-extra.yaml.j2 @@ -92,8 +92,6 @@ allow_device_name_lookup_over_federation: true ## Experimental Features ## experimental_features: - # Enable history backfilling support - msc2716_enabled: true # client-side support for partial state in /send_join responses faster_joins: true # Enable support for polls diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index 87a740e3d433..62fb88daabb5 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -244,7 +244,6 @@ "^/_matrix/client/(api/v1|r0|v3|unstable)/join/", "^/_matrix/client/(api/v1|r0|v3|unstable)/knock/", "^/_matrix/client/(api/v1|r0|v3|unstable)/profile/", - "^/_matrix/client/(v1|unstable/org.matrix.msc2716)/rooms/.*/batch_send", ], "shared_extra_conf": {}, "worker_extra_conf": "", diff --git a/docs/workers.md b/docs/workers.md index 991814c0bc81..735128762a47 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -232,7 +232,6 @@ information. ^/_matrix/client/v1/rooms/.*/hierarchy$ ^/_matrix/client/(v1|unstable)/rooms/.*/relations/ ^/_matrix/client/v1/rooms/.*/threads$ - ^/_matrix/client/unstable/org.matrix.msc2716/rooms/.*/batch_send$ ^/_matrix/client/unstable/im.nheko.summary/rooms/.*/summary$ ^/_matrix/client/(r0|v3|unstable)/account/3pid$ ^/_matrix/client/(r0|v3|unstable)/account/whoami$ diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh index 131f26234ece..24b83cfeb605 100755 --- a/scripts-dev/complement.sh +++ b/scripts-dev/complement.sh @@ -246,10 +246,6 @@ else else export PASS_SYNAPSE_COMPLEMENT_DATABASE=sqlite fi - - # The tests for importing historical messages (MSC2716) - # only pass with monoliths, currently. - test_tags="$test_tags,msc2716" fi if [[ -n "$ASYNCIO_REACTOR" ]]; then diff --git a/synapse/api/constants.py b/synapse/api/constants.py index faf0770c663a..dc32553d0cca 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -123,10 +123,6 @@ class EventTypes: SpaceChild: Final = "m.space.child" SpaceParent: Final = "m.space.parent" - MSC2716_INSERTION: Final = "org.matrix.msc2716.insertion" - MSC2716_BATCH: Final = "org.matrix.msc2716.batch" - MSC2716_MARKER: Final = "org.matrix.msc2716.marker" - Reaction: Final = "m.reaction" @@ -222,16 +218,6 @@ class EventContentFields: # Used in m.room.guest_access events. GUEST_ACCESS: Final = "guest_access" - # Used on normal messages to indicate they were historically imported after the fact - MSC2716_HISTORICAL: Final = "org.matrix.msc2716.historical" - # For "insertion" events to indicate what the next batch ID should be in - # order to connect to it - MSC2716_NEXT_BATCH_ID: Final = "next_batch_id" - # Used on "batch" events to indicate which insertion event it connects to - MSC2716_BATCH_ID: Final = "batch_id" - # For "marker" events - MSC2716_INSERTION_EVENT_REFERENCE: Final = "insertion_event_reference" - # The authorising user for joining a restricted room. 
AUTHORISING_USER: Final = "join_authorised_via_users_server" diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index c5c71e242fea..25c105a4c80e 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -91,11 +91,6 @@ class RoomVersion: # MSC2403: Allows join_rules to be set to 'knock', changes auth rules to allow sending # m.room.membership event with membership 'knock'. msc2403_knocking: bool - # MSC2716: Adds m.room.power_levels -> content.historical field to control - # whether "insertion", "chunk", "marker" events can be sent - msc2716_historical: bool - # MSC2716: Adds support for redacting "insertion", "chunk", and "marker" events - msc2716_redactions: bool # MSC3389: Protect relation information from redaction. msc3389_relation_redactions: bool # MSC3787: Adds support for a `knock_restricted` join rule, mixing concepts of @@ -130,8 +125,6 @@ class RoomVersions: msc3083_join_rules=False, msc3375_redaction_rules=False, msc2403_knocking=False, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, @@ -153,8 +146,6 @@ class RoomVersions: msc3083_join_rules=False, msc3375_redaction_rules=False, msc2403_knocking=False, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, @@ -176,8 +167,6 @@ class RoomVersions: msc3083_join_rules=False, msc3375_redaction_rules=False, msc2403_knocking=False, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, @@ -199,8 +188,6 @@ class RoomVersions: msc3083_join_rules=False, msc3375_redaction_rules=False, msc2403_knocking=False, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, @@ -222,8 +209,6 @@ class RoomVersions: msc3083_join_rules=False, msc3375_redaction_rules=False, msc2403_knocking=False, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, @@ -245,8 +230,6 @@ class RoomVersions: msc3083_join_rules=False, msc3375_redaction_rules=False, msc2403_knocking=False, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, @@ -268,8 +251,6 @@ class RoomVersions: msc3083_join_rules=False, msc3375_redaction_rules=False, msc2403_knocking=False, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, @@ -291,8 +272,6 @@ class RoomVersions: msc3083_join_rules=False, msc3375_redaction_rules=False, msc2403_knocking=True, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, @@ -314,8 +293,6 @@ class RoomVersions: msc3083_join_rules=True, msc3375_redaction_rules=False, msc2403_knocking=True, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, @@ -337,8 +314,6 @@ class RoomVersions: 
msc3083_join_rules=True, msc3375_redaction_rules=True, msc2403_knocking=True, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, @@ -360,8 +335,6 @@ class RoomVersions: msc3083_join_rules=True, msc3375_redaction_rules=True, msc2403_knocking=True, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=True, msc3667_int_only_power_levels=False, @@ -383,8 +356,6 @@ class RoomVersions: msc3083_join_rules=True, msc3375_redaction_rules=True, msc2403_knocking=True, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, @@ -406,8 +377,6 @@ class RoomVersions: msc3083_join_rules=True, msc3375_redaction_rules=True, msc2403_knocking=True, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=True, msc3667_int_only_power_levels=True, @@ -415,29 +384,6 @@ class RoomVersions: msc3931_push_features=(), msc3989_redaction_rules=False, ) - MSC2716v4 = RoomVersion( - "org.matrix.msc2716v4", - RoomDisposition.UNSTABLE, - EventFormatVersions.ROOM_V4_PLUS, - StateResolutionVersions.V2, - enforce_key_validity=True, - special_case_aliases_auth=False, - strict_canonicaljson=True, - limit_notifications_power_levels=True, - msc2175_implicit_room_creator=False, - msc2176_redaction_rules=False, - msc3083_join_rules=False, - msc3375_redaction_rules=False, - msc2403_knocking=True, - msc2716_historical=True, - msc2716_redactions=True, - msc3389_relation_redactions=False, - msc3787_knock_restricted_join_rule=False, - msc3667_int_only_power_levels=False, - msc3821_redaction_rules=False, - msc3931_push_features=(), - msc3989_redaction_rules=False, - ) MSC1767v10 = RoomVersion( # MSC1767 (Extensible Events) based on room version "10" "org.matrix.msc1767.10", @@ -453,8 +399,6 @@ class RoomVersions: msc3083_join_rules=True, msc3375_redaction_rules=True, msc2403_knocking=True, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=True, msc3667_int_only_power_levels=True, @@ -476,8 +420,6 @@ class RoomVersions: msc3083_join_rules=True, msc3375_redaction_rules=True, msc2403_knocking=True, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=True, msc3667_int_only_power_levels=True, @@ -500,8 +442,6 @@ class RoomVersions: msc3083_join_rules=True, msc3375_redaction_rules=True, msc2403_knocking=True, - msc2716_historical=False, - msc2716_redactions=False, msc3389_relation_redactions=False, msc3787_knock_restricted_join_rule=True, msc3667_int_only_power_levels=True, @@ -526,7 +466,6 @@ class RoomVersions: RoomVersions.V9, RoomVersions.MSC3787, RoomVersions.V10, - RoomVersions.MSC2716v4, RoomVersions.MSC3989, RoomVersions.MSC3820opt2, ) diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 909ebccf78cb..7406c3948cb1 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -83,7 +83,6 @@ from synapse.storage.databases.main.registration import RegistrationWorkerStore from synapse.storage.databases.main.relations import RelationsWorkerStore from synapse.storage.databases.main.room import RoomWorkerStore -from synapse.storage.databases.main.room_batch import RoomBatchStore 
from synapse.storage.databases.main.roommember import RoomMemberWorkerStore from synapse.storage.databases.main.search import SearchStore from synapse.storage.databases.main.session import SessionStore @@ -120,7 +119,6 @@ class GenericWorkerStore( # the races it creates aren't too bad. KeyStore, RoomWorkerStore, - RoomBatchStore, DirectoryWorkerStore, PushRulesWorkerStore, ApplicationServiceTransactionWorkerStore, diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 1d5b5ded455d..8e0f5356b42d 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -247,9 +247,6 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: # MSC3026 (busy presence state) self.msc3026_enabled: bool = experimental.get("msc3026_enabled", False) - # MSC2716 (importing historical messages) - self.msc2716_enabled: bool = experimental.get("msc2716_enabled", False) - # MSC3244 (room version capabilities) self.msc3244_enabled: bool = experimental.get("msc3244_enabled", True) diff --git a/synapse/event_auth.py b/synapse/event_auth.py index b4b43ec4d720..3aaf53dfbdd6 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -339,13 +339,6 @@ def check_state_dependent_auth_rules( if event.type == EventTypes.Redaction: check_redaction(event.room_version, event, auth_dict) - if ( - event.type == EventTypes.MSC2716_INSERTION - or event.type == EventTypes.MSC2716_BATCH - or event.type == EventTypes.MSC2716_MARKER - ): - check_historical(event.room_version, event, auth_dict) - logger.debug("Allowing! %s", event) @@ -365,7 +358,6 @@ def check_state_dependent_auth_rules( RoomVersions.V9, RoomVersions.MSC3787, RoomVersions.V10, - RoomVersions.MSC2716v4, RoomVersions.MSC1767v10, } @@ -823,38 +815,6 @@ def check_redaction( raise AuthError(403, "You don't have permission to redact events") -def check_historical( - room_version_obj: RoomVersion, - event: "EventBase", - auth_events: StateMap["EventBase"], -) -> None: - """Check whether the event sender is allowed to send historical related - events like "insertion", "batch", and "marker". - - Returns: - None - - Raises: - AuthError if the event sender is not allowed to send historical related events - ("insertion", "batch", and "marker"). 
- """ - # Ignore the auth checks in room versions that do not support historical - # events - if not room_version_obj.msc2716_historical: - return - - user_level = get_user_power_level(event.user_id, auth_events) - - historical_level = get_named_level(auth_events, "historical", 100) - - if user_level < historical_level: - raise UnstableSpecAuthError( - 403, - 'You don\'t have permission to send send historical related events ("insertion", "batch", and "marker")', - errcode=Codes.INSUFFICIENT_POWER, - ) - - def _check_power_levels( room_version_obj: RoomVersion, event: "EventBase", diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index de7e5be42bec..75b62adb33b3 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -198,7 +198,6 @@ def __init__(self, internal_metadata_dict: JsonDict): soft_failed: DictProperty[bool] = DictProperty("soft_failed") proactively_send: DictProperty[bool] = DictProperty("proactively_send") redacted: DictProperty[bool] = DictProperty("redacted") - historical: DictProperty[bool] = DictProperty("historical") txn_id: DictProperty[str] = DictProperty("txn_id") """The transaction ID, if it was set when the event was created.""" @@ -288,14 +287,6 @@ def is_redacted(self) -> bool: """ return self._dict.get("redacted", False) - def is_historical(self) -> bool: - """Whether this is a historical message. - This is used by the batchsend historical message endpoint and - is needed to and mark the event as backfilled and skip some checks - like push notifications. - """ - return self._dict.get("historical", False) - def is_notifiable(self) -> bool: """Whether this event can trigger a push notification""" return not self.is_outlier() or self.is_out_of_band_membership() diff --git a/synapse/events/utils.py b/synapse/events/utils.py index e7b7b78b8454..a55efcca56d4 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -164,21 +164,12 @@ def add_fields(*fields: str) -> None: if room_version.msc2176_redaction_rules: add_fields("invite") - if room_version.msc2716_historical: - add_fields("historical") - elif event_type == EventTypes.Aliases and room_version.special_case_aliases_auth: add_fields("aliases") elif event_type == EventTypes.RoomHistoryVisibility: add_fields("history_visibility") elif event_type == EventTypes.Redaction and room_version.msc2176_redaction_rules: add_fields("redacts") - elif room_version.msc2716_redactions and event_type == EventTypes.MSC2716_INSERTION: - add_fields(EventContentFields.MSC2716_NEXT_BATCH_ID) - elif room_version.msc2716_redactions and event_type == EventTypes.MSC2716_BATCH: - add_fields(EventContentFields.MSC2716_BATCH_ID) - elif room_version.msc2716_redactions and event_type == EventTypes.MSC2716_MARKER: - add_fields(EventContentFields.MSC2716_INSERTION_EVENT_REFERENCE) # Protect the rel_type and event_id fields under the m.relates_to field. 
if room_version.msc3389_relation_redactions: diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index b7b5e2102036..cc5ed977300c 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -105,14 +105,12 @@ ) +# TODO: We can refactor this away now that there is only one backfill point again class _BackfillPointType(Enum): # a regular backwards extremity (ie, an event which we don't yet have, but which # is referred to by other events in the DAG) BACKWARDS_EXTREMITY = enum.auto() - # an MSC2716 "insertion event" - INSERTION_PONT = enum.auto() - @attr.s(slots=True, auto_attribs=True, frozen=True) class _BackfillPoint: @@ -273,32 +271,10 @@ async def _maybe_backfill_inner( ) ] - insertion_events_to_be_backfilled: List[_BackfillPoint] = [] - if self.hs.config.experimental.msc2716_enabled: - insertion_events_to_be_backfilled = [ - _BackfillPoint(event_id, depth, _BackfillPointType.INSERTION_PONT) - for event_id, depth in await self.store.get_insertion_event_backward_extremities_in_room( - room_id=room_id, - current_depth=current_depth, - # We only need to end up with 5 extremities combined with - # the backfill points to make the `/backfill` request ... - # (see the other comment above for more context). - limit=50, - ) - ] - logger.debug( - "_maybe_backfill_inner: backwards_extremities=%s insertion_events_to_be_backfilled=%s", - backwards_extremities, - insertion_events_to_be_backfilled, - ) - # we now have a list of potential places to backpaginate from. We prefer to # start with the most recent (ie, max depth), so let's sort the list. sorted_backfill_points: List[_BackfillPoint] = sorted( - itertools.chain( - backwards_extremities, - insertion_events_to_be_backfilled, - ), + backwards_extremities, key=lambda e: -int(e.depth), ) @@ -411,10 +387,7 @@ async def _maybe_backfill_inner( # event but not anything before it. This would require looking at the # state *before* the event, ignoring the special casing certain event # types have. - if bp.type == _BackfillPointType.INSERTION_PONT: - event_ids_to_check = [bp.event_id] - else: - event_ids_to_check = await self.store.get_successor_events(bp.event_id) + event_ids_to_check = await self.store.get_successor_events(bp.event_id) events_to_check = await self.store.get_events_as_list( event_ids_to_check, diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py index 42141d367057..d32d224d5640 100644 --- a/synapse/handlers/federation_event.py +++ b/synapse/handlers/federation_event.py @@ -601,18 +601,6 @@ async def process_remote_join( room_id, [(event, context)] ) - # If we're joining the room again, check if there is new marker - # state indicating that there is new history imported somewhere in - # the DAG. Multiple markers can exist in the current state with - # unique state_keys. - # - # Do this after the state from the remote join was persisted (via - # `persist_events_and_notify`). 
Otherwise we can run into a - # situation where the create event doesn't exist yet in the - # `current_state_events` - for e in state: - await self._handle_marker_event(origin, e) - return stream_id_after_persist async def update_state_for_partial_state_event( @@ -915,13 +903,6 @@ async def _process_new_pulled_events(new_events: Collection[EventBase]) -> None: ) ) - # We construct the event lists in source order from `/backfill` response because - # it's a) easiest, but also b) the order in which we process things matters for - # MSC2716 historical batches because many historical events are all at the same - # `depth` and we rely on the tenuous sort that the other server gave us and hope - # they're doing their best. The brittle nature of this ordering for historical - # messages over federation is one of the reasons why we don't want to continue - # on MSC2716 until we have online topological ordering. events_with_failed_pull_attempts, fresh_events = partition( new_events, lambda e: e.event_id in event_ids_with_failed_pull_attempts ) @@ -1460,8 +1441,6 @@ async def _process_received_pdu( await self._run_push_actions_and_persist_event(event, context, backfilled) - await self._handle_marker_event(origin, event) - if backfilled or context.rejected: return @@ -1559,94 +1538,6 @@ async def _resync_device(self, sender: str) -> None: except Exception: logger.exception("Failed to resync device for %s", sender) - @trace - async def _handle_marker_event(self, origin: str, marker_event: EventBase) -> None: - """Handles backfilling the insertion event when we receive a marker - event that points to one. - - Args: - origin: Origin of the event. Will be called to get the insertion event - marker_event: The event to process - """ - - if marker_event.type != EventTypes.MSC2716_MARKER: - # Not a marker event - return - - if marker_event.rejected_reason is not None: - # Rejected event - return - - # Skip processing a marker event if the room version doesn't - # support it or the event is not from the room creator. 
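To make the removed `_handle_marker_event` (deleted above and below) easier to follow, this is roughly the shape of the MSC2716 marker event it used to react to. The event type and content key come from the constants deleted earlier in this patch; the identifiers themselves are invented:

example_marker_event = {
    "type": "org.matrix.msc2716.marker",    # EventTypes.MSC2716_MARKER, removed above
    "sender": "@room_creator:example.org",  # only honoured when sent by the room creator
    "room_id": "!history:example.org",
    "content": {
        # EventContentFields.MSC2716_INSERTION_EVENT_REFERENCE: the insertion
        # event whose batch of historical messages should be backfilled.
        "insertion_event_reference": "$insertion_event_id",
    },
}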
- room_version = await self._store.get_room_version(marker_event.room_id) - create_event = await self._store.get_create_event_for_room(marker_event.room_id) - if not room_version.msc2175_implicit_room_creator: - room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR) - else: - room_creator = create_event.sender - if not room_version.msc2716_historical and ( - not self._config.experimental.msc2716_enabled - or marker_event.sender != room_creator - ): - return - - logger.debug("_handle_marker_event: received %s", marker_event) - - insertion_event_id = marker_event.content.get( - EventContentFields.MSC2716_INSERTION_EVENT_REFERENCE - ) - - if insertion_event_id is None: - # Nothing to retrieve then (invalid marker) - return - - already_seen_insertion_event = await self._store.have_seen_event( - marker_event.room_id, insertion_event_id - ) - if already_seen_insertion_event: - # No need to process a marker again if we have already seen the - # insertion event that it was pointing to - return - - logger.debug( - "_handle_marker_event: backfilling insertion event %s", insertion_event_id - ) - - await self._get_events_and_persist( - origin, - marker_event.room_id, - [insertion_event_id], - ) - - insertion_event = await self._store.get_event( - insertion_event_id, allow_none=True - ) - if insertion_event is None: - logger.warning( - "_handle_marker_event: server %s didn't return insertion event %s for marker %s", - origin, - insertion_event_id, - marker_event.event_id, - ) - return - - logger.debug( - "_handle_marker_event: succesfully backfilled insertion event %s from marker event %s", - insertion_event, - marker_event, - ) - - await self._store.insert_insertion_extremity( - insertion_event_id, marker_event.room_id - ) - - logger.debug( - "_handle_marker_event: insertion extremity added for %s from marker event %s", - insertion_event, - marker_event, - ) - async def backfill_event_id( self, destinations: List[str], room_id: str, event_id: str ) -> PulledPduInfo: diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 0b61c2272b90..4292b4703762 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -60,7 +60,6 @@ from synapse.replication.http.send_events import ReplicationSendEventsRestServlet from synapse.storage.databases.main.events_worker import EventRedactBehaviour from synapse.types import ( - MutableStateMap, PersistedEventPosition, Requester, RoomAlias, @@ -573,7 +572,6 @@ async def create_event( state_event_ids: Optional[List[str]] = None, require_consent: bool = True, outlier: bool = False, - historical: bool = False, depth: Optional[int] = None, state_map: Optional[StateMap[str]] = None, for_batch: bool = False, @@ -599,7 +597,7 @@ async def create_event( allow_no_prev_events: Whether to allow this event to be created an empty list of prev_events. Normally this is prohibited just because most events should have a prev_event and we should only use this in special - cases like MSC2716. + cases (previously useful for MSC2716). prev_event_ids: the forward extremities to use as the prev_events for the new event. @@ -614,13 +612,10 @@ async def create_event( If non-None, prev_event_ids must also be provided. state_event_ids: - The full state at a given event. This is used particularly by the MSC2716 - /batch_send endpoint. 
One use case is with insertion events which float at - the beginning of a historical batch and don't have any `prev_events` to - derive from; we add all of these state events as the explicit state so the - rest of the historical batch can inherit the same state and state_group. - This should normally be left as None, which will cause the auth_event_ids - to be calculated based on the room state at the prev_events. + The full state at a given event. This was previously used particularly + by the MSC2716 /batch_send endpoint. This should normally be left as + None, which will cause the auth_event_ids to be calculated based on the + room state at the prev_events. require_consent: Whether to check if the requester has consented to the privacy policy. @@ -629,10 +624,6 @@ async def create_event( it's from an arbitrary point and floating in the DAG as opposed to being inline with the current DAG. - historical: Indicates whether the message is being inserted - back in time around some existing events. This is used to skip - a few checks and mark the event as backfilled. - depth: Override the depth used to order the event in the DAG. Should normally be set to None, which will cause the depth to be calculated based on the prev_events. @@ -717,8 +708,6 @@ async def create_event( builder.internal_metadata.outlier = outlier - builder.internal_metadata.historical = historical - event, unpersisted_context = await self.create_new_client_event( builder=builder, requester=requester, @@ -947,7 +936,6 @@ async def create_and_send_nonmember_event( txn_id: Optional[str] = None, ignore_shadow_ban: bool = False, outlier: bool = False, - historical: bool = False, depth: Optional[int] = None, ) -> Tuple[EventBase, int]: """ @@ -961,19 +949,16 @@ async def create_and_send_nonmember_event( allow_no_prev_events: Whether to allow this event to be created an empty list of prev_events. Normally this is prohibited just because most events should have a prev_event and we should only use this in special - cases like MSC2716. + cases (previously useful for MSC2716). prev_event_ids: The event IDs to use as the prev events. Should normally be left as None to automatically request them from the database. state_event_ids: - The full state at a given event. This is used particularly by the MSC2716 - /batch_send endpoint. One use case is with insertion events which float at - the beginning of a historical batch and don't have any `prev_events` to - derive from; we add all of these state events as the explicit state so the - rest of the historical batch can inherit the same state and state_group. - This should normally be left as None, which will cause the auth_event_ids - to be calculated based on the room state at the prev_events. + The full state at a given event. This was previously used particularly + by the MSC2716 /batch_send endpoint. This should normally be left as + None, which will cause the auth_event_ids to be calculated based on the + room state at the prev_events. ratelimit: Whether to rate limit this send. txn_id: The transaction ID. ignore_shadow_ban: True if shadow-banned users should be allowed to @@ -981,9 +966,6 @@ async def create_and_send_nonmember_event( outlier: Indicates whether the event is an `outlier`, i.e. if it's from an arbitrary point and floating in the DAG as opposed to being inline with the current DAG. - historical: Indicates whether the message is being inserted - back in time around some existing events. This is used to skip - a few checks and mark the event as backfilled. 
depth: Override the depth used to order the event in the DAG. Should normally be set to None, which will cause the depth to be calculated based on the prev_events. @@ -1053,7 +1035,6 @@ async def create_and_send_nonmember_event( prev_event_ids=prev_event_ids, state_event_ids=state_event_ids, outlier=outlier, - historical=historical, depth=depth, ) context = await unpersisted_context.persist(event) @@ -1145,7 +1126,7 @@ async def create_new_client_event( allow_no_prev_events: Whether to allow this event to be created an empty list of prev_events. Normally this is prohibited just because most events should have a prev_event and we should only use this in special - cases like MSC2716. + cases (previously useful for MSC2716). prev_event_ids: the forward extremities to use as the prev_events for the new event. @@ -1158,13 +1139,10 @@ async def create_new_client_event( based on the room state at the prev_events. state_event_ids: - The full state at a given event. This is used particularly by the MSC2716 - /batch_send endpoint. One use case is with insertion events which float at - the beginning of a historical batch and don't have any `prev_events` to - derive from; we add all of these state events as the explicit state so the - rest of the historical batch can inherit the same state and state_group. - This should normally be left as None, which will cause the auth_event_ids - to be calculated based on the room state at the prev_events. + The full state at a given event. This was previously used particularly + by the MSC2716 /batch_send endpoint. This should normally be left as + None, which will cause the auth_event_ids to be calculated based on the + room state at the prev_events. depth: Override the depth used to order the event in the DAG. Should normally be set to None, which will cause the depth to be calculated @@ -1261,52 +1239,6 @@ async def create_new_client_event( if builder.internal_metadata.outlier: event.internal_metadata.outlier = True context = EventContext.for_outlier(self._storage_controllers) - elif ( - event.type == EventTypes.MSC2716_INSERTION - and state_event_ids - and builder.internal_metadata.is_historical() - ): - # Add explicit state to the insertion event so it has state to derive - # from even though it's floating with no `prev_events`. The rest of - # the batch can derive from this state and state_group. - # - # TODO(faster_joins): figure out how this works, and make sure that the - # old state is complete. - # https://github.com/matrix-org/synapse/issues/13003 - metadata = await self.store.get_metadata_for_events(state_event_ids) - - state_map_for_event: MutableStateMap[str] = {} - for state_id in state_event_ids: - data = metadata.get(state_id) - if data is None: - # We're trying to persist a new historical batch of events - # with the given state, e.g. via - # `RoomBatchSendEventRestServlet`. The state can be inferred - # by Synapse or set directly by the client. - # - # Either way, we should have persisted all the state before - # getting here. - raise Exception( - f"State event {state_id} not found in DB," - " Synapse should have persisted it before using it." 
- ) - - if data.state_key is None: - raise Exception( - f"Trying to set non-state event {state_id} as state" - ) - - state_map_for_event[(data.event_type, data.state_key)] = state_id - - # TODO(faster_joins): check how MSC2716 works and whether we can have - # partial state here - # https://github.com/matrix-org/synapse/issues/13003 - context = await self.state.calculate_context_info( - event, - state_ids_before_event=state_map_for_event, - partial_state=False, - ) - else: context = await self.state.calculate_context_info(event) @@ -1876,28 +1808,6 @@ async def persist_and_notify_client_events( 403, "Redacting server ACL events is not permitted" ) - # Add a little safety stop-gap to prevent people from trying to - # redact MSC2716 related events when they're in a room version - # which does not support it yet. We allow people to use MSC2716 - # events in existing room versions but only from the room - # creator since it does not require any changes to the auth - # rules and in effect, the redaction algorithm . In the - # supported room version, we add the `historical` power level to - # auth the MSC2716 related events and adjust the redaction - # algorthim to keep the `historical` field around (redacting an - # event should only strip fields which don't affect the - # structural protocol level). - is_msc2716_event = ( - original_event.type == EventTypes.MSC2716_INSERTION - or original_event.type == EventTypes.MSC2716_BATCH - or original_event.type == EventTypes.MSC2716_MARKER - ) - if not room_version_obj.msc2716_historical and is_msc2716_event: - raise AuthError( - 403, - "Redacting MSC2716 events is not supported in this room version", - ) - event_types = event_auth.auth_types_for_event(event.room_version, event) prev_state_ids = await context.get_prev_state_ids( StateFilter.from_types(event_types) @@ -1935,58 +1845,12 @@ async def persist_and_notify_client_events( if prev_state_ids: raise AuthError(403, "Changing the room create event is forbidden") - if event.type == EventTypes.MSC2716_INSERTION: - room_version = await self.store.get_room_version_id(event.room_id) - room_version_obj = KNOWN_ROOM_VERSIONS[room_version] - - create_event = await self.store.get_create_event_for_room(event.room_id) - if not room_version_obj.msc2175_implicit_room_creator: - room_creator = create_event.content.get( - EventContentFields.ROOM_CREATOR - ) - else: - room_creator = create_event.sender - - # Only check an insertion event if the room version - # supports it or the event is from the room creator. - if room_version_obj.msc2716_historical or ( - self.config.experimental.msc2716_enabled - and event.sender == room_creator - ): - next_batch_id = event.content.get( - EventContentFields.MSC2716_NEXT_BATCH_ID - ) - conflicting_insertion_event_id = None - if next_batch_id: - conflicting_insertion_event_id = ( - await self.store.get_insertion_event_id_by_batch_id( - event.room_id, next_batch_id - ) - ) - if conflicting_insertion_event_id is not None: - # The current insertion event that we're processing is invalid - # because an insertion event already exists in the room with the - # same next_batch_id. We can't allow multiple because the batch - # pointing will get weird, e.g. we can't determine which insertion - # event the batch event is pointing to. 
- raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Another insertion event already exists with the same next_batch_id", - errcode=Codes.INVALID_PARAM, - ) - - # Mark any `m.historical` messages as backfilled so they don't appear - # in `/sync` and have the proper decrementing `stream_ordering` as we import - backfilled = False - if event.internal_metadata.is_historical(): - backfilled = True - assert self._storage_controllers.persistence is not None ( persisted_events, max_stream_token, ) = await self._storage_controllers.persistence.persist_events( - events_and_context, backfilled=backfilled + events_and_context, ) events_and_pos = [] diff --git a/synapse/handlers/room_batch.py b/synapse/handlers/room_batch.py deleted file mode 100644 index bf9df60218a1..000000000000 --- a/synapse/handlers/room_batch.py +++ /dev/null @@ -1,466 +0,0 @@ -import logging -from typing import TYPE_CHECKING, List, Tuple - -from synapse.api.constants import EventContentFields, EventTypes -from synapse.appservice import ApplicationService -from synapse.http.servlet import assert_params_in_dict -from synapse.types import JsonDict, Requester, UserID, create_requester -from synapse.util.stringutils import random_string - -if TYPE_CHECKING: - from synapse.server import HomeServer - -logger = logging.getLogger(__name__) - - -class RoomBatchHandler: - def __init__(self, hs: "HomeServer"): - self.hs = hs - self.store = hs.get_datastores().main - self._state_storage_controller = hs.get_storage_controllers().state - self.event_creation_handler = hs.get_event_creation_handler() - self.room_member_handler = hs.get_room_member_handler() - self.auth = hs.get_auth() - - async def inherit_depth_from_prev_ids(self, prev_event_ids: List[str]) -> int: - """Finds the depth which would sort it after the most-recent - prev_event_id but before the successors of those events. If no - successors are found, we assume it's an historical extremity part of the - current batch and use the same depth of the prev_event_ids. - - Args: - prev_event_ids: List of prev event IDs - - Returns: - Inherited depth - """ - ( - most_recent_prev_event_id, - most_recent_prev_event_depth, - ) = await self.store.get_max_depth_of(prev_event_ids) - - # We want to insert the historical event after the `prev_event` but before the successor event - # - # We inherit depth from the successor event instead of the `prev_event` - # because events returned from `/messages` are first sorted by `topological_ordering` - # which is just the `depth` and then tie-break with `stream_ordering`. - # - # We mark these inserted historical events as "backfilled" which gives them a - # negative `stream_ordering`. If we use the same depth as the `prev_event`, - # then our historical event will tie-break and be sorted before the `prev_event` - # when it should come after. - # - # We want to use the successor event depth so they appear after `prev_event` because - # it has a larger `depth` but before the successor event because the `stream_ordering` - # is negative before the successor event. - assert most_recent_prev_event_id is not None - successor_event_ids = await self.store.get_successor_events( - most_recent_prev_event_id - ) - - # If we can't find any successor events, then it's a forward extremity of - # historical messages and we can just inherit from the previous historical - # event which we can already assume has the correct depth where we want - # to insert into. 
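As a concrete illustration of the ordering argument in the comments just above (all numbers invented): `/messages` sorts by `(topological_ordering, stream_ordering)`, topological ordering is just the depth, and backfilled events get negative stream orderings, so inheriting the successor's depth is what slots a historical event after the prev event but before its successor:

prev_event = {"id": "prev", "depth": 10, "stream_ordering": 100}
successor = {"id": "successor", "depth": 11, "stream_ordering": 101}
# Inserted historical event: inherits the successor's depth, but is persisted
# as "backfilled", which gives it a negative stream_ordering.
historical = {"id": "historical", "depth": 11, "stream_ordering": -1}

timeline = sorted(
    [successor, historical, prev_event],
    key=lambda e: (e["depth"], e["stream_ordering"]),
)
assert [e["id"] for e in timeline] == ["prev", "historical", "successor"]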
- if not successor_event_ids: - depth = most_recent_prev_event_depth - else: - ( - _, - oldest_successor_depth, - ) = await self.store.get_min_depth_of(successor_event_ids) - - depth = oldest_successor_depth - - return depth - - def create_insertion_event_dict( - self, sender: str, room_id: str, origin_server_ts: int - ) -> JsonDict: - """Creates an event dict for an "insertion" event with the proper fields - and a random batch ID. - - Args: - sender: The event author MXID - room_id: The room ID that the event belongs to - origin_server_ts: Timestamp when the event was sent - - Returns: - The new event dictionary to insert. - """ - - next_batch_id = random_string(8) - insertion_event = { - "type": EventTypes.MSC2716_INSERTION, - "sender": sender, - "room_id": room_id, - "content": { - EventContentFields.MSC2716_NEXT_BATCH_ID: next_batch_id, - EventContentFields.MSC2716_HISTORICAL: True, - }, - "origin_server_ts": origin_server_ts, - } - - return insertion_event - - async def create_requester_for_user_id_from_app_service( - self, user_id: str, app_service: ApplicationService - ) -> Requester: - """Creates a new requester for the given user_id - and validates that the app service is allowed to control - the given user. - - Args: - user_id: The author MXID that the app service is controlling - app_service: The app service that controls the user - - Returns: - Requester object - """ - - await self.auth.validate_appservice_can_control_user_id(app_service, user_id) - - return create_requester(user_id, app_service=app_service) - - async def get_most_recent_full_state_ids_from_event_id_list( - self, event_ids: List[str] - ) -> List[str]: - """Find the most recent event_id and grab the full state at that event. - We will use this as a base to auth our historical messages against. - - Args: - event_ids: List of event ID's to look at - - Returns: - List of event ID's - """ - - ( - most_recent_event_id, - _, - ) = await self.store.get_max_depth_of(event_ids) - # mapping from (type, state_key) -> state_event_id - assert most_recent_event_id is not None - prev_state_map = await self._state_storage_controller.get_state_ids_for_event( - most_recent_event_id - ) - # List of state event ID's - full_state_ids = list(prev_state_map.values()) - - return full_state_ids - - async def persist_state_events_at_start( - self, - state_events_at_start: List[JsonDict], - room_id: str, - initial_state_event_ids: List[str], - app_service_requester: Requester, - ) -> List[str]: - """Takes all `state_events_at_start` event dictionaries and creates/persists - them in a floating state event chain which don't resolve into the current room - state. They are floating because they reference no prev_events which disconnects - them from the normal DAG. - - Args: - state_events_at_start: - room_id: Room where you want the events persisted in. - initial_state_event_ids: - The base set of state for the historical batch which the floating - state chain will derive from. This should probably be the state - from the `prev_event` defined by `/batch_send?prev_event_id=$abc`. - app_service_requester: The requester of an application service. - - Returns: - List of state event ID's we just persisted - """ - assert app_service_requester.app_service - - state_event_ids_at_start = [] - state_event_ids = initial_state_event_ids.copy() - - # Make the state events float off on their own by specifying no - # prev_events for the first one in the chain so we don't have a bunch of - # `@mxid joined the room` noise between each batch. 
- prev_event_ids_for_state_chain: List[str] = [] - - for index, state_event in enumerate(state_events_at_start): - assert_params_in_dict( - state_event, ["type", "origin_server_ts", "content", "sender"] - ) - - logger.debug( - "RoomBatchSendEventRestServlet inserting state_event=%s", state_event - ) - - event_dict = { - "type": state_event["type"], - "origin_server_ts": state_event["origin_server_ts"], - "content": state_event["content"], - "room_id": room_id, - "sender": state_event["sender"], - "state_key": state_event["state_key"], - } - - # Mark all events as historical - event_dict["content"][EventContentFields.MSC2716_HISTORICAL] = True - - # TODO: This is pretty much the same as some other code to handle inserting state in this file - if event_dict["type"] == EventTypes.Member: - membership = event_dict["content"].get("membership", None) - event_id, _ = await self.room_member_handler.update_membership( - await self.create_requester_for_user_id_from_app_service( - state_event["sender"], app_service_requester.app_service - ), - target=UserID.from_string(event_dict["state_key"]), - room_id=room_id, - action=membership, - content=event_dict["content"], - historical=True, - # Only the first event in the state chain should be floating. - # The rest should hang off each other in a chain. - allow_no_prev_events=index == 0, - prev_event_ids=prev_event_ids_for_state_chain, - # The first event in the state chain is floating with no - # `prev_events` which means it can't derive state from - # anywhere automatically. So we need to set some state - # explicitly. - # - # Make sure to use a copy of this list because we modify it - # later in the loop here. Otherwise it will be the same - # reference and also update in the event when we append - # later. - state_event_ids=state_event_ids.copy(), - ) - else: - ( - event, - _, - ) = await self.event_creation_handler.create_and_send_nonmember_event( - await self.create_requester_for_user_id_from_app_service( - state_event["sender"], app_service_requester.app_service - ), - event_dict, - historical=True, - # Only the first event in the state chain should be floating. - # The rest should hang off each other in a chain. - allow_no_prev_events=index == 0, - prev_event_ids=prev_event_ids_for_state_chain, - # The first event in the state chain is floating with no - # `prev_events` which means it can't derive state from - # anywhere automatically. So we need to set some state - # explicitly. - # - # Make sure to use a copy of this list because we modify it - # later in the loop here. Otherwise it will be the same - # reference and also update in the event when we append later. - state_event_ids=state_event_ids.copy(), - ) - event_id = event.event_id - - state_event_ids_at_start.append(event_id) - state_event_ids.append(event_id) - # Connect all the state in a floating chain - prev_event_ids_for_state_chain = [event_id] - - return state_event_ids_at_start - - async def persist_historical_events( - self, - events_to_create: List[JsonDict], - room_id: str, - inherited_depth: int, - initial_state_event_ids: List[str], - app_service_requester: Requester, - ) -> List[str]: - """Create and persists all events provided sequentially. Handles the - complexity of creating events in chronological order so they can - reference each other by prev_event but still persists in - reverse-chronoloical order so they have the correct - (topological_ordering, stream_ordering) and sort correctly from - /messages. 
- - Args: - events_to_create: List of historical events to create in JSON - dictionary format. - room_id: Room where you want the events persisted in. - inherited_depth: The depth to create the events at (you will - probably by calling inherit_depth_from_prev_ids(...)). - initial_state_event_ids: - This is used to set explicit state for the insertion event at - the start of the historical batch since it's floating with no - prev_events to derive state from automatically. - app_service_requester: The requester of an application service. - - Returns: - List of persisted event IDs - """ - assert app_service_requester.app_service - - # We expect the first event in a historical batch to be an insertion event - assert events_to_create[0]["type"] == EventTypes.MSC2716_INSERTION - # We expect the last event in a historical batch to be an batch event - assert events_to_create[-1]["type"] == EventTypes.MSC2716_BATCH - - # Make the historical event chain float off on its own by specifying no - # prev_events for the first event in the chain which causes the HS to - # ask for the state at the start of the batch later. - prev_event_ids: List[str] = [] - - event_ids = [] - events_to_persist = [] - for index, ev in enumerate(events_to_create): - assert_params_in_dict(ev, ["type", "origin_server_ts", "content", "sender"]) - - assert self.hs.is_mine_id(ev["sender"]), "User must be our own: %s" % ( - ev["sender"], - ) - - event_dict = { - "type": ev["type"], - "origin_server_ts": ev["origin_server_ts"], - "content": ev["content"], - "room_id": room_id, - "sender": ev["sender"], # requester.user.to_string(), - "prev_events": prev_event_ids.copy(), - } - - # Mark all events as historical - event_dict["content"][EventContentFields.MSC2716_HISTORICAL] = True - - event, unpersisted_context = await self.event_creation_handler.create_event( - await self.create_requester_for_user_id_from_app_service( - ev["sender"], app_service_requester.app_service - ), - event_dict, - # Only the first event (which is the insertion event) in the - # chain should be floating. The rest should hang off each other - # in a chain. - allow_no_prev_events=index == 0, - prev_event_ids=event_dict.get("prev_events"), - # Since the first event (which is the insertion event) in the - # chain is floating with no `prev_events`, it can't derive state - # from anywhere automatically. So we need to set some state - # explicitly. - state_event_ids=initial_state_event_ids if index == 0 else None, - historical=True, - depth=inherited_depth, - ) - context = await unpersisted_context.persist(event) - assert context._state_group - - # Normally this is done when persisting the event but we have to - # pre-emptively do it here because we create all the events first, - # then persist them in another pass below. And we want to share - # state_groups across the whole batch so this lookup needs to work - # for the next event in the batch in this loop. - await self.store.store_state_group_id_for_event_id( - event_id=event.event_id, - state_group_id=context._state_group, - ) - - logger.debug( - "RoomBatchSendEventRestServlet inserting event=%s, prev_event_ids=%s", - event, - prev_event_ids, - ) - - events_to_persist.append((event, context)) - event_id = event.event_id - - event_ids.append(event_id) - prev_event_ids = [event_id] - - # Persist events in reverse-chronological order so they have the - # correct stream_ordering as they are backfilled (which decrements). 
- # Events are sorted by (topological_ordering, stream_ordering) - # where topological_ordering is just depth. - for event, context in reversed(events_to_persist): - # This call can't raise `PartialStateConflictError` since we forbid - # use of the historical batch API during partial state - await self.event_creation_handler.handle_new_client_event( - await self.create_requester_for_user_id_from_app_service( - event.sender, app_service_requester.app_service - ), - events_and_context=[(event, context)], - ) - - return event_ids - - async def handle_batch_of_events( - self, - events_to_create: List[JsonDict], - room_id: str, - batch_id_to_connect_to: str, - inherited_depth: int, - initial_state_event_ids: List[str], - app_service_requester: Requester, - ) -> Tuple[List[str], str]: - """ - Handles creating and persisting all of the historical events as well as - insertion and batch meta events to make the batch navigable in the DAG. - - Args: - events_to_create: List of historical events to create in JSON - dictionary format. - room_id: Room where you want the events created in. - batch_id_to_connect_to: The batch_id from the insertion event you - want this batch to connect to. - inherited_depth: The depth to create the events at (you will - probably by calling inherit_depth_from_prev_ids(...)). - initial_state_event_ids: - This is used to set explicit state for the insertion event at - the start of the historical batch since it's floating with no - prev_events to derive state from automatically. This should - probably be the state from the `prev_event` defined by - `/batch_send?prev_event_id=$abc` plus the outcome of - `persist_state_events_at_start` - app_service_requester: The requester of an application service. - - Returns: - Tuple containing a list of created events and the next_batch_id - """ - - # Connect this current batch to the insertion event from the previous batch - last_event_in_batch = events_to_create[-1] - batch_event = { - "type": EventTypes.MSC2716_BATCH, - "sender": app_service_requester.user.to_string(), - "room_id": room_id, - "content": { - EventContentFields.MSC2716_BATCH_ID: batch_id_to_connect_to, - EventContentFields.MSC2716_HISTORICAL: True, - }, - # Since the batch event is put at the end of the batch, - # where the newest-in-time event is, copy the origin_server_ts from - # the last event we're inserting - "origin_server_ts": last_event_in_batch["origin_server_ts"], - } - # Add the batch event to the end of the batch (newest-in-time) - events_to_create.append(batch_event) - - # Add an "insertion" event to the start of each batch (next to the oldest-in-time - # event in the batch) so the next batch can be connected to this one. 
- insertion_event = self.create_insertion_event_dict( - sender=app_service_requester.user.to_string(), - room_id=room_id, - # Since the insertion event is put at the start of the batch, - # where the oldest-in-time event is, copy the origin_server_ts from - # the first event we're inserting - origin_server_ts=events_to_create[0]["origin_server_ts"], - ) - next_batch_id = insertion_event["content"][ - EventContentFields.MSC2716_NEXT_BATCH_ID - ] - # Prepend the insertion event to the start of the batch (oldest-in-time) - events_to_create = [insertion_event] + events_to_create - - # Create and persist all of the historical events - event_ids = await self.persist_historical_events( - events_to_create=events_to_create, - room_id=room_id, - inherited_depth=inherited_depth, - initial_state_event_ids=initial_state_event_ids, - app_service_requester=app_service_requester, - ) - - return event_ids, next_batch_id diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 55df34bd06a9..82e4fa73636e 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -362,7 +362,6 @@ async def _local_membership_update( content: Optional[dict] = None, require_consent: bool = True, outlier: bool = False, - historical: bool = False, origin_server_ts: Optional[int] = None, ) -> Tuple[str, int]: """ @@ -378,16 +377,13 @@ async def _local_membership_update( allow_no_prev_events: Whether to allow this event to be created an empty list of prev_events. Normally this is prohibited just because most events should have a prev_event and we should only use this in special - cases like MSC2716. + cases (previously useful for MSC2716). prev_event_ids: The event IDs to use as the prev events state_event_ids: - The full state at a given event. This is used particularly by the MSC2716 - /batch_send endpoint. One use case is the historical `state_events_at_start`; - since each is marked as an `outlier`, the `EventContext.for_outlier()` won't - have any `state_ids` set and therefore can't derive any state even though the - prev_events are set so we need to set them ourself via this argument. - This should normally be left as None, which will cause the auth_event_ids - to be calculated based on the room state at the prev_events. + The full state at a given event. This was previously used particularly + by the MSC2716 /batch_send endpoint. This should normally be left as + None, which will cause the auth_event_ids to be calculated based on the + room state at the prev_events. depth: Override the depth used to order the event in the DAG. Should normally be set to None, which will cause the depth to be calculated based on the prev_events. @@ -400,9 +396,6 @@ async def _local_membership_update( outlier: Indicates whether the event is an `outlier`, i.e. if it's from an arbitrary point and floating in the DAG as opposed to being inline with the current DAG. - historical: Indicates whether the message is being inserted - back in time around some existing events. This is used to skip - a few checks and mark the event as backfilled. origin_server_ts: The origin_server_ts to use if a new event is created. Uses the current timestamp if set to None. 
@@ -477,7 +470,6 @@ async def _local_membership_update( depth=depth, require_consent=require_consent, outlier=outlier, - historical=historical, ) context = await unpersisted_context.persist(event) prev_state_ids = await context.get_prev_state_ids( @@ -585,7 +577,6 @@ async def update_membership( new_room: bool = False, require_consent: bool = True, outlier: bool = False, - historical: bool = False, allow_no_prev_events: bool = False, prev_event_ids: Optional[List[str]] = None, state_event_ids: Optional[List[str]] = None, @@ -610,22 +601,16 @@ async def update_membership( outlier: Indicates whether the event is an `outlier`, i.e. if it's from an arbitrary point and floating in the DAG as opposed to being inline with the current DAG. - historical: Indicates whether the message is being inserted - back in time around some existing events. This is used to skip - a few checks and mark the event as backfilled. allow_no_prev_events: Whether to allow this event to be created an empty list of prev_events. Normally this is prohibited just because most events should have a prev_event and we should only use this in special - cases like MSC2716. + cases (previously useful for MSC2716). prev_event_ids: The event IDs to use as the prev events state_event_ids: - The full state at a given event. This is used particularly by the MSC2716 - /batch_send endpoint. One use case is the historical `state_events_at_start`; - since each is marked as an `outlier`, the `EventContext.for_outlier()` won't - have any `state_ids` set and therefore can't derive any state even though the - prev_events are set so we need to set them ourself via this argument. - This should normally be left as None, which will cause the auth_event_ids - to be calculated based on the room state at the prev_events. + The full state at a given event. This was previously used particularly + by the MSC2716 /batch_send endpoint. This should normally be left as + None, which will cause the auth_event_ids to be calculated based on the + room state at the prev_events. depth: Override the depth used to order the event in the DAG. Should normally be set to None, which will cause the depth to be calculated based on the prev_events. @@ -667,7 +652,6 @@ async def update_membership( new_room=new_room, require_consent=require_consent, outlier=outlier, - historical=historical, allow_no_prev_events=allow_no_prev_events, prev_event_ids=prev_event_ids, state_event_ids=state_event_ids, @@ -691,7 +675,6 @@ async def update_membership_locked( new_room: bool = False, require_consent: bool = True, outlier: bool = False, - historical: bool = False, allow_no_prev_events: bool = False, prev_event_ids: Optional[List[str]] = None, state_event_ids: Optional[List[str]] = None, @@ -718,22 +701,16 @@ async def update_membership_locked( outlier: Indicates whether the event is an `outlier`, i.e. if it's from an arbitrary point and floating in the DAG as opposed to being inline with the current DAG. - historical: Indicates whether the message is being inserted - back in time around some existing events. This is used to skip - a few checks and mark the event as backfilled. allow_no_prev_events: Whether to allow this event to be created an empty list of prev_events. Normally this is prohibited just because most events should have a prev_event and we should only use this in special - cases like MSC2716. + cases (previously useful for MSC2716). prev_event_ids: The event IDs to use as the prev events state_event_ids: - The full state at a given event. 
This is used particularly by the MSC2716 - /batch_send endpoint. One use case is the historical `state_events_at_start`; - since each is marked as an `outlier`, the `EventContext.for_outlier()` won't - have any `state_ids` set and therefore can't derive any state even though the - prev_events are set so we need to set them ourself via this argument. - This should normally be left as None, which will cause the auth_event_ids - to be calculated based on the room state at the prev_events. + The full state at a given event. This was previously used particularly + by the MSC2716 /batch_send endpoint. This should normally be left as + None, which will cause the auth_event_ids to be calculated based on the + room state at the prev_events. depth: Override the depth used to order the event in the DAG. Should normally be set to None, which will cause the depth to be calculated based on the prev_events. @@ -877,7 +854,6 @@ async def update_membership_locked( content=content, require_consent=require_consent, outlier=outlier, - historical=historical, origin_server_ts=origin_server_ts, ) diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index 33002cc0f275..67377c647b66 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -322,7 +322,6 @@ async def _action_for_event_by_user( ) -> None: if ( not event.internal_metadata.is_notifiable() - or event.internal_metadata.is_historical() or event.room_id in self.hs.config.server.rooms_to_exclude_from_sync ): # Push rules for events that aren't notifiable can't be processed by this and diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index 1af8d99d2028..df0845edb209 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -48,7 +48,6 @@ rendezvous, report_event, room, - room_batch, room_keys, room_upgrade_rest_servlet, sendtodevice, @@ -132,7 +131,6 @@ def register_servlets(client_resource: HttpServer, hs: "HomeServer") -> None: user_directory.register_servlets(hs, client_resource) if is_main_process: room_upgrade_rest_servlet.register_servlets(hs, client_resource) - room_batch.register_servlets(hs, client_resource) capabilities.register_servlets(hs, client_resource) if is_main_process: account_validity.register_servlets(hs, client_resource) diff --git a/synapse/rest/client/room_batch.py b/synapse/rest/client/room_batch.py deleted file mode 100644 index 69f85112d8fe..000000000000 --- a/synapse/rest/client/room_batch.py +++ /dev/null @@ -1,254 +0,0 @@ -# Copyright 2016 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import logging -import re -from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple - -from synapse.api.constants import EventContentFields -from synapse.api.errors import AuthError, Codes, SynapseError -from synapse.http.server import HttpServer -from synapse.http.servlet import ( - RestServlet, - assert_params_in_dict, - parse_json_object_from_request, - parse_string, - parse_strings_from_args, -) -from synapse.http.site import SynapseRequest -from synapse.types import JsonDict - -if TYPE_CHECKING: - from synapse.server import HomeServer - -logger = logging.getLogger(__name__) - - -class RoomBatchSendEventRestServlet(RestServlet): - """ - API endpoint which can insert a batch of events historically back in time - next to the given `prev_event`. - - `batch_id` comes from `next_batch_id `in the response of the batch send - endpoint and is derived from the "insertion" events added to each batch. - It's not required for the first batch send. - - `state_events_at_start` is used to define the historical state events - needed to auth the events like join events. These events will float - outside of the normal DAG as outlier's and won't be visible in the chat - history which also allows us to insert multiple batches without having a bunch - of `@mxid joined the room` noise between each batch. - - `events` is chronological list of events you want to insert. - There is a reverse-chronological constraint on batches so once you insert - some messages, you can only insert older ones after that. - tldr; Insert batches from your most recent history -> oldest history. - - POST /_matrix/client/unstable/org.matrix.msc2716/rooms//batch_send?prev_event_id=&batch_id= - { - "events": [ ... ], - "state_events_at_start": [ ... ] - } - """ - - PATTERNS = ( - re.compile( - "^/_matrix/client/unstable/org.matrix.msc2716" - "/rooms/(?P[^/]*)/batch_send$" - ), - ) - CATEGORY = "Client API requests" - - def __init__(self, hs: "HomeServer"): - super().__init__() - self.store = hs.get_datastores().main - self.event_creation_handler = hs.get_event_creation_handler() - self.auth = hs.get_auth() - self.room_batch_handler = hs.get_room_batch_handler() - - async def on_POST( - self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: - requester = await self.auth.get_user_by_req(request, allow_guest=False) - - if not requester.app_service: - raise AuthError( - HTTPStatus.FORBIDDEN, - "Only application services can use the /batchsend endpoint", - ) - - body = parse_json_object_from_request(request) - assert_params_in_dict(body, ["state_events_at_start", "events"]) - - assert request.args is not None - prev_event_ids_from_query = parse_strings_from_args( - request.args, "prev_event_id" - ) - batch_id_from_query = parse_string(request, "batch_id") - - if prev_event_ids_from_query is None: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "prev_event query parameter is required when inserting historical messages back in time", - errcode=Codes.MISSING_PARAM, - ) - - if await self.store.is_partial_state_room(room_id): - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Cannot insert history batches until we have fully joined the room", - errcode=Codes.UNABLE_DUE_TO_PARTIAL_STATE, - ) - - # Verify the batch_id_from_query corresponds to an actual insertion event - # and have the batch connected. 
- if batch_id_from_query: - corresponding_insertion_event_id = ( - await self.store.get_insertion_event_id_by_batch_id( - room_id, batch_id_from_query - ) - ) - if corresponding_insertion_event_id is None: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "No insertion event corresponds to the given ?batch_id", - errcode=Codes.INVALID_PARAM, - ) - - # Make sure that the prev_event_ids exist and aren't outliers - ie, they are - # regular parts of the room DAG where we know the state. - non_outlier_prev_events = await self.store.have_events_in_timeline( - prev_event_ids_from_query - ) - for prev_event_id in prev_event_ids_from_query: - if prev_event_id not in non_outlier_prev_events: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "prev_event %s does not exist, or is an outlier" % (prev_event_id,), - errcode=Codes.INVALID_PARAM, - ) - - # For the event we are inserting next to (`prev_event_ids_from_query`), - # find the most recent state events that allowed that message to be - # sent. We will use that as a base to auth our historical messages - # against. - state_event_ids = await self.room_batch_handler.get_most_recent_full_state_ids_from_event_id_list( - prev_event_ids_from_query - ) - - state_event_ids_at_start = [] - # Create and persist all of the state events that float off on their own - # before the batch. These will most likely be all of the invite/member - # state events used to auth the upcoming historical messages. - if body["state_events_at_start"]: - state_event_ids_at_start = ( - await self.room_batch_handler.persist_state_events_at_start( - state_events_at_start=body["state_events_at_start"], - room_id=room_id, - initial_state_event_ids=state_event_ids, - app_service_requester=requester, - ) - ) - # Update our ongoing auth event ID list with all of the new state we - # just created - state_event_ids.extend(state_event_ids_at_start) - - inherited_depth = await self.room_batch_handler.inherit_depth_from_prev_ids( - prev_event_ids_from_query - ) - - events_to_create = body["events"] - - # Figure out which batch to connect to. If they passed in - # batch_id_from_query let's use it. The batch ID passed in comes - # from the batch_id in the "insertion" event from the previous batch. - last_event_in_batch = events_to_create[-1] - base_insertion_event = None - if batch_id_from_query: - batch_id_to_connect_to = batch_id_from_query - # Otherwise, create an insertion event to act as a starting point. - # - # We don't always have an insertion event to start hanging more history - # off of (ideally there would be one in the main DAG, but that's not the - # case if we're wanting to add history to e.g. existing rooms without - # an insertion event), in which case we just create a new insertion event - # that can then get pointed to by a "marker" event later. 
- else: - base_insertion_event_dict = ( - self.room_batch_handler.create_insertion_event_dict( - sender=requester.user.to_string(), - room_id=room_id, - origin_server_ts=last_event_in_batch["origin_server_ts"], - ) - ) - base_insertion_event_dict["prev_events"] = prev_event_ids_from_query.copy() - - ( - base_insertion_event, - _, - ) = await self.event_creation_handler.create_and_send_nonmember_event( - await self.room_batch_handler.create_requester_for_user_id_from_app_service( - base_insertion_event_dict["sender"], - requester.app_service, - ), - base_insertion_event_dict, - prev_event_ids=base_insertion_event_dict.get("prev_events"), - # Also set the explicit state here because we want to resolve - # any `state_events_at_start` here too. It's not strictly - # necessary to accomplish anything but if someone asks for the - # state at this point, we probably want to show them the - # historical state that was part of this batch. - state_event_ids=state_event_ids, - historical=True, - depth=inherited_depth, - ) - - batch_id_to_connect_to = base_insertion_event.content[ - EventContentFields.MSC2716_NEXT_BATCH_ID - ] - - # Create and persist all of the historical events as well as insertion - # and batch meta events to make the batch navigable in the DAG. - event_ids, next_batch_id = await self.room_batch_handler.handle_batch_of_events( - events_to_create=events_to_create, - room_id=room_id, - batch_id_to_connect_to=batch_id_to_connect_to, - inherited_depth=inherited_depth, - initial_state_event_ids=state_event_ids, - app_service_requester=requester, - ) - - insertion_event_id = event_ids[0] - batch_event_id = event_ids[-1] - historical_event_ids = event_ids[1:-1] - - response_dict = { - "state_event_ids": state_event_ids_at_start, - "event_ids": historical_event_ids, - "next_batch_id": next_batch_id, - "insertion_event_id": insertion_event_id, - "batch_event_id": batch_event_id, - } - if base_insertion_event is not None: - response_dict["base_insertion_event_id"] = base_insertion_event.event_id - - return HTTPStatus.OK, response_dict - - -def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - msc2716_enabled = hs.config.experimental.msc2716_enabled - - if msc2716_enabled: - RoomBatchSendEventRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 191064875561..95400ba570a9 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -102,8 +102,6 @@ def on_GET(self, request: Request) -> Tuple[int, JsonDict]: "org.matrix.msc2285.stable": True, # TODO: Remove when MSC2285 becomes a part of the spec # Supports filtering of /publicRooms by room type as per MSC3827 "org.matrix.msc3827.stable": True, - # Adds support for importing historical messages as per MSC2716 - "org.matrix.msc2716": self.config.experimental.msc2716_enabled, # Adds support for thread relations, per MSC3440. "org.matrix.msc3440.stable": True, # TODO: remove when "v1.3" is added above # Support for thread read receipts & notification counts. 
diff --git a/synapse/server.py b/synapse/server.py index 0f36ef69cb91..b72b76a38b35 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -91,7 +91,6 @@ RoomShutdownHandler, TimestampLookupHandler, ) -from synapse.handlers.room_batch import RoomBatchHandler from synapse.handlers.room_list import RoomListHandler from synapse.handlers.room_member import ( RoomForgetterHandler, @@ -492,10 +491,6 @@ def get_replication_client(self) -> ReplicationClient: def get_room_creation_handler(self) -> RoomCreationHandler: return RoomCreationHandler(self) - @cache_in_self - def get_room_batch_handler(self) -> RoomBatchHandler: - return RoomBatchHandler(self) - @cache_in_self def get_room_shutdown_handler(self) -> RoomShutdownHandler: return RoomShutdownHandler(self) diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py index 0032a92f49a6..3a10c265c9ea 100644 --- a/synapse/storage/databases/main/__init__.py +++ b/synapse/storage/databases/main/__init__.py @@ -61,7 +61,6 @@ from .rejections import RejectionsStore from .relations import RelationsStore from .room import RoomStore -from .room_batch import RoomBatchStore from .roommember import RoomMemberStore from .search import SearchStore from .session import SessionStore @@ -87,7 +86,6 @@ class DataStore( DeviceStore, RoomMemberStore, RoomStore, - RoomBatchStore, RegistrationStore, ProfileStore, PresenceStore, diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index 2681917d0b6a..8b6e3c1dc734 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -31,7 +31,7 @@ import attr from prometheus_client import Counter, Gauge -from synapse.api.constants import MAX_DEPTH, EventTypes +from synapse.api.constants import MAX_DEPTH from synapse.api.errors import StoreError from synapse.api.room_versions import EventFormatVersions, RoomVersion from synapse.events import EventBase, make_event_from_dict @@ -891,124 +891,6 @@ def get_backfill_points_in_room_txn( room_id, ) - @trace - async def get_insertion_event_backward_extremities_in_room( - self, - room_id: str, - current_depth: int, - limit: int, - ) -> List[Tuple[str, int]]: - """ - Get the insertion events we know about that we haven't backfilled yet - along with the approximate depth. Only returns insertion events that are - at a depth lower than or equal to the `current_depth`. Sorted by depth, - highest to lowest (descending) so the closest events to the - `current_depth` are first in the list. - - We ignore insertion events that are newer than the user's current scroll - position (ie, those with depth greater than `current_depth`) as: - 1. we don't really care about getting events that have happened - after our current position; and - 2. by the nature of paginating and scrolling back, we have likely - previously tried and failed to backfill from that insertion event, so - to avoid getting "stuck" requesting the same backfill repeatedly - we drop those insertion event. - - Args: - room_id: Room where we want to find the oldest events - current_depth: The depth at the user's current scrollback position - limit: The max number of insertion event extremities to return - - Returns: - List of (event_id, depth) tuples. Sorted by depth, highest to lowest - (descending) so the closest events to the `current_depth` are first - in the list. 
- """ - - def get_insertion_event_backward_extremities_in_room_txn( - txn: LoggingTransaction, room_id: str - ) -> List[Tuple[str, int]]: - if isinstance(self.database_engine, PostgresEngine): - least_function = "LEAST" - elif isinstance(self.database_engine, Sqlite3Engine): - least_function = "MIN" - else: - raise RuntimeError("Unknown database engine") - - sql = f""" - SELECT - insertion_event_extremity.event_id, event.depth - /* We only want insertion events that are also marked as backwards extremities */ - FROM insertion_event_extremities AS insertion_event_extremity - /* Get the depth of the insertion event from the events table */ - INNER JOIN events AS event USING (event_id) - /** - * We use this info to make sure we don't retry to use a backfill point - * if we've already attempted to backfill from it recently. - */ - LEFT JOIN event_failed_pull_attempts AS failed_backfill_attempt_info - ON - failed_backfill_attempt_info.room_id = insertion_event_extremity.room_id - AND failed_backfill_attempt_info.event_id = insertion_event_extremity.event_id - WHERE - insertion_event_extremity.room_id = ? - /** - * We only want extremities that are older than or at - * the same position of the given `current_depth` (where older - * means less than the given depth) because we're looking backwards - * from the `current_depth` when backfilling. - * - * current_depth (ignore events that come after this, ignore 2-4) - * | - * ▼ - * [0]<--[1]<--[2]<--[3]<--[4] - */ - AND event.depth <= ? /* current_depth */ - /** - * Exponential back-off (up to the upper bound) so we don't retry the - * same backfill point over and over. ex. 2hr, 4hr, 8hr, 16hr, etc - * - * We use `1 << n` as a power of 2 equivalent for compatibility - * with older SQLites. The left shift equivalent only works with - * powers of 2 because left shift is a binary operation (base-2). - * Otherwise, we would use `power(2, n)` or the power operator, `2^n`. - */ - AND ( - failed_backfill_attempt_info.event_id IS NULL - OR ? /* current_time */ >= failed_backfill_attempt_info.last_attempt_ts + ( - (1 << {least_function}(failed_backfill_attempt_info.num_attempts, ? /* max doubling steps */)) - * ? /* step */ - ) - ) - /** - * Sort from highest (closest to the `current_depth`) to the lowest depth - * because the closest are most relevant to backfill from first. - * Then tie-break on alphabetical order of the event_ids so we get a - * consistent ordering which is nice when asserting things in tests. - */ - ORDER BY event.depth DESC, insertion_event_extremity.event_id DESC - LIMIT ? - """ - - txn.execute( - sql, - ( - room_id, - current_depth, - self._clock.time_msec(), - BACKFILL_EVENT_EXPONENTIAL_BACKOFF_MAXIMUM_DOUBLING_STEPS, - BACKFILL_EVENT_EXPONENTIAL_BACKOFF_STEP_MILLISECONDS, - limit, - ), - ) - return cast(List[Tuple[str, int]], txn.fetchall()) - - return await self.db_pool.runInteraction( - "get_insertion_event_backward_extremities_in_room", - get_insertion_event_backward_extremities_in_room_txn, - room_id, - ) - async def get_max_depth_of( self, event_ids: Collection[str] ) -> Tuple[Optional[str], int]: @@ -1280,50 +1162,6 @@ def get_forward_extremeties_for_room_txn(txn: LoggingTransaction) -> List[str]: return event_ids - def _get_connected_batch_event_backfill_results_txn( - self, txn: LoggingTransaction, insertion_event_id: str, limit: int - ) -> List[BackfillQueueNavigationItem]: - """ - Find any batch connections of a given insertion event. 
- A batch event points at a insertion event via: - batch_event.content[MSC2716_BATCH_ID] -> insertion_event.content[MSC2716_NEXT_BATCH_ID] - - Args: - txn: The database transaction to use - insertion_event_id: The event ID to navigate from. We will find - batch events that point back at this insertion event. - limit: Max number of event ID's to query for and return - - Returns: - List of batch events that the backfill queue can process - """ - batch_connection_query = """ - SELECT e.depth, e.stream_ordering, c.event_id, e.type FROM insertion_events AS i - /* Find the batch that connects to the given insertion event */ - INNER JOIN batch_events AS c - ON i.next_batch_id = c.batch_id - /* Get the depth of the batch start event from the events table */ - INNER JOIN events AS e ON c.event_id = e.event_id - /* Find an insertion event which matches the given event_id */ - WHERE i.event_id = ? - LIMIT ? - """ - - # Find any batch connections for the given insertion event - txn.execute( - batch_connection_query, - (insertion_event_id, limit), - ) - return [ - BackfillQueueNavigationItem( - depth=row[0], - stream_ordering=row[1], - event_id=row[2], - type=row[3], - ) - for row in txn - ] - def _get_connected_prev_event_backfill_results_txn( self, txn: LoggingTransaction, event_id: str, limit: int ) -> List[BackfillQueueNavigationItem]: @@ -1472,40 +1310,6 @@ def _get_backfill_events( event_id_results.add(event_id) - # Try and find any potential historical batches of message history. - if self.hs.config.experimental.msc2716_enabled: - # We need to go and try to find any batch events connected - # to a given insertion event (by batch_id). If we find any, we'll - # add them to the queue and navigate up the DAG like normal in the - # next iteration of the loop. 
- if event_type == EventTypes.MSC2716_INSERTION: - # Find any batch connections for the given insertion event - connected_batch_event_backfill_results = ( - self._get_connected_batch_event_backfill_results_txn( - txn, event_id, limit - len(event_id_results) - ) - ) - logger.debug( - "_get_backfill_events(room_id=%s): connected_batch_event_backfill_results=%s", - room_id, - connected_batch_event_backfill_results, - ) - for ( - connected_batch_event_backfill_item - ) in connected_batch_event_backfill_results: - if ( - connected_batch_event_backfill_item.event_id - not in event_id_results - ): - queue.put( - ( - -connected_batch_event_backfill_item.depth, - -connected_batch_event_backfill_item.stream_ordering, - connected_batch_event_backfill_item.event_id, - connected_batch_event_backfill_item.type, - ) - ) - # Now we just look up the DAG by prev_events as normal connected_prev_event_backfill_results = ( self._get_connected_prev_event_backfill_results_txn( @@ -1748,19 +1552,6 @@ def _delete_old_forward_extrem_cache_txn(txn: LoggingTransaction) -> None: _delete_old_forward_extrem_cache_txn, ) - @trace - async def insert_insertion_extremity(self, event_id: str, room_id: str) -> None: - await self.db_pool.simple_upsert( - table="insertion_event_extremities", - keyvalues={"event_id": event_id}, - values={ - "event_id": event_id, - "room_id": room_id, - }, - insertion_values={}, - desc="insert_insertion_extremity", - ) - async def insert_received_event_to_staging( self, origin: str, event: EventBase ) -> None: diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index 44af3357af47..5c9db7554ef4 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -1664,9 +1664,6 @@ def _update_metadata_tables_txn( self._handle_event_relations(txn, event) - self._handle_insertion_event(txn, event) - self._handle_batch_event(txn, event) - # Store the labels for this event. labels = event.content.get(EventContentFields.LABELS) if labels: @@ -1927,128 +1924,6 @@ def _handle_event_relations( ), ) - def _handle_insertion_event( - self, txn: LoggingTransaction, event: EventBase - ) -> None: - """Handles keeping track of insertion events and edges/connections. - Part of MSC2716. - - Args: - txn: The database transaction object - event: The event to process - """ - - if event.type != EventTypes.MSC2716_INSERTION: - # Not a insertion event - return - - # Skip processing an insertion event if the room version doesn't - # support it or the event is not from the room creator. 
- room_version = self.store.get_room_version_txn(txn, event.room_id) - room_creator = self.db_pool.simple_select_one_onecol_txn( - txn, - table="rooms", - keyvalues={"room_id": event.room_id}, - retcol="creator", - allow_none=True, - ) - if not room_version.msc2716_historical and ( - not self.hs.config.experimental.msc2716_enabled - or event.sender != room_creator - ): - return - - next_batch_id = event.content.get(EventContentFields.MSC2716_NEXT_BATCH_ID) - if next_batch_id is None: - # Invalid insertion event without next batch ID - return - - logger.debug( - "_handle_insertion_event (next_batch_id=%s) %s", next_batch_id, event - ) - - # Keep track of the insertion event and the batch ID - self.db_pool.simple_insert_txn( - txn, - table="insertion_events", - values={ - "event_id": event.event_id, - "room_id": event.room_id, - "next_batch_id": next_batch_id, - }, - ) - - # Insert an edge for every prev_event connection - for prev_event_id in event.prev_event_ids(): - self.db_pool.simple_insert_txn( - txn, - table="insertion_event_edges", - values={ - "event_id": event.event_id, - "room_id": event.room_id, - "insertion_prev_event_id": prev_event_id, - }, - ) - - def _handle_batch_event(self, txn: LoggingTransaction, event: EventBase) -> None: - """Handles inserting the batch edges/connections between the batch event - and an insertion event. Part of MSC2716. - - Args: - txn: The database transaction object - event: The event to process - """ - - if event.type != EventTypes.MSC2716_BATCH: - # Not a batch event - return - - # Skip processing a batch event if the room version doesn't - # support it or the event is not from the room creator. - room_version = self.store.get_room_version_txn(txn, event.room_id) - room_creator = self.db_pool.simple_select_one_onecol_txn( - txn, - table="rooms", - keyvalues={"room_id": event.room_id}, - retcol="creator", - allow_none=True, - ) - if not room_version.msc2716_historical and ( - not self.hs.config.experimental.msc2716_enabled - or event.sender != room_creator - ): - return - - batch_id = event.content.get(EventContentFields.MSC2716_BATCH_ID) - if batch_id is None: - # Invalid batch event without a batch ID - return - - logger.debug("_handle_batch_event batch_id=%s %s", batch_id, event) - - # Keep track of the insertion event and the batch ID - self.db_pool.simple_insert_txn( - txn, - table="batch_events", - values={ - "event_id": event.event_id, - "room_id": event.room_id, - "batch_id": batch_id, - }, - ) - - # When we receive an event with a `batch_id` referencing the - # `next_batch_id` of the insertion event, we can remove it from the - # `insertion_event_extremities` table. - sql = """ - DELETE FROM insertion_event_extremities WHERE event_id IN ( - SELECT event_id FROM insertion_events - WHERE next_batch_id = ? - ) - """ - - txn.execute(sql, (batch_id,)) - def _handle_redact_relations( self, txn: LoggingTransaction, room_id: str, redacted_event_id: str ) -> None: diff --git a/synapse/storage/databases/main/room_batch.py b/synapse/storage/databases/main/room_batch.py deleted file mode 100644 index 131f357d04d5..000000000000 --- a/synapse/storage/databases/main/room_batch.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2021 The Matrix.org Foundation C.I.C. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Optional - -from synapse.storage._base import SQLBaseStore - - -class RoomBatchStore(SQLBaseStore): - async def get_insertion_event_id_by_batch_id( - self, room_id: str, batch_id: str - ) -> Optional[str]: - """Retrieve a insertion event ID. - - Args: - batch_id: The batch ID of the insertion event to retrieve. - - Returns: - The event_id of an insertion event, or None if there is no known - insertion event for the given insertion event. - """ - return await self.db_pool.simple_select_one_onecol( - table="insertion_events", - keyvalues={"room_id": room_id, "next_batch_id": batch_id}, - retcol="event_id", - allow_none=True, - ) - - async def store_state_group_id_for_event_id( - self, event_id: str, state_group_id: int - ) -> None: - await self.db_pool.simple_upsert( - table="event_to_state_groups", - keyvalues={"event_id": event_id}, - values={"state_group": state_group_id, "event_id": event_id}, - ) diff --git a/tests/rest/client/test_room_batch.py b/tests/rest/client/test_room_batch.py deleted file mode 100644 index 9d5cb60d1682..000000000000 --- a/tests/rest/client/test_room_batch.py +++ /dev/null @@ -1,302 +0,0 @@ -import logging -from typing import List, Tuple -from unittest.mock import Mock, patch - -from twisted.test.proto_helpers import MemoryReactor - -from synapse.api.constants import EventContentFields, EventTypes -from synapse.appservice import ApplicationService -from synapse.rest import admin -from synapse.rest.client import login, register, room, room_batch, sync -from synapse.server import HomeServer -from synapse.types import JsonDict, RoomStreamToken -from synapse.util import Clock - -from tests import unittest - -logger = logging.getLogger(__name__) - - -def _create_join_state_events_for_batch_send_request( - virtual_user_ids: List[str], - insert_time: int, -) -> List[JsonDict]: - return [ - { - "type": EventTypes.Member, - "sender": virtual_user_id, - "origin_server_ts": insert_time, - "content": { - "membership": "join", - "displayname": "display-name-for-%s" % (virtual_user_id,), - }, - "state_key": virtual_user_id, - } - for virtual_user_id in virtual_user_ids - ] - - -def _create_message_events_for_batch_send_request( - virtual_user_id: str, insert_time: int, count: int -) -> List[JsonDict]: - return [ - { - "type": EventTypes.Message, - "sender": virtual_user_id, - "origin_server_ts": insert_time, - "content": { - "msgtype": "m.text", - "body": "Historical %d" % (i), - EventContentFields.MSC2716_HISTORICAL: True, - }, - } - for i in range(count) - ] - - -class RoomBatchTestCase(unittest.HomeserverTestCase): - """Test importing batches of historical messages.""" - - servlets = [ - admin.register_servlets_for_client_rest_resource, - room_batch.register_servlets, - room.register_servlets, - register.register_servlets, - login.register_servlets, - sync.register_servlets, - ] - - def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - config = self.default_config() - - self.appservice = ApplicationService( - token="i_am_an_app_service", - id="1234", - namespaces={"users": [{"regex": r"@as_user.*", "exclusive": 
True}]}, - # Note: this user does not have to match the regex above - sender="@as_main:test", - ) - - mock_load_appservices = Mock(return_value=[self.appservice]) - with patch( - "synapse.storage.databases.main.appservice.load_appservices", - mock_load_appservices, - ): - hs = self.setup_test_homeserver(config=config) - return hs - - def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: - self.clock = clock - self._storage_controllers = hs.get_storage_controllers() - - self.virtual_user_id, _ = self.register_appservice_user( - "as_user_potato", self.appservice.token - ) - - def _create_test_room(self) -> Tuple[str, str, str, str]: - room_id = self.helper.create_room_as( - self.appservice.sender, tok=self.appservice.token - ) - - res_a = self.helper.send_event( - room_id=room_id, - type=EventTypes.Message, - content={ - "msgtype": "m.text", - "body": "A", - }, - tok=self.appservice.token, - ) - event_id_a = res_a["event_id"] - - res_b = self.helper.send_event( - room_id=room_id, - type=EventTypes.Message, - content={ - "msgtype": "m.text", - "body": "B", - }, - tok=self.appservice.token, - ) - event_id_b = res_b["event_id"] - - res_c = self.helper.send_event( - room_id=room_id, - type=EventTypes.Message, - content={ - "msgtype": "m.text", - "body": "C", - }, - tok=self.appservice.token, - ) - event_id_c = res_c["event_id"] - - return room_id, event_id_a, event_id_b, event_id_c - - @unittest.override_config({"experimental_features": {"msc2716_enabled": True}}) - def test_same_state_groups_for_whole_historical_batch(self) -> None: - """Make sure that when using the `/batch_send` endpoint to import a - bunch of historical messages, it re-uses the same `state_group` across - the whole batch. This is an easy optimization to make sure we're getting - right because the state for the whole batch is contained in - `state_events_at_start` and can be shared across everything. - """ - - time_before_room = int(self.clock.time_msec()) - room_id, event_id_a, _, _ = self._create_test_room() - - channel = self.make_request( - "POST", - "/_matrix/client/unstable/org.matrix.msc2716/rooms/%s/batch_send?prev_event_id=%s" - % (room_id, event_id_a), - content={ - "events": _create_message_events_for_batch_send_request( - self.virtual_user_id, time_before_room, 3 - ), - "state_events_at_start": _create_join_state_events_for_batch_send_request( - [self.virtual_user_id], time_before_room - ), - }, - access_token=self.appservice.token, - ) - self.assertEqual(channel.code, 200, channel.result) - - # Get the historical event IDs that we just imported - historical_event_ids = channel.json_body["event_ids"] - self.assertEqual(len(historical_event_ids), 3) - - # Fetch the state_groups - state_group_map = self.get_success( - self._storage_controllers.state.get_state_groups_ids( - room_id, historical_event_ids - ) - ) - - # We expect all of the historical events to be using the same state_group - # so there should only be a single state_group here! - self.assertEqual( - len(state_group_map.keys()), - 1, - "Expected a single state_group to be returned by saw state_groups=%s" - % (state_group_map.keys(),), - ) - - @unittest.override_config({"experimental_features": {"msc2716_enabled": True}}) - def test_sync_while_batch_importing(self) -> None: - """ - Make sure that /sync correctly returns full room state when a user joins - during ongoing batch backfilling. 
- See: https://github.com/matrix-org/synapse/issues/12281 - """ - # Create user who will be invited & join room - user_id = self.register_user("beep", "test") - user_tok = self.login("beep", "test") - - time_before_room = int(self.clock.time_msec()) - - # Create a room with some events - room_id, _, _, _ = self._create_test_room() - # Invite the user - self.helper.invite( - room_id, src=self.appservice.sender, tok=self.appservice.token, targ=user_id - ) - - # Create another room, send a bunch of events to advance the stream token - other_room_id = self.helper.create_room_as( - self.appservice.sender, tok=self.appservice.token - ) - for _ in range(5): - self.helper.send_event( - room_id=other_room_id, - type=EventTypes.Message, - content={"msgtype": "m.text", "body": "C"}, - tok=self.appservice.token, - ) - - # Join the room as the normal user - self.helper.join(room_id, user_id, tok=user_tok) - - # Create an event to hang the historical batch from - In order to see - # the failure case originally reported in #12281, the historical batch - # must be hung from the most recent event in the room so the base - # insertion event ends up with the highest `topogological_ordering` - # (`depth`) in the room but will have a negative `stream_ordering` - # because it's a `historical` event. Previously, when assembling the - # `state` for the `/sync` response, the bugged logic would sort by - # `topological_ordering` descending and pick up the base insertion - # event because it has a negative `stream_ordering` below the given - # pagination token. Now we properly sort by `stream_ordering` - # descending which puts `historical` events with a negative - # `stream_ordering` way at the bottom and aren't selected as expected. - response = self.helper.send_event( - room_id=room_id, - type=EventTypes.Message, - content={ - "msgtype": "m.text", - "body": "C", - }, - tok=self.appservice.token, - ) - event_to_hang_id = response["event_id"] - - channel = self.make_request( - "POST", - "/_matrix/client/unstable/org.matrix.msc2716/rooms/%s/batch_send?prev_event_id=%s" - % (room_id, event_to_hang_id), - content={ - "events": _create_message_events_for_batch_send_request( - self.virtual_user_id, time_before_room, 3 - ), - "state_events_at_start": _create_join_state_events_for_batch_send_request( - [self.virtual_user_id], time_before_room - ), - }, - access_token=self.appservice.token, - ) - self.assertEqual(channel.code, 200, channel.result) - - # Now we need to find the invite + join events stream tokens so we can sync between - main_store = self.hs.get_datastores().main - events, next_key = self.get_success( - main_store.get_recent_events_for_room( - room_id, - 50, - end_token=main_store.get_room_max_token(), - ), - ) - invite_event_position = None - for event in events: - if ( - event.type == "m.room.member" - and event.content["membership"] == "invite" - ): - invite_event_position = self.get_success( - main_store.get_topological_token_for_event(event.event_id) - ) - break - - assert invite_event_position is not None, "No invite event found" - - # Remove the topological order from the token by re-creating w/stream only - invite_event_position = RoomStreamToken(None, invite_event_position.stream) - - # Sync everything after this token - since_token = self.get_success(invite_event_position.to_string(main_store)) - sync_response = self.make_request( - "GET", - f"/sync?since={since_token}", - access_token=user_tok, - ) - - # Assert that, for this room, the user was considered to have joined and thus - # receives the 
full state history - state_event_types = [ - event["type"] - for event in sync_response.json_body["rooms"]["join"][room_id]["state"][ - "events" - ] - ] - - assert ( - "m.room.create" in state_event_types - ), "Missing room full state in sync response" diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py index 4b8d8328d742..0f3b0744f184 100644 --- a/tests/storage/test_event_federation.py +++ b/tests/storage/test_event_federation.py @@ -20,7 +20,6 @@ from twisted.test.proto_helpers import MemoryReactor -from synapse.api.constants import EventTypes from synapse.api.room_versions import ( KNOWN_ROOM_VERSIONS, EventFormatVersions, @@ -924,216 +923,6 @@ def test_get_backfill_points_in_room_works_after_many_failed_pull_attempts_that_ backfill_event_ids = [backfill_point[0] for backfill_point in backfill_points] self.assertEqual(backfill_event_ids, ["b3", "b2", "b1"]) - def _setup_room_for_insertion_backfill_tests(self) -> _BackfillSetupInfo: - """ - Sets up a room with various insertion event backward extremities to test - backfill functions against. - - Returns: - _BackfillSetupInfo including the `room_id` to test against and - `depth_map` of events in the room - """ - room_id = "!backfill-room-test:some-host" - - depth_map: Dict[str, int] = { - "1": 1, - "2": 2, - "insertion_eventA": 3, - "3": 4, - "insertion_eventB": 5, - "4": 6, - "5": 7, - } - - def populate_db(txn: LoggingTransaction) -> None: - # Insert the room to satisfy the foreign key constraint of - # `event_failed_pull_attempts` - self.store.db_pool.simple_insert_txn( - txn, - "rooms", - { - "room_id": room_id, - "creator": "room_creator_user_id", - "is_public": True, - "room_version": "6", - }, - ) - - # Insert our server events - stream_ordering = 0 - for event_id, depth in depth_map.items(): - self.store.db_pool.simple_insert_txn( - txn, - table="events", - values={ - "event_id": event_id, - "type": EventTypes.MSC2716_INSERTION - if event_id.startswith("insertion_event") - else "test_regular_type", - "room_id": room_id, - "depth": depth, - "topological_ordering": depth, - "stream_ordering": stream_ordering, - "processed": True, - "outlier": False, - }, - ) - - if event_id.startswith("insertion_event"): - self.store.db_pool.simple_insert_txn( - txn, - table="insertion_event_extremities", - values={ - "event_id": event_id, - "room_id": room_id, - }, - ) - - stream_ordering += 1 - - self.get_success( - self.store.db_pool.runInteraction( - "_setup_room_for_insertion_backfill_tests_populate_db", - populate_db, - ) - ) - - return _BackfillSetupInfo(room_id=room_id, depth_map=depth_map) - - def test_get_insertion_event_backward_extremities_in_room(self) -> None: - """ - Test to make sure only insertion event backward extremities that are - older and come before the `current_depth` are returned. 
- """ - setup_info = self._setup_room_for_insertion_backfill_tests() - room_id = setup_info.room_id - depth_map = setup_info.depth_map - - # Try at "insertion_eventB" - backfill_points = self.get_success( - self.store.get_insertion_event_backward_extremities_in_room( - room_id, depth_map["insertion_eventB"], limit=100 - ) - ) - backfill_event_ids = [backfill_point[0] for backfill_point in backfill_points] - self.assertEqual(backfill_event_ids, ["insertion_eventB", "insertion_eventA"]) - - # Try at "insertion_eventA" - backfill_points = self.get_success( - self.store.get_insertion_event_backward_extremities_in_room( - room_id, depth_map["insertion_eventA"], limit=100 - ) - ) - backfill_event_ids = [backfill_point[0] for backfill_point in backfill_points] - # Event "2" has a depth of 2 but is not included here because we only - # know the approximate depth of 5 from our event "3". - self.assertListEqual(backfill_event_ids, ["insertion_eventA"]) - - def test_get_insertion_event_backward_extremities_in_room_excludes_events_we_have_attempted( - self, - ) -> None: - """ - Test to make sure that insertion events we have attempted to backfill - (and within backoff timeout duration) do not show up as an event to - backfill again. - """ - setup_info = self._setup_room_for_insertion_backfill_tests() - room_id = setup_info.room_id - depth_map = setup_info.depth_map - - # Record some attempts to backfill these events which will make - # `get_insertion_event_backward_extremities_in_room` exclude them - # because we haven't passed the backoff interval. - self.get_success( - self.store.record_event_failed_pull_attempt( - room_id, "insertion_eventA", "fake cause" - ) - ) - - # No time has passed since we attempted to backfill ^ - - # Try at "insertion_eventB" - backfill_points = self.get_success( - self.store.get_insertion_event_backward_extremities_in_room( - room_id, depth_map["insertion_eventB"], limit=100 - ) - ) - backfill_event_ids = [backfill_point[0] for backfill_point in backfill_points] - # Only the backfill points that we didn't record earlier exist here. - self.assertEqual(backfill_event_ids, ["insertion_eventB"]) - - def test_get_insertion_event_backward_extremities_in_room_attempted_event_retry_after_backoff_duration( - self, - ) -> None: - """ - Test to make sure after we fake attempt to backfill event - "insertion_eventA" many times, we can see retry and see the - "insertion_eventA" again after the backoff timeout duration has - exceeded. - """ - setup_info = self._setup_room_for_insertion_backfill_tests() - room_id = setup_info.room_id - depth_map = setup_info.depth_map - - # Record some attempts to backfill these events which will make - # `get_backfill_points_in_room` exclude them because we - # haven't passed the backoff interval. 
- self.get_success( - self.store.record_event_failed_pull_attempt( - room_id, "insertion_eventB", "fake cause" - ) - ) - self.get_success( - self.store.record_event_failed_pull_attempt( - room_id, "insertion_eventA", "fake cause" - ) - ) - self.get_success( - self.store.record_event_failed_pull_attempt( - room_id, "insertion_eventA", "fake cause" - ) - ) - self.get_success( - self.store.record_event_failed_pull_attempt( - room_id, "insertion_eventA", "fake cause" - ) - ) - self.get_success( - self.store.record_event_failed_pull_attempt( - room_id, "insertion_eventA", "fake cause" - ) - ) - - # Now advance time by 2 hours and we should only be able to see - # "insertion_eventB" because we have waited long enough for the single - # attempt (2^1 hours) but we still shouldn't see "insertion_eventA" - # because we haven't waited long enough for this many attempts. - self.reactor.advance(datetime.timedelta(hours=2).total_seconds()) - - # Try at "insertion_eventA" and make sure that "insertion_eventA" is not - # in the list because we've already attempted many times - backfill_points = self.get_success( - self.store.get_insertion_event_backward_extremities_in_room( - room_id, depth_map["insertion_eventA"], limit=100 - ) - ) - backfill_event_ids = [backfill_point[0] for backfill_point in backfill_points] - self.assertEqual(backfill_event_ids, []) - - # Now advance time by 20 hours (above 2^4 because we made 4 attemps) and - # see if we can now backfill it - self.reactor.advance(datetime.timedelta(hours=20).total_seconds()) - - # Try at "insertion_eventA" again after we advanced enough time and we - # should see "insertion_eventA" again - backfill_points = self.get_success( - self.store.get_insertion_event_backward_extremities_in_room( - room_id, depth_map["insertion_eventA"], limit=100 - ) - ) - backfill_event_ids = [backfill_point[0] for backfill_point in backfill_points] - self.assertEqual(backfill_event_ids, ["insertion_eventA"]) - def test_get_event_ids_with_failed_pull_attempts(self) -> None: """ Test to make sure we properly get event_ids based on whether they have any From 10c509425fb6e36cba63de94402a9a5772cba54c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 10:28:43 +0100 Subject: [PATCH 087/153] Bump serde_json from 1.0.96 to 1.0.97 (#15797) Bumps [serde_json](https://github.com/serde-rs/json) from 1.0.96 to 1.0.97. - [Release notes](https://github.com/serde-rs/json/releases) - [Commits](https://github.com/serde-rs/json/compare/v1.0.96...v1.0.97) --- updated-dependencies: - dependency-name: serde_json dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9bb8225226f7..51ff26ec1baa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -340,9 +340,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.96" +version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" +checksum = "bdf3bf93142acad5821c99197022e170842cdbc1c30482b98750c688c640842a" dependencies = [ "itoa", "ryu", From d3cd9881c0446526b7f71762e2ca8d464eb60b18 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 10:28:57 +0100 Subject: [PATCH 088/153] Bump ruff from 0.0.265 to 0.0.272 (#15799) Bumps [ruff](https://github.com/charliermarsh/ruff) from 0.0.265 to 0.0.272. - [Release notes](https://github.com/charliermarsh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/BREAKING_CHANGES.md) - [Commits](https://github.com/charliermarsh/ruff/compare/v0.0.265...v0.0.272) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 38 +++++++++++++++++++------------------- pyproject.toml | 2 +- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index cf4a89c85ad1..385963301e5a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2242,28 +2242,28 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.0.265" +version = "0.0.272" description = "An extremely fast Python linter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.0.265-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:30ddfe22de6ce4eb1260408f4480bbbce998f954dbf470228a21a9b2c45955e4"}, - {file = "ruff-0.0.265-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:a11bd0889e88d3342e7bc514554bb4461bf6cc30ec115821c2425cfaac0b1b6a"}, - {file = "ruff-0.0.265-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a9b38bdb40a998cbc677db55b6225a6c4fadcf8819eb30695e1b8470942426b"}, - {file = "ruff-0.0.265-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a8b44a245b60512403a6a03a5b5212da274d33862225c5eed3bcf12037eb19bb"}, - {file = "ruff-0.0.265-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b279fa55ea175ef953208a6d8bfbcdcffac1c39b38cdb8c2bfafe9222add70bb"}, - {file = "ruff-0.0.265-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5028950f7af9b119d43d91b215d5044976e43b96a0d1458d193ef0dd3c587bf8"}, - {file = "ruff-0.0.265-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4057eb539a1d88eb84e9f6a36e0a999e0f261ed850ae5d5817e68968e7b89ed9"}, - {file = "ruff-0.0.265-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d586e69ab5cbf521a1910b733412a5735936f6a610d805b89d35b6647e2a66aa"}, - {file = "ruff-0.0.265-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa17b13cd3f29fc57d06bf34c31f21d043735cc9a681203d634549b0e41047d1"}, - {file = "ruff-0.0.265-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9ac13b11d9ad3001de9d637974ec5402a67cefdf9fffc3929ab44c2fcbb850a1"}, - {file = "ruff-0.0.265-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:62a9578b48cfd292c64ea3d28681dc16b1aa7445b7a7709a2884510fc0822118"}, - {file = "ruff-0.0.265-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d0f9967f84da42d28e3d9d9354cc1575f96ed69e6e40a7d4b780a7a0418d9409"}, - {file = "ruff-0.0.265-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1d5a8de2fbaf91ea5699451a06f4074e7a312accfa774ad9327cde3e4fda2081"}, - {file = "ruff-0.0.265-py3-none-win32.whl", hash = "sha256:9e9db5ccb810742d621f93272e3cc23b5f277d8d00c4a79668835d26ccbe48dd"}, - {file = "ruff-0.0.265-py3-none-win_amd64.whl", hash = "sha256:f54facf286103006171a00ce20388d88ed1d6732db3b49c11feb9bf3d46f90e9"}, - {file = "ruff-0.0.265-py3-none-win_arm64.whl", hash = "sha256:c78470656e33d32ddc54e8482b1b0fc6de58f1195586731e5ff1405d74421499"}, - {file = "ruff-0.0.265.tar.gz", hash = "sha256:53c17f0dab19ddc22b254b087d1381b601b155acfa8feed514f0d6a413d0ab3a"}, + {file = "ruff-0.0.272-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:ae9b57546e118660175d45d264b87e9b4c19405c75b587b6e4d21e6a17bf4fdf"}, + {file = "ruff-0.0.272-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:1609b864a8d7ee75a8c07578bdea0a7db75a144404e75ef3162e0042bfdc100d"}, + {file = "ruff-0.0.272-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee76b4f05fcfff37bd6ac209d1370520d509ea70b5a637bdf0a04d0c99e13dff"}, + {file = "ruff-0.0.272-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48eccf225615e106341a641f826b15224b8a4240b84269ead62f0afd6d7e2d95"}, + {file = "ruff-0.0.272-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:677284430ac539bb23421a2b431b4ebc588097ef3ef918d0e0a8d8ed31fea216"}, + {file = "ruff-0.0.272-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = 
"sha256:9c4bfb75456a8e1efe14c52fcefb89cfb8f2a0d31ed8d804b82c6cf2dc29c42c"}, + {file = "ruff-0.0.272-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86bc788245361a8148ff98667da938a01e1606b28a45e50ac977b09d3ad2c538"}, + {file = "ruff-0.0.272-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b2ea68d2aa69fff1b20b67636b1e3e22a6a39e476c880da1282c3e4bf6ee5a"}, + {file = "ruff-0.0.272-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd2bbe337a3f84958f796c77820d55ac2db1e6753f39d1d1baed44e07f13f96d"}, + {file = "ruff-0.0.272-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d5a208f8ef0e51d4746930589f54f9f92f84bb69a7d15b1de34ce80a7681bc00"}, + {file = "ruff-0.0.272-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:905ff8f3d6206ad56fcd70674453527b9011c8b0dc73ead27618426feff6908e"}, + {file = "ruff-0.0.272-py3-none-musllinux_1_2_i686.whl", hash = "sha256:19643d448f76b1eb8a764719072e9c885968971bfba872e14e7257e08bc2f2b7"}, + {file = "ruff-0.0.272-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:691d72a00a99707a4e0b2846690961157aef7b17b6b884f6b4420a9f25cd39b5"}, + {file = "ruff-0.0.272-py3-none-win32.whl", hash = "sha256:dc406e5d756d932da95f3af082814d2467943631a587339ee65e5a4f4fbe83eb"}, + {file = "ruff-0.0.272-py3-none-win_amd64.whl", hash = "sha256:a37ec80e238ead2969b746d7d1b6b0d31aa799498e9ba4281ab505b93e1f4b28"}, + {file = "ruff-0.0.272-py3-none-win_arm64.whl", hash = "sha256:06b8ee4eb8711ab119db51028dd9f5384b44728c23586424fd6e241a5b9c4a3b"}, + {file = "ruff-0.0.272.tar.gz", hash = "sha256:273a01dc8c3c4fd4c2af7ea7a67c8d39bb09bce466e640dd170034da75d14cab"}, ] [[package]] @@ -3291,4 +3291,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.7.1" -content-hash = "7ad11e62a675e09444cf33ca2de3216fc4efc5874a2575e54d95d577a52439d3" +content-hash = "090924370b17fd265407b5a3f9cbc00997308f575b455399b39a48e3ca1a5a8e" diff --git a/pyproject.toml b/pyproject.toml index 097bd039433f..7f2f7927c9bd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -311,7 +311,7 @@ all = [ # We pin black so that our tests don't start failing on new releases. isort = ">=5.10.1" black = ">=22.3.0" -ruff = "0.0.265" +ruff = "0.0.272" # Typechecking lxml-stubs = ">=0.4.0" From 207cbe519dff0d97ec6e1d2134c9cc82dd944aa2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 10:29:10 +0100 Subject: [PATCH 089/153] Bump phonenumbers from 8.13.13 to 8.13.14 (#15798) Bumps [phonenumbers](https://github.com/daviddrysdale/python-phonenumbers) from 8.13.13 to 8.13.14. - [Commits](https://github.com/daviddrysdale/python-phonenumbers/compare/v8.13.13...v8.13.14) --- updated-dependencies: - dependency-name: phonenumbers dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 385963301e5a..9549e1c6a0d9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1583,13 +1583,13 @@ files = [ [[package]] name = "phonenumbers" -version = "8.13.13" +version = "8.13.14" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." 
optional = false python-versions = "*" files = [ - {file = "phonenumbers-8.13.13-py2.py3-none-any.whl", hash = "sha256:55657adb607484aba6d56270b8a1f9b302f35496076e6c02051d06ed366374d9"}, - {file = "phonenumbers-8.13.13.tar.gz", hash = "sha256:4bdf8c989aff0cdb105aef170ad2c21f14b4537bcb32cf349f1f710df992a40a"}, + {file = "phonenumbers-8.13.14-py2.py3-none-any.whl", hash = "sha256:a4b20b6ba7dd402728f5cc8e86e1f29b1a873af45f5381dbee7e3083af497ff6"}, + {file = "phonenumbers-8.13.14.tar.gz", hash = "sha256:5fa952b4abf9fccdaf1f130d96114a520c48890d4091b50a064e22c0fdc12dec"}, ] [[package]] From 5f9d5190aa214c4ef119293a8368e9f589c5ed35 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 10:30:03 +0100 Subject: [PATCH 090/153] Bump attrs from 22.2.0 to 23.1.0 (#15801) Bumps [attrs](https://github.com/python-attrs/attrs) from 22.2.0 to 23.1.0. - [Release notes](https://github.com/python-attrs/attrs/releases) - [Changelog](https://github.com/python-attrs/attrs/blob/main/CHANGELOG.md) - [Commits](https://github.com/python-attrs/attrs/compare/22.2.0...23.1.0) --- updated-dependencies: - dependency-name: attrs dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9549e1c6a0d9..88c8960b0a94 100644 --- a/poetry.lock +++ b/poetry.lock @@ -32,21 +32,24 @@ wrapt = [ [[package]] name = "attrs" -version = "22.2.0" +version = "23.1.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, ] +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + [package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "authlib" From 4ba528d9c31e1af75f3532ac2a52c1b12365f592 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 10:30:17 +0100 Subject: [PATCH 091/153] Bump ijson from 3.2.0.post0 to 3.2.1 (#15802) Bumps [ijson](https://github.com/ICRAR/ijson) from 3.2.0.post0 to 3.2.1. - [Changelog](https://github.com/ICRAR/ijson/blob/master/CHANGELOG.md) - [Commits](https://github.com/ICRAR/ijson/compare/v3.2.0.post0...v3.2.1) --- updated-dependencies: - dependency-name: ijson dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 158 ++++++++++++++++++++++++++-------------------------- 1 file changed, 79 insertions(+), 79 deletions(-) diff --git a/poetry.lock b/poetry.lock index 88c8960b0a94..88f1e9548aca 100644 --- a/poetry.lock +++ b/poetry.lock @@ -728,89 +728,89 @@ files = [ [[package]] name = "ijson" -version = "3.2.0.post0" +version = "3.2.1" description = "Iterative JSON parser with standard Python iterator interfaces" optional = false python-versions = "*" files = [ - {file = "ijson-3.2.0.post0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5809752045ef74c26adf159ed03df7fb7e7a8d656992fd7562663ed47d6d39d9"}, - {file = "ijson-3.2.0.post0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce4be2beece2629bd24bcab147741d1532bd5ed40fb52f2b4fcde5c5bf606df0"}, - {file = "ijson-3.2.0.post0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5d365df54d18076f1d5f2ffb1eef2ac7f0d067789838f13d393b5586fbb77b02"}, - {file = "ijson-3.2.0.post0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c93ae4d49d8cf8accfedc8a8e7815851f56ceb6e399b0c186754a68fed22844"}, - {file = "ijson-3.2.0.post0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47a56e3628c227081a2aa58569cbf2af378bad8af648aa904080e87cd6644cfb"}, - {file = "ijson-3.2.0.post0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8af68fe579f6f0b9a8b3f033d10caacfed6a4b89b8c7a1d9478a8f5d8aba4a1"}, - {file = "ijson-3.2.0.post0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6eed1ddd3147de49226db4f213851cf7860493a7b6c7bd5e62516941c007094c"}, - {file = "ijson-3.2.0.post0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9ecbf85a6d73fc72f6534c38f7d92ed15d212e29e0dbe9810a465d61c8a66d23"}, - {file = "ijson-3.2.0.post0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd218b338ac68213c997d4c88437c0e726f16d301616bf837e1468901934042c"}, - {file = "ijson-3.2.0.post0-cp310-cp310-win32.whl", hash = "sha256:4e7c4fdc7d24747c8cc7d528c145afda4de23210bf4054bd98cd63bf07e4882d"}, - {file = "ijson-3.2.0.post0-cp310-cp310-win_amd64.whl", hash = "sha256:4d4e143908f47307042c9678803d27706e0e2099d0a6c1988c6cae1da07760bf"}, - {file = "ijson-3.2.0.post0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:56500dac8f52989ef7c0075257a8b471cbea8ef77f1044822742b3cbf2246e8b"}, - {file = "ijson-3.2.0.post0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:535665a77408b6bea56eb828806fae125846dff2e2e0ed4cb2e0a8e36244d753"}, - {file = "ijson-3.2.0.post0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4465c90b25ca7903410fabe4145e7b45493295cc3b84ec1216653fbe9021276"}, - {file = "ijson-3.2.0.post0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efee1e9b4f691e1086730f3010e31c55625bc2e0f7db292a38a2cdf2774c2e13"}, - {file = "ijson-3.2.0.post0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6fd55f7a46429de95383fc0d0158c1bfb798e976d59d52830337343c2d9bda5c"}, - {file = "ijson-3.2.0.post0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25919b444426f58dcc62f763d1c6be6297f309da85ecab55f51da6ca86fc9fdf"}, - {file = "ijson-3.2.0.post0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c85892d68895ba7a0b16a0e6b7d9f9a0e30e86f2b1e0f6986243473ba8735432"}, - {file = "ijson-3.2.0.post0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:27409ba44cfd006901971063d37699f72e092b5efaa1586288b5067d80c6b5bd"}, - {file = "ijson-3.2.0.post0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:11dfd64633fe1382c4237477ac3836f682ca17e25e0d0799e84737795b0611df"}, - {file = "ijson-3.2.0.post0-cp311-cp311-win32.whl", hash = "sha256:41e955e173f77f54337fecaaa58a35c464b75e232b1f939b282497134a4d4f0e"}, - {file = "ijson-3.2.0.post0-cp311-cp311-win_amd64.whl", hash = "sha256:b3bdd2e12d9b9a18713dd6f3c5ef3734fdab25b79b177054ba9e35ecc746cb6e"}, - {file = "ijson-3.2.0.post0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:26b57838e712b8852c40ec6d74c6de8bb226446440e1af1354c077a6f81b9142"}, - {file = "ijson-3.2.0.post0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6464242f7895268d3086d7829ef031b05c77870dad1e13e51ef79d0a9cfe029"}, - {file = "ijson-3.2.0.post0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3c6cf18b61b94db9590f86af0dd60edbccb36e151643152b8688066f677fbc9"}, - {file = "ijson-3.2.0.post0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:992e9e68003df32e2aa0f31eb82c0a94f21286203ab2f2b2c666410e17b59d2f"}, - {file = "ijson-3.2.0.post0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d3e255ef05b434f20fc9d4b18ea15733d1038bec3e4960d772b06216fa79e82d"}, - {file = "ijson-3.2.0.post0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:424232c2bf3e8181f1b572db92c179c2376b57eba9fc8931453fba975f48cb80"}, - {file = "ijson-3.2.0.post0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bced6cd5b09d4d002dda9f37292dd58d26eb1c4d0d179b820d3708d776300bb4"}, - {file = "ijson-3.2.0.post0-cp36-cp36m-win32.whl", hash = "sha256:a8c84dff2d60ae06d5280ec87cd63050bbd74a90c02bfc7c390c803cfc8ac8fc"}, - {file = "ijson-3.2.0.post0-cp36-cp36m-win_amd64.whl", hash = "sha256:a340413a9bf307fafd99254a4dd4ac6c567b91a205bf896dde18888315fd7fcd"}, - {file = "ijson-3.2.0.post0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b3456cd5b16ec9db3ef23dd27f37bf5a14f765e8272e9af3e3de9ee9a4cba867"}, - {file = "ijson-3.2.0.post0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eb838b4e4360e65c00aa13c78b35afc2477759d423b602b60335af5bed3de5b"}, - {file = "ijson-3.2.0.post0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe7f414edd69dd9199b0dfffa0ada22f23d8009e10fe2a719e0993b7dcc2e6e2"}, - {file = "ijson-3.2.0.post0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:183841b8d033ca95457f61fb0719185dc7f51a616070bdf1dcaf03473bed05b2"}, - {file = "ijson-3.2.0.post0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1302dc6490da7d44c3a76a5f0b87d8bec9f918454c6d6e6bf4ed922e47da58bb"}, - {file = "ijson-3.2.0.post0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3b21b1ecd20ed2f918f6f99cdfa68284a416c0f015ffa64b68fa933df1b24d40"}, - {file = "ijson-3.2.0.post0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e97e6e07851cefe7baa41f1ebf5c0899d2d00d94bfef59825752e4c784bebbe8"}, - {file = "ijson-3.2.0.post0-cp37-cp37m-win32.whl", hash = 
"sha256:cd0450e76b9c629b7f86e7d5b91b7cc9c281dd719630160a992b19a856f7bdbd"}, - {file = "ijson-3.2.0.post0-cp37-cp37m-win_amd64.whl", hash = "sha256:bed8dcb7dbfdb98e647ad47676045e0891f610d38095dcfdae468e1e1efb2766"}, - {file = "ijson-3.2.0.post0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a7698bc480df76073067017f73ba4139dbaae20f7a6c9a0c7855b9c5e9a62124"}, - {file = "ijson-3.2.0.post0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2f204f6d4cedeb28326c230a0b046968b5263c234c65a5b18cee22865800fff7"}, - {file = "ijson-3.2.0.post0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9829a17f6f78d7f4d0aeff28c126926a1e5f86828ebb60d6a0acfa0d08457f9f"}, - {file = "ijson-3.2.0.post0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f470f3d750e00df86e03254fdcb422d2f726f4fb3a0d8eeee35e81343985e58a"}, - {file = "ijson-3.2.0.post0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb167ee21d9c413d6b0ab65ec12f3e7ea0122879da8b3569fa1063526f9f03a8"}, - {file = "ijson-3.2.0.post0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84eed88177f6c243c52b280cb094f751de600d98d2221e0dec331920894889ec"}, - {file = "ijson-3.2.0.post0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53f1a13eb99ab514c562869513172135d4b55a914b344e6518ba09ad3ef1e503"}, - {file = "ijson-3.2.0.post0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f6785ba0f65eb64b1ce3b7fcfec101085faf98f4e77b234f14287fd4138ffb25"}, - {file = "ijson-3.2.0.post0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:79b94662c2e9d366ab362c2c5858097eae0da100dea0dfd340db09ab28c8d5e8"}, - {file = "ijson-3.2.0.post0-cp38-cp38-win32.whl", hash = "sha256:5242cb2313ba3ece307b426efa56424ac13cc291c36f292b501d412a98ad0703"}, - {file = "ijson-3.2.0.post0-cp38-cp38-win_amd64.whl", hash = "sha256:775444a3b647350158d0b3c6c39c88b4a0995643a076cb104bf25042c9aedcf8"}, - {file = "ijson-3.2.0.post0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1d64ffaab1d006a4fa9584a4c723e95cc9609bf6c3365478e250cd0bffaaadf3"}, - {file = "ijson-3.2.0.post0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:434e57e7ec5c334ccb0e67bb4d9e60c264dcb2a3843713dbeb12cb19fe42a668"}, - {file = "ijson-3.2.0.post0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:158494bfe89ccb32618d0e53b471364080ceb975462ec464d9f9f37d9832b653"}, - {file = "ijson-3.2.0.post0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f20072376e338af0e51ccecb02335b4e242d55a9218a640f545be7fc64cca99"}, - {file = "ijson-3.2.0.post0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3e8d46c1004afcf2bf513a8fb575ee2ec3d8009a2668566b5926a2dcf7f1a45"}, - {file = "ijson-3.2.0.post0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:986a0347fe19e5117a5241276b72add570839e5bcdc7a6dac4b538c5928eeff5"}, - {file = "ijson-3.2.0.post0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:535a59d61b9aef6fc2a3d01564c1151e38e5a44b92cd6583cb4e8ccf0f58043f"}, - {file = "ijson-3.2.0.post0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:830de03f391f7e72b8587bb178c22d534da31153e9ee4234d54ef82cde5ace5e"}, - {file = "ijson-3.2.0.post0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6def9ac8d73b76cb02e9e9837763f27f71e5e67ec0afae5f1f4cf8f61c39b1ac"}, - {file = "ijson-3.2.0.post0-cp39-cp39-win32.whl", hash = "sha256:11bb84a53c37e227e733c6dffad2037391cf0b3474bff78596dc4373b02008a0"}, - {file = "ijson-3.2.0.post0-cp39-cp39-win_amd64.whl", hash = 
"sha256:f349bee14d0a4a72ba41e1b1cce52af324ebf704f5066c09e3dd04cfa6f545f0"}, - {file = "ijson-3.2.0.post0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5418066666b25b05f2b8ae2698408daa0afa68f07b0b217f2ab24465b7e9cbd9"}, - {file = "ijson-3.2.0.post0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ccc4d4b947549f9c431651c02b95ef571412c78f88ded198612a41d5c5701a0"}, - {file = "ijson-3.2.0.post0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcec67fc15e5978ad286e8cc2a3f9347076e28e0e01673b5ace18c73da64e3ff"}, - {file = "ijson-3.2.0.post0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee9537e8a8aa15dd2d0912737aeb6265e781e74f7f7cad8165048fcb5f39230"}, - {file = "ijson-3.2.0.post0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:03dfd4c8ed19e704d04b0ad4f34f598dc569fd3f73089f80eed698e7f6069233"}, - {file = "ijson-3.2.0.post0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2d50b2ad9c6c51ca160aa60de7f4dacd1357c38d0e503f51aed95c1c1945ff53"}, - {file = "ijson-3.2.0.post0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51c1db80d7791fb761ad9a6c70f521acd2c4b0e5afa2fe0d813beb2140d16c37"}, - {file = "ijson-3.2.0.post0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13f2939db983327dd0492f6c1c0e77be3f2cbf9b620c92c7547d1d2cd6ef0486"}, - {file = "ijson-3.2.0.post0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f9d449f86f8971c24609e319811f7f3b6b734f0218c4a0e799debe19300d15b"}, - {file = "ijson-3.2.0.post0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7e0d1713a9074a7677eb8e43f424b731589d1c689d4676e2f57a5ce59d089e89"}, - {file = "ijson-3.2.0.post0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c8646eb81eec559d7d8b1e51a5087299d06ecab3bc7da54c01f7df94350df135"}, - {file = "ijson-3.2.0.post0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fe3a53e00c59de33b825ba8d6d39f544a7d7180983cd3d6bd2c3794ae35442"}, - {file = "ijson-3.2.0.post0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93aaec00cbde65c192f15c21f3ee44d2ab0c11eb1a35020b5c4c2676f7fe01d0"}, - {file = "ijson-3.2.0.post0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00594ed3ef2218fee8c652d9e7f862fb39f8251b67c6379ef12f7e044bf6bbf3"}, - {file = "ijson-3.2.0.post0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1a75cfb34217b41136b714985be645f12269e4345da35d7b48aabd317c82fd10"}, - {file = "ijson-3.2.0.post0.tar.gz", hash = "sha256:80a5bd7e9923cab200701f67ad2372104328b99ddf249dbbe8834102c852d316"}, + {file = "ijson-3.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6f827f6961f093e1055a2be0c3137f0e7d667979da455ac9648f72d4a2bb8970"}, + {file = "ijson-3.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b6e51f4497065cd0d09f5e906cd538a8d22609eab716e3c883769acf147ab1b6"}, + {file = "ijson-3.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f022686c40bff3e340627a5a0c9212718d529e787ada3b76ba546d47a9ecdbbd"}, + {file = "ijson-3.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4105c15a13fa1dc24ebd3bf2e679fa14dcbfcc48bc39138a0fa3f4ddf6cc09b"}, + {file = "ijson-3.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:404423e666f185dfb753ddc92705c84dffdc4cc872aaf825bbe0607893cb5b02"}, + {file = "ijson-3.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:39e71f32830827cf21d0233a814092e5a23668e18f52eca5cac4f670d9df1240"}, + {file = "ijson-3.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43af7ed5292caa1452747e2b62485b6c0ece4bcbc5bf6f2758abd547e4124a14"}, + {file = "ijson-3.2.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e805aa6897a11b0f73f1f6bca078981df8960aeeccf527a214f240409c742bab"}, + {file = "ijson-3.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5b2df0bd84889e9017a670328fe3e82ec509fd6744c7ac2c99c7ee2300d76afa"}, + {file = "ijson-3.2.1-cp310-cp310-win32.whl", hash = "sha256:675259c7ea7f51ffaf8cb9e79bf875e28bb09622892943f4f415588fd7ab7bec"}, + {file = "ijson-3.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:90d4b2eb771a3585c8186820fe50e3282ef62477b865e765a50a8295674abeac"}, + {file = "ijson-3.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fc581a61e210bf6013c1fa6536566e51127be1cfbd69539b63d8b813206d2fe0"}, + {file = "ijson-3.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75cdf7ad4c00a8f5ac94ff27e3b7c1bf5ac463f125bca2be1744c5bc9600db5c"}, + {file = "ijson-3.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:85a2bf4636ace4d92e7c5d857a1c5694f42407c868953cf2927f18127bcd0d58"}, + {file = "ijson-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fe0cb66e7dd4aa11da5fff60bdf5ee04819a5e6a57acf7ca12c65f7fc009afc"}, + {file = "ijson-3.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6f7957ad38cb714378944032f2c2ee9c6531b5b0b38c5ccd08cedbb0ceddd02"}, + {file = "ijson-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13283d264cca8a63e5bad91e82eec39711e95893e7e8d4a419799a8c5f85203a"}, + {file = "ijson-3.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:12c24cde850fe79bc806be0e9fc38b47dd5ac0a223070ccb12e9b695425e2936"}, + {file = "ijson-3.2.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2ce8eed838e5a0791cb5948117b5453f2b3b3c28d93d06ee2bbf2c198c47881c"}, + {file = "ijson-3.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b81c2589f191b0dc741f532be00b4bea617297dd9698431c8053e2d28272d4db"}, + {file = "ijson-3.2.1-cp311-cp311-win32.whl", hash = "sha256:ba2beac56ac96f728d0f2430e4c667c66819a423d321bb9db9ebdebd803e1b5b"}, + {file = "ijson-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:c71614ed4bbc6a32ff1e42d7ce92a176fb67d658913343792d2c4567aa130817"}, + {file = "ijson-3.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:683fc8b0ea085e75ea34044fdc70649b37367d494f132a2bd1e59d7135054d89"}, + {file = "ijson-3.2.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deeaecec2f4e20e8bec20b0a5cdc34daebe7903f2e700f7dcaef68b5925d35ea"}, + {file = "ijson-3.2.1-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11923ac3188877f19dbb7051f7345202701cc39bf8e5ac44f8ae536c9eca8c82"}, + {file = "ijson-3.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:400deefcdae21e90fc39c1dcfc6ba2df24537e8c65bd57b763ed5256b73ba64d"}, + {file = "ijson-3.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:56bc4bad53770710a3a91944fe640fdeb269987a14352b74ebbad2aa55801c00"}, + {file = "ijson-3.2.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:f5a179523e085126844c6161aabcd193dbb5747bd01fadb68e92abf048f32ec9"}, + {file = "ijson-3.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ee24655986e4415fbb7a0cf51445fff3072ceac0e219f4bbbd5c53535a3c5680"}, + {file = "ijson-3.2.1-cp36-cp36m-win32.whl", hash = 
"sha256:4a5c672b0540005c1bb0bba97aa559a87a2e4ee409fc68e2f5ba5b30f009ac99"}, + {file = "ijson-3.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cfaf1d89b0e122e69c87a15db6d6f44feb9db96d2af7fe88cdc464177a257b5d"}, + {file = "ijson-3.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1cbd052eb67c1b3611f25974ba967886e89391faaf55afec93808c19f06ca612"}, + {file = "ijson-3.2.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13ffc491886e5d7bde7d68712d168bce0141b2a918db1164bc8599c0123e293"}, + {file = "ijson-3.2.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4c4fc6bafc777f8422fe36edb1cbd72a13cb29695893a064c9c95776a4bdf9"}, + {file = "ijson-3.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42fcb2bf9748c26f004690b2feb6e13e4875bb7c9d83535f887c21e0a982a7c"}, + {file = "ijson-3.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0c92f7bc2f3a947c2ba7f7aa48382c36079f8259c930e81d9164341f9b853c45"}, + {file = "ijson-3.2.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fd497042db562379339660e787bc8679ed3abaa740768d39bc3746e769e7c7a5"}, + {file = "ijson-3.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7d61c7cd8ddd75dcef818ff5a111a31b902a6a0e410ee0c2b2ecaa6dac92658a"}, + {file = "ijson-3.2.1-cp37-cp37m-win32.whl", hash = "sha256:36caf624d263fc40e7e805d759d09ea368d8cf497aecb3241ac2f0a286ad8eca"}, + {file = "ijson-3.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:32f9ed25ff80942e433119600bca13b86a8f9b8b0966edbc1d91a48ccbdd4d54"}, + {file = "ijson-3.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e89bbd747140eac3a3c9e7e5835b90d85c4a02763fc5134861bfc1ea03b66ae7"}, + {file = "ijson-3.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d69b4b1d509de36ec42a0e4af30ede39fb754e4039b2928ef7282ebc2125ffdd"}, + {file = "ijson-3.2.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e7feb0771f50deabe6ce85b210fa9e005843d3d3c60fb3315d69e1f9d0d75e0c"}, + {file = "ijson-3.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fd8148a363888054ff06eaaa1103f2f98720ab39666084a214e4fedfc13cf64"}, + {file = "ijson-3.2.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:598638dcc5141e9ae269903901877103f5362e0db4443e34721df8f8d34577b4"}, + {file = "ijson-3.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e979190b7d0fabca20d6b7224ab1c1aa461ad1ab72ba94f1bb1e5894cd59f342"}, + {file = "ijson-3.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bc810eb80b4f486c7957201ba2a53f53ddc9b3233af67e4359e29371bf04883b"}, + {file = "ijson-3.2.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:26e758584611dfe826dd18ffd94dc0d8a062ce56e41674ad3bfa371c7b78c4b5"}, + {file = "ijson-3.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:24e9ae5b35b85ea094b6c36495bc856089254aed6a48bada8d7eec5a04f74439"}, + {file = "ijson-3.2.1-cp38-cp38-win32.whl", hash = "sha256:4b5dc7b5b4b8cb3087d188f37911cd67e26672d33d3571e73440de3f0a86f7e6"}, + {file = "ijson-3.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:1af94ff40609270bbb3eac47e072582bb578f5023fac8408cccd80fe5892d221"}, + {file = "ijson-3.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2dda67affceebc52c8bc5fe72c3a4a1e338e4d4b0497dbac5089c2d3862df214"}, + {file = "ijson-3.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bd780303ddfedc8d57cdb9f2d53a8cea2f2f4a6fb857bf8fe5a0c3ab1d4ca901"}, + {file = "ijson-3.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:4fbab6af1bab88a8e46beda08cf44610eed0adb8d157a1a60b4bb6c3a121c6de"}, + {file = "ijson-3.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97a07988a1e0ce2bc8e8a62eb5f25195a3bd58a939ac353cbc6018a548cc08d"}, + {file = "ijson-3.2.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a65671a6826ae723837143914c823ad7bcc0d1a3e38d87c71df897a2556fb48f"}, + {file = "ijson-3.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1806372008bbed9ee92db5747e38c047fa1c4ee89cb2dd5daaa57feb46ce50a"}, + {file = "ijson-3.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:91e5a8e96f78a59e2520078c227a4fec5bf91c13adeded9e33fb13981cb823c3"}, + {file = "ijson-3.2.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1f820fce8ef093718f2319ff6f1322390664659b783775919dadccb1b470153d"}, + {file = "ijson-3.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bca3e8c91a1076a20620dbaa6a2848772b0e8a4055e86d42d3fa39221b53ed1a"}, + {file = "ijson-3.2.1-cp39-cp39-win32.whl", hash = "sha256:de87f137b7438d43840f4339a37d4e6a58c987f4bb2a70609969f854f8ae20f3"}, + {file = "ijson-3.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:0caebb8350b47266a58b766ec08e1de441d6d160702c428b5cf7504d93c832c4"}, + {file = "ijson-3.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37389785c1abd27fcc24800fcfa9a6b1022743413e4056507fd32356b623ff33"}, + {file = "ijson-3.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b364b82231d51cbeae52468c3b27e8a042e544ab764c8f3975e912cf010603f"}, + {file = "ijson-3.2.1-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a5999d0ec28a8ec47cf20c736fd4f895dc077bf6441bf237b00b074315a295d"}, + {file = "ijson-3.2.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd481857a39305517fb6f1313d558c2dc4e78c9e9384cc5bc1c3e28f1afbedf"}, + {file = "ijson-3.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:545f62f12f89350d4d73f2a779cb269198ae578fac080085a1927148b803e602"}, + {file = "ijson-3.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4d5622505d01c2f3d7b9638c1eb8c747eb550936b505225893704289ff28576f"}, + {file = "ijson-3.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20293bb36423b129fad3753858ccf7b2ccb5b2c0d3759efe810d0b9d79633a7e"}, + {file = "ijson-3.2.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cd8a4921b852fd2cb5b0c985540c97ff6893139a57fe7121d510ec5d1c0ca44"}, + {file = "ijson-3.2.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc902ff1ae1efed7d526294d7a9dd3df66d29b2cdc05fb5479838fef1327a534"}, + {file = "ijson-3.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2925a7978d8170146a9cb49a15a982b71fbbf21980bf2e16cd90c528545b7c02"}, + {file = "ijson-3.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c21c6509f6944939399f3630c5dc424d30d71d375f6cd58f9af56158fdf7251c"}, + {file = "ijson-3.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5729fc7648bc972d70922d7dad15459cca3a9e5ed0328eb9ae3ffa004066194"}, + {file = "ijson-3.2.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:805a2d5ed5a15d60327bc9347f2d4125ab621fb18071db98b1c598f1ee99e8f1"}, + {file = "ijson-3.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d0220a4b6c63f44589e429157174e3f4b8d1e534d5fb82bdb43a7f8dd77ae4b"}, + {file = 
"ijson-3.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:271d9b7c083f65c58ff0afd9dbb5d2f3d445f734632aebfef4a261b0a337abdb"}, + {file = "ijson-3.2.1.tar.gz", hash = "sha256:8574bf19f31fab870488769ad919a80f130825236ac8bde9a733f69c2961d7a7"}, ] [[package]] From 887fa4b66b038c886634a3eef92af108e391be34 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 20 Jun 2023 04:05:31 -0500 Subject: [PATCH 092/153] Switch from `matrix://` to `matrix-federation://` scheme for internal Synapse routing of outbound federation traffic (#15806) `matrix://` is a registered specced scheme nowadays and doesn't make sense for our internal to Synapse use case anymore. ([discussion] (https://github.com/matrix-org/synapse/pull/15773#discussion_r1227598679)) --- changelog.d/15806.misc | 1 + contrib/lnav/synapse-log-format.json | 2 +- scripts-dev/federation_client.py | 4 +- .../federation/matrix_federation_agent.py | 14 ++++--- synapse/http/matrixfederationclient.py | 9 ++++- tests/federation/test_federation_client.py | 4 +- .../test_matrix_federation_agent.py | 38 ++++++++++--------- 7 files changed, 43 insertions(+), 29 deletions(-) create mode 100644 changelog.d/15806.misc diff --git a/changelog.d/15806.misc b/changelog.d/15806.misc new file mode 100644 index 000000000000..80d0eb2f8f46 --- /dev/null +++ b/changelog.d/15806.misc @@ -0,0 +1 @@ +Switch from `matrix://` to `matrix-federation://` scheme for internal Synapse routing of outbound federation traffic. diff --git a/contrib/lnav/synapse-log-format.json b/contrib/lnav/synapse-log-format.json index ad7017ee5ec2..649cd623e8b4 100644 --- a/contrib/lnav/synapse-log-format.json +++ b/contrib/lnav/synapse-log-format.json @@ -29,7 +29,7 @@ "level": "error" }, { - "line": "my-matrix-server-federation-sender-1 | 2023-01-25 20:56:20,995 - synapse.http.matrixfederationclient - 709 - WARNING - federation_transaction_transmission_loop-3 - {PUT-O-3} [example.com] Request failed: PUT matrix://example.com/_matrix/federation/v1/send/1674680155797: HttpResponseException('403: Forbidden')", + "line": "my-matrix-server-federation-sender-1 | 2023-01-25 20:56:20,995 - synapse.http.matrixfederationclient - 709 - WARNING - federation_transaction_transmission_loop-3 - {PUT-O-3} [example.com] Request failed: PUT matrix-federation://example.com/_matrix/federation/v1/send/1674680155797: HttpResponseException('403: Forbidden')", "level": "warning" }, { diff --git a/scripts-dev/federation_client.py b/scripts-dev/federation_client.py index b1d5e2e61667..63f0b25ddd7f 100755 --- a/scripts-dev/federation_client.py +++ b/scripts-dev/federation_client.py @@ -136,11 +136,11 @@ def request( authorization_headers.append(header) print("Authorization: %s" % header, file=sys.stderr) - dest = "matrix://%s%s" % (destination, path) + dest = "matrix-federation://%s%s" % (destination, path) print("Requesting %s" % dest, file=sys.stderr) s = requests.Session() - s.mount("matrix://", MatrixConnectionAdapter()) + s.mount("matrix-federation://", MatrixConnectionAdapter()) headers: Dict[str, str] = { "Authorization": authorization_headers[0], diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py index 7e8cf316822d..91a24efcd019 100644 --- a/synapse/http/federation/matrix_federation_agent.py +++ b/synapse/http/federation/matrix_federation_agent.py @@ -51,8 +51,10 @@ @implementer(IAgent) class MatrixFederationAgent: """An Agent-like thing which provides a `request` method which correctly - handles resolving matrix server names when using 
matrix://. Handles standard - https URIs as normal. + handles resolving matrix server names when using `matrix-federation://`. Handles + standard https URIs as normal. The `matrix-federation://` scheme is internal to + Synapse and we purposely want to avoid colliding with the `matrix://` URL scheme + which is now specced. Doesn't implement any retries. (Those are done in MatrixFederationHttpClient.) @@ -167,14 +169,14 @@ def request( # There must be a valid hostname. assert parsed_uri.hostname - # If this is a matrix:// URI check if the server has delegated matrix + # If this is a matrix-federation:// URI check if the server has delegated matrix # traffic using well-known delegation. # # We have to do this here and not in the endpoint as we need to rewrite # the host header with the delegated server name. delegated_server = None if ( - parsed_uri.scheme == b"matrix" + parsed_uri.scheme == b"matrix-federation" and not _is_ip_literal(parsed_uri.hostname) and not parsed_uri.port ): @@ -250,7 +252,7 @@ def endpointForURI(self, parsed_uri: URI) -> "MatrixHostnameEndpoint": @implementer(IStreamClientEndpoint) class MatrixHostnameEndpoint: - """An endpoint that resolves matrix:// URLs using Matrix server name + """An endpoint that resolves matrix-federation:// URLs using Matrix server name resolution (i.e. via SRV). Does not check for well-known delegation. Args: @@ -379,7 +381,7 @@ async def _resolve_server(self) -> List[Server]: connect to. """ - if self._parsed_uri.scheme != b"matrix": + if self._parsed_uri.scheme != b"matrix-federation": return [Server(host=self._parsed_uri.host, port=self._parsed_uri.port)] # Note: We don't do well-known lookup as that needs to have happened diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index abb5ae581521..fc0101808d0f 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -174,7 +174,14 @@ def __attrs_post_init__(self) -> None: # The object is frozen so we can pre-compute this. 
uri = urllib.parse.urlunparse( - (b"matrix", destination_bytes, path_bytes, None, query_bytes, b"") + ( + b"matrix-federation", + destination_bytes, + path_bytes, + None, + query_bytes, + b"", + ) ) object.__setattr__(self, "uri", uri) diff --git a/tests/federation/test_federation_client.py b/tests/federation/test_federation_client.py index 91694e4fcada..a45ab8368347 100644 --- a/tests/federation/test_federation_client.py +++ b/tests/federation/test_federation_client.py @@ -124,7 +124,7 @@ def test_get_room_state(self) -> None: # check the right call got made to the agent self._mock_agent.request.assert_called_once_with( b"GET", - b"matrix://yet.another.server/_matrix/federation/v1/state/%21room_id?event_id=event_id", + b"matrix-federation://yet.another.server/_matrix/federation/v1/state/%21room_id?event_id=event_id", headers=mock.ANY, bodyProducer=None, ) @@ -232,7 +232,7 @@ def _get_pdu_once(self) -> EventBase: # check the right call got made to the agent self._mock_agent.request.assert_called_once_with( b"GET", - b"matrix://yet.another.server/_matrix/federation/v1/event/event_id", + b"matrix-federation://yet.another.server/_matrix/federation/v1/event/event_id", headers=mock.ANY, bodyProducer=None, ) diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index 105b4caefa3d..aed2a4c07a46 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -292,7 +292,7 @@ def _do_get(self) -> None: self.agent = self._make_agent() self.reactor.lookups["testserv"] = "1.2.3.4" - test_d = self._make_get_request(b"matrix://testserv:8448/foo/bar") + test_d = self._make_get_request(b"matrix-federation://testserv:8448/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -393,7 +393,7 @@ def _do_get_via_proxy( self.reactor.lookups["testserv"] = "1.2.3.4" self.reactor.lookups["proxy.com"] = "9.9.9.9" - test_d = self._make_get_request(b"matrix://testserv:8448/foo/bar") + test_d = self._make_get_request(b"matrix-federation://testserv:8448/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -532,7 +532,7 @@ def test_get_ip_address(self) -> None: # there will be a getaddrinfo on the IP self.reactor.lookups["1.2.3.4"] = "1.2.3.4" - test_d = self._make_get_request(b"matrix://1.2.3.4/foo/bar") + test_d = self._make_get_request(b"matrix-federation://1.2.3.4/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -568,7 +568,7 @@ def test_get_ipv6_address(self) -> None: # there will be a getaddrinfo on the IP self.reactor.lookups["::1"] = "::1" - test_d = self._make_get_request(b"matrix://[::1]/foo/bar") + test_d = self._make_get_request(b"matrix-federation://[::1]/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -604,7 +604,7 @@ def test_get_ipv6_address_with_port(self) -> None: # there will be a getaddrinfo on the IP self.reactor.lookups["::1"] = "::1" - test_d = self._make_get_request(b"matrix://[::1]:80/foo/bar") + test_d = self._make_get_request(b"matrix-federation://[::1]:80/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -639,7 +639,7 @@ def test_get_hostname_bad_cert(self) -> None: self.mock_resolver.resolve_service.side_effect = generate_resolve_service([]) self.reactor.lookups["testserv1"] = "1.2.3.4" - test_d = self._make_get_request(b"matrix://testserv1/foo/bar") + test_d = self._make_get_request(b"matrix-federation://testserv1/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -693,7 +693,7 @@ 
def test_get_ip_address_bad_cert(self) -> None: # there will be a getaddrinfo on the IP self.reactor.lookups["1.2.3.5"] = "1.2.3.5" - test_d = self._make_get_request(b"matrix://1.2.3.5/foo/bar") + test_d = self._make_get_request(b"matrix-federation://1.2.3.5/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -725,7 +725,7 @@ def test_get_no_srv_no_well_known(self) -> None: self.mock_resolver.resolve_service.side_effect = generate_resolve_service([]) self.reactor.lookups["testserv"] = "1.2.3.4" - test_d = self._make_get_request(b"matrix://testserv/foo/bar") + test_d = self._make_get_request(b"matrix-federation://testserv/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -780,7 +780,7 @@ def test_get_well_known(self) -> None: self.reactor.lookups["testserv"] = "1.2.3.4" self.reactor.lookups["target-server"] = "1::f" - test_d = self._make_get_request(b"matrix://testserv/foo/bar") + test_d = self._make_get_request(b"matrix-federation://testserv/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -844,7 +844,7 @@ def test_get_well_known_redirect(self) -> None: self.reactor.lookups["testserv"] = "1.2.3.4" self.reactor.lookups["target-server"] = "1::f" - test_d = self._make_get_request(b"matrix://testserv/foo/bar") + test_d = self._make_get_request(b"matrix-federation://testserv/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -933,7 +933,7 @@ def test_get_invalid_well_known(self) -> None: self.mock_resolver.resolve_service.side_effect = generate_resolve_service([]) self.reactor.lookups["testserv"] = "1.2.3.4" - test_d = self._make_get_request(b"matrix://testserv/foo/bar") + test_d = self._make_get_request(b"matrix-federation://testserv/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -1009,7 +1009,7 @@ def test_get_well_known_unsigned_cert(self) -> None: ), ) - test_d = agent.request(b"GET", b"matrix://testserv/foo/bar") + test_d = agent.request(b"GET", b"matrix-federation://testserv/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -1042,7 +1042,7 @@ def test_get_hostname_srv(self) -> None: ) self.reactor.lookups["srvtarget"] = "1.2.3.4" - test_d = self._make_get_request(b"matrix://testserv/foo/bar") + test_d = self._make_get_request(b"matrix-federation://testserv/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -1082,7 +1082,7 @@ def test_get_well_known_srv(self) -> None: self.reactor.lookups["testserv"] = "1.2.3.4" self.reactor.lookups["srvtarget"] = "5.6.7.8" - test_d = self._make_get_request(b"matrix://testserv/foo/bar") + test_d = self._make_get_request(b"matrix-federation://testserv/foo/bar") # Nothing happened yet self.assertNoResult(test_d) @@ -1143,7 +1143,9 @@ def test_idna_servername(self) -> None: self.reactor.lookups["xn--bcher-kva.com"] = "1.2.3.4" # this is idna for bücher.com - test_d = self._make_get_request(b"matrix://xn--bcher-kva.com/foo/bar") + test_d = self._make_get_request( + b"matrix-federation://xn--bcher-kva.com/foo/bar" + ) # Nothing happened yet self.assertNoResult(test_d) @@ -1204,7 +1206,9 @@ def test_idna_srv_target(self) -> None: ) self.reactor.lookups["xn--trget-3qa.com"] = "1.2.3.4" - test_d = self._make_get_request(b"matrix://xn--bcher-kva.com/foo/bar") + test_d = self._make_get_request( + b"matrix-federation://xn--bcher-kva.com/foo/bar" + ) # Nothing happened yet self.assertNoResult(test_d) @@ -1411,7 +1415,7 @@ def test_srv_fallbacks(self) -> None: ) self.reactor.lookups["target.com"] = "1.2.3.4" - test_d = self._make_get_request(b"matrix://testserv/foo/bar") + 
test_d = self._make_get_request(b"matrix-federation://testserv/foo/bar") # Nothing happened yet self.assertNoResult(test_d) From 2301a09d7af2a7b1c85cceb9cc60eee895784c20 Mon Sep 17 00:00:00 2001 From: ew-at-vier <136321107+ew-at-vier@users.noreply.github.com> Date: Tue, 20 Jun 2023 12:45:26 +0200 Subject: [PATCH 093/153] Fix admin api documentation typo (#15805) * Fix admin api documentation typo Signed-off-by: Eric Wolf --- changelog.d/15805.doc | 1 + docs/admin_api/rooms.md | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15805.doc diff --git a/changelog.d/15805.doc b/changelog.d/15805.doc new file mode 100644 index 000000000000..446f627cfc13 --- /dev/null +++ b/changelog.d/15805.doc @@ -0,0 +1 @@ +Fix a typo in the [Admin API](https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/index.html). diff --git a/docs/admin_api/rooms.md b/docs/admin_api/rooms.md index 66b29e82dcaa..90b06045a820 100644 --- a/docs/admin_api/rooms.md +++ b/docs/admin_api/rooms.md @@ -419,7 +419,7 @@ The following query parameters are available: * `from` (required) - The token to start returning events from. This token can be obtained from a prev_batch or next_batch token returned by the /sync endpoint, or from an end token returned by a previous request to this endpoint. -* `to` - The token to spot returning events at. +* `to` - The token to stop returning events at. * `limit` - The maximum number of events to return. Defaults to `10`. * `filter` - A JSON RoomEventFilter to filter returned events with. * `dir` - The direction to return events from. Either `f` for forwards or `b` for backwards. Setting From 6a5cf1a759526d567b58e33ad18d6db9e3ebde25 Mon Sep 17 00:00:00 2001 From: Shay Date: Tue, 20 Jun 2023 07:55:46 -0700 Subject: [PATCH 094/153] Fix Sytest environmental variable evaluation in CI (#15804) --- .github/workflows/tests.yml | 4 ++-- changelog.d/15804.bugfix | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/15804.bugfix diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index a0d1c24e9075..6c2298499777 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -399,8 +399,8 @@ jobs: env: SYTEST_BRANCH: ${{ github.head_ref }} POSTGRES: ${{ matrix.job.postgres && 1}} - MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}} - ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') && 1 }} + MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') || '' }} + ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') || '' }} WORKERS: ${{ matrix.job.workers && 1 }} BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }} TOP: ${{ github.workspace }} diff --git a/changelog.d/15804.bugfix b/changelog.d/15804.bugfix new file mode 100644 index 000000000000..7c8b954397e4 --- /dev/null +++ b/changelog.d/15804.bugfix @@ -0,0 +1 @@ +Fix Sytest environmental variable evaluation in CI. From 7d3da399dd905d2a05da5a1941e996cbf2380e99 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Tue, 20 Jun 2023 17:22:50 +0200 Subject: [PATCH 095/153] 1.86.0 --- CHANGES.md | 6 ++++++ debian/changelog | 6 ++++++ pyproject.toml | 2 +- 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index f2f39c3b6eb3..ff4126044bd9 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,9 @@ +Synapse 1.86.0 (2023-06-20) +=========================== + +No significant changes since 1.86.0rc2. 
+ + Synapse 1.86.0rc2 (2023-06-14) ============================== diff --git a/debian/changelog b/debian/changelog index 81b71ba342a1..9d057c65ef46 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.86.0) stable; urgency=medium + + * New Synapse release 1.86.0. + + -- Synapse Packaging team Tue, 20 Jun 2023 17:22:46 +0200 + matrix-synapse-py3 (1.86.0~rc2) stable; urgency=medium * New Synapse release 1.86.0rc2. diff --git a/pyproject.toml b/pyproject.toml index 097bd039433f..3626be9797e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.86.0rc2" +version = "1.86.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0" From 496f73103df838795b0e98f8c1c7337468e41abc Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Wed, 21 Jun 2023 10:41:11 +0200 Subject: [PATCH 096/153] Allow for the configuration of max request retries and min/max retry delays in the matrix federation client (#15783) --- changelog.d/15783.misc | 1 + .../configuration/config_documentation.md | 26 ++++++++ synapse/config/federation.py | 16 +++++ synapse/http/matrixfederationclient.py | 59 ++++++++++++------- tests/http/test_matrixfederationclient.py | 20 ++++++- 5 files changed, 100 insertions(+), 22 deletions(-) create mode 100644 changelog.d/15783.misc diff --git a/changelog.d/15783.misc b/changelog.d/15783.misc new file mode 100644 index 000000000000..0bebaa213d9e --- /dev/null +++ b/changelog.d/15783.misc @@ -0,0 +1 @@ +Allow for the configuration of max request retries and min/max retry delays in the matrix federation client. diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 0cf6e075ff11..26d7c7900cbe 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -1196,6 +1196,32 @@ Example configuration: allow_device_name_lookup_over_federation: true ``` --- +### `federation` + +The federation section defines some sub-options related to federation. + +The following options are related to configuring timeout and retry logic for one request, +independently of the others. +Short retry algorithm is used when something or someone will wait for the request to have an +answer, while long retry is used for requests that happen in the background, +like sending a federation transaction. + +* `client_timeout`: timeout for the federation requests. Default to 60s. +* `max_short_retry_delay`: maximum delay to be used for the short retry algo. Default to 2s. +* `max_long_retry_delay`: maximum delay to be used for the short retry algo. Default to 60s. +* `max_short_retries`: maximum number of retries for the short retry algo. Default to 3 attempts. +* `max_long_retries`: maximum number of retries for the long retry algo. Default to 10 attempts. + +Example configuration: +```yaml +federation: + client_timeout: 180s + max_short_retry_delay: 7s + max_long_retry_delay: 100s + max_short_retries: 5 + max_long_retries: 20 +``` +--- ## Caching Options related to caching. 
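[Editor's aside, not part of the patch above or of the diffs that follow.] The `federation` options documented in the hunk above feed the backoff logic that the `synapse/http/matrixfederationclient.py` changes further down make configurable: short retries back off base-2 from 0.5s up to `max_short_retry_delay`, long retries back off base-4 up to `max_long_retry_delay`, and both are jittered by a factor between 0.8 and 1.4. Below is a minimal standalone sketch of that schedule, assuming the default values quoted in the documentation; the helper `federation_retry_delays` is invented here for illustration and does not exist in Synapse.

```python
# Illustrative sketch only: mirrors the delay computation in
# MatrixFederationHttpClient._send_request as rewritten by this patch.
# The defaults below are the ones quoted in the documentation hunk above;
# the 0.8-1.4 jitter bounds come from the matrixfederationclient.py hunk below.
import random
from typing import List


def federation_retry_delays(
    long_retries: bool,
    max_long_retries: int = 10,
    max_short_retries: int = 3,
    max_long_retry_delay: float = 60.0,  # seconds
    max_short_retry_delay: float = 2.0,  # seconds
) -> List[float]:
    """Return the jittered sleeps used before each re-send of one request."""
    delays = []
    retries_left = max_long_retries if long_retries else max_short_retries
    while retries_left > 0:
        if long_retries:
            # Base-4 exponential backoff, capped at max_long_retry_delay.
            delay = min(4 ** (max_long_retries + 1 - retries_left), max_long_retry_delay)
        else:
            # Base-2 exponential backoff from 0.5s, capped at max_short_retry_delay.
            delay = min(0.5 * 2 ** (max_short_retries - retries_left), max_short_retry_delay)
        delays.append(delay * random.uniform(0.8, 1.4))
        retries_left -= 1
    return delays


# With the defaults, the long-retry schedule is roughly 4s, 16s, 60s, 60s, ...
# and the short-retry schedule roughly 0.5s, 1s, 2s (before jitter).
```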
diff --git a/synapse/config/federation.py b/synapse/config/federation.py index 336fca578aa1..0e1cb8b6e30c 100644 --- a/synapse/config/federation.py +++ b/synapse/config/federation.py @@ -22,6 +22,8 @@ class FederationConfig(Config): section = "federation" def read_config(self, config: JsonDict, **kwargs: Any) -> None: + federation_config = config.setdefault("federation", {}) + # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist: Optional[dict] = None federation_domain_whitelist = config.get("federation_domain_whitelist", None) @@ -49,5 +51,19 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: "allow_device_name_lookup_over_federation", False ) + # Allow for the configuration of timeout, max request retries + # and min/max retry delays in the matrix federation client. + self.client_timeout_ms = Config.parse_duration( + federation_config.get("client_timeout", "60s") + ) + self.max_long_retry_delay_ms = Config.parse_duration( + federation_config.get("max_long_retry_delay", "60s") + ) + self.max_short_retry_delay_ms = Config.parse_duration( + federation_config.get("max_short_retry_delay", "2s") + ) + self.max_long_retries = federation_config.get("max_long_retries", 10) + self.max_short_retries = federation_config.get("max_short_retries", 3) + _METRICS_FOR_DOMAINS_SCHEMA = {"type": "array", "items": {"type": "string"}} diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index fc0101808d0f..cc4e258b0fba 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -95,8 +95,6 @@ ) -MAX_LONG_RETRIES = 10 -MAX_SHORT_RETRIES = 3 MAXINT = sys.maxsize @@ -413,7 +411,16 @@ def __init__( self.clock = hs.get_clock() self._store = hs.get_datastores().main self.version_string_bytes = hs.version_string.encode("ascii") - self.default_timeout = 60 + self.default_timeout_seconds = hs.config.federation.client_timeout_ms / 1000 + + self.max_long_retry_delay_seconds = ( + hs.config.federation.max_long_retry_delay_ms / 1000 + ) + self.max_short_retry_delay_seconds = ( + hs.config.federation.max_short_retry_delay_ms / 1000 + ) + self.max_long_retries = hs.config.federation.max_long_retries + self.max_short_retries = hs.config.federation.max_short_retries self._cooperator = Cooperator(scheduler=_make_scheduler(self.reactor)) @@ -542,10 +549,10 @@ async def _send_request( logger.exception(f"Invalid destination: {request.destination}.") raise FederationDeniedError(request.destination) - if timeout: + if timeout is not None: _sec_timeout = timeout / 1000 else: - _sec_timeout = self.default_timeout + _sec_timeout = self.default_timeout_seconds if ( self.hs.config.federation.federation_domain_whitelist is not None @@ -590,9 +597,9 @@ async def _send_request( # XXX: Would be much nicer to retry only at the transaction-layer # (once we have reliable transactions in place) if long_retries: - retries_left = MAX_LONG_RETRIES + retries_left = self.max_long_retries else: - retries_left = MAX_SHORT_RETRIES + retries_left = self.max_short_retries url_bytes = request.uri url_str = url_bytes.decode("ascii") @@ -737,24 +744,34 @@ async def _send_request( if retries_left and not timeout: if long_retries: - delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left) - delay = min(delay, 60) - delay *= random.uniform(0.8, 1.4) + delay_seconds = 4 ** ( + self.max_long_retries + 1 - retries_left + ) + delay_seconds = min( + delay_seconds, self.max_long_retry_delay_seconds + ) + delay_seconds *= random.uniform(0.8, 1.4) 
else: - delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left) - delay = min(delay, 2) - delay *= random.uniform(0.8, 1.4) + delay_seconds = 0.5 * 2 ** ( + self.max_short_retries - retries_left + ) + delay_seconds = min( + delay_seconds, self.max_short_retry_delay_seconds + ) + delay_seconds *= random.uniform(0.8, 1.4) logger.debug( "{%s} [%s] Waiting %ss before re-sending...", request.txn_id, request.destination, - delay, + delay_seconds, ) # Sleep for the calculated delay, or wake up immediately # if we get notified that the server is back up. - await self._sleeper.sleep(request.destination, delay * 1000) + await self._sleeper.sleep( + request.destination, delay_seconds * 1000 + ) retries_left -= 1 else: raise @@ -953,7 +970,7 @@ async def put_json( if timeout is not None: _sec_timeout = timeout / 1000 else: - _sec_timeout = self.default_timeout + _sec_timeout = self.default_timeout_seconds if parser is None: parser = cast(ByteParser[T], JsonParser()) @@ -1031,10 +1048,10 @@ async def post_json( ignore_backoff=ignore_backoff, ) - if timeout: + if timeout is not None: _sec_timeout = timeout / 1000 else: - _sec_timeout = self.default_timeout + _sec_timeout = self.default_timeout_seconds body = await _handle_response( self.reactor, _sec_timeout, request, response, start_ms, parser=JsonParser() @@ -1142,7 +1159,7 @@ async def get_json( if timeout is not None: _sec_timeout = timeout / 1000 else: - _sec_timeout = self.default_timeout + _sec_timeout = self.default_timeout_seconds if parser is None: parser = cast(ByteParser[T], JsonParser()) @@ -1218,7 +1235,7 @@ async def delete_json( if timeout is not None: _sec_timeout = timeout / 1000 else: - _sec_timeout = self.default_timeout + _sec_timeout = self.default_timeout_seconds body = await _handle_response( self.reactor, _sec_timeout, request, response, start_ms, parser=JsonParser() @@ -1270,7 +1287,7 @@ async def get_file( try: d = read_body_with_max_size(response, output_stream, max_size) - d.addTimeout(self.default_timeout, self.reactor) + d.addTimeout(self.default_timeout_seconds, self.reactor) length = await make_deferred_yieldable(d) except BodyExceededMaxSize: msg = "Requested file is too large > %r bytes" % (max_size,) diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py index 0dfc03ce50f4..b5f4a60fe500 100644 --- a/tests/http/test_matrixfederationclient.py +++ b/tests/http/test_matrixfederationclient.py @@ -40,7 +40,7 @@ from synapse.util import Clock from tests.server import FakeTransport -from tests.unittest import HomeserverTestCase +from tests.unittest import HomeserverTestCase, override_config def check_logcontext(context: LoggingContextOrSentinel) -> None: @@ -640,3 +640,21 @@ def test_build_auth_headers_rejects_falsey_destinations(self) -> None: self.cl.build_auth_headers( b"", b"GET", b"https://example.com", destination_is=b"" ) + + @override_config( + { + "federation": { + "client_timeout": "180s", + "max_long_retry_delay": "100s", + "max_short_retry_delay": "7s", + "max_long_retries": 20, + "max_short_retries": 5, + } + } + ) + def test_configurable_retry_and_delay_values(self) -> None: + self.assertEqual(self.cl.default_timeout_seconds, 180) + self.assertEqual(self.cl.max_long_retry_delay_seconds, 100) + self.assertEqual(self.cl.max_short_retry_delay_seconds, 7) + self.assertEqual(self.cl.max_long_retries, 20) + self.assertEqual(self.cl.max_short_retries, 5) From 6c749c51244df582cce8579f99a0856f79145c2e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 21 Jun 2023 11:34:32 
+0100 Subject: [PATCH 097/153] Fix typo in faster join docs (#15812) Fixes #15756 --- changelog.d/15812.doc | 1 + docs/development/synapse_architecture/faster_joins.md | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15812.doc diff --git a/changelog.d/15812.doc b/changelog.d/15812.doc new file mode 100644 index 000000000000..de7b64e4a25c --- /dev/null +++ b/changelog.d/15812.doc @@ -0,0 +1 @@ +Fix typo in MSC number in faster remote room join architecture doc. diff --git a/docs/development/synapse_architecture/faster_joins.md b/docs/development/synapse_architecture/faster_joins.md index c32d713b8af7..1e6d585b001a 100644 --- a/docs/development/synapse_architecture/faster_joins.md +++ b/docs/development/synapse_architecture/faster_joins.md @@ -6,7 +6,7 @@ This is a work-in-progress set of notes with two goals: See also [MSC3902](https://github.com/matrix-org/matrix-spec-proposals/pull/3902). -The key idea is described by [MSC706](https://github.com/matrix-org/matrix-spec-proposals/pull/3902). This allows servers to +The key idea is described by [MSC3706](https://github.com/matrix-org/matrix-spec-proposals/pull/3706). This allows servers to request a lightweight response to the federation `/send_join` endpoint. This is called a **faster join**, also known as a **partial join**. In these notes we'll usually use the word "partial" as it matches the database schema. From 289ce3b8d946c4c4964fac2ff020a0535dead4f0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 21 Jun 2023 14:20:46 +0100 Subject: [PATCH 098/153] Fix harmless exception in port DB script (#15814) The port DB script would try and run database background tasks, which could fail if the data they acted on was in the process of being ported. These exceptions were non fatal. Fixes #15789 --- changelog.d/15814.misc | 1 + synapse/_scripts/synapse_port_db.py | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 changelog.d/15814.misc diff --git a/changelog.d/15814.misc b/changelog.d/15814.misc new file mode 100644 index 000000000000..8e1107212f17 --- /dev/null +++ b/changelog.d/15814.misc @@ -0,0 +1 @@ +Fix harmless exceptions being printed when running the port DB script. diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index 27fee3d9a934..a803ada8ad06 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -1369,6 +1369,9 @@ def main() -> None: sys.stderr.write("Database must use the 'psycopg2' connector.\n") sys.exit(3) + # Don't run the background tasks that get started by the data stores. + hs_config["run_background_tasks_on"] = "some_other_process" + config = HomeServerConfig() config.parse_config_dict(hs_config, "", "") From e0c39d6bb526b01368393ae5d2173c8e6d39b60f Mon Sep 17 00:00:00 2001 From: Nicolas Werner <89468146+nico-famedly@users.noreply.github.com> Date: Wed, 21 Jun 2023 15:56:31 +0200 Subject: [PATCH 099/153] Fix forgotten rooms missing in initial sync (#15815) If you leave a room and forget it, then rejoin it, the room would be missing from the next initial sync. 
fixes #13262 Signed-off-by: Nicolas Werner --- changelog.d/15815.bugfix | 1 + synapse/storage/databases/main/cache.py | 13 +++++++++++++ tests/handlers/test_room_member.py | 21 +++++++++++++++++++++ 3 files changed, 35 insertions(+) create mode 100644 changelog.d/15815.bugfix diff --git a/changelog.d/15815.bugfix b/changelog.d/15815.bugfix new file mode 100644 index 000000000000..e20b5acac1ae --- /dev/null +++ b/changelog.d/15815.bugfix @@ -0,0 +1 @@ +Fix forgotten rooms missing from initial sync after rejoining them. Contributed by Nico from Famedly. diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py index 6e1c7d681fe5..c940f864d173 100644 --- a/synapse/storage/databases/main/cache.py +++ b/synapse/storage/databases/main/cache.py @@ -289,6 +289,17 @@ def _invalidate_caches_for_event( ) self._attempt_to_invalidate_cache("get_rooms_for_user", (state_key,)) + self._attempt_to_invalidate_cache( + "did_forget", + ( + state_key, + room_id, + ), + ) + self._attempt_to_invalidate_cache( + "get_forgotten_rooms_for_user", (state_key,) + ) + if relates_to: self._attempt_to_invalidate_cache("get_relations_for_event", (relates_to,)) self._attempt_to_invalidate_cache("get_references_for_event", (relates_to,)) @@ -336,6 +347,8 @@ def _invalidate_caches_for_room_events(self, room_id: str) -> None: "get_rooms_for_user_with_stream_ordering", None ) self._attempt_to_invalidate_cache("get_rooms_for_user", None) + self._attempt_to_invalidate_cache("did_forget", None) + self._attempt_to_invalidate_cache("get_forgotten_rooms_for_user", None) self._attempt_to_invalidate_cache("get_references_for_event", None) self._attempt_to_invalidate_cache("get_thread_summary", None) self._attempt_to_invalidate_cache("get_thread_participated", None) diff --git a/tests/handlers/test_room_member.py b/tests/handlers/test_room_member.py index a444d822cd4a..41199ffa297f 100644 --- a/tests/handlers/test_room_member.py +++ b/tests/handlers/test_room_member.py @@ -333,6 +333,27 @@ def test_leave_and_forget(self) -> None: self.get_success(self.store.is_locally_forgotten_room(self.room_id)) ) + def test_leave_and_unforget(self) -> None: + """Tests if rejoining a room unforgets the room, so that it shows up in sync again.""" + self.helper.join(self.room_id, user=self.bob, tok=self.bob_token) + + # alice is not the last room member that leaves and forgets the room + self.helper.leave(self.room_id, user=self.alice, tok=self.alice_token) + self.get_success(self.handler.forget(self.alice_ID, self.room_id)) + self.assertTrue( + self.get_success(self.store.did_forget(self.alice, self.room_id)) + ) + + self.helper.join(self.room_id, user=self.alice, tok=self.alice_token) + self.assertFalse( + self.get_success(self.store.did_forget(self.alice, self.room_id)) + ) + + # the server has not forgotten the room + self.assertFalse( + self.get_success(self.store.is_locally_forgotten_room(self.room_id)) + ) + @override_config({"forget_rooms_on_leave": True}) def test_leave_and_auto_forget(self) -> None: """Tests the `forget_rooms_on_leave` config option.""" From d53547352063c3e66302f7bdc40d7ac7270e605c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Jun 2023 16:32:53 +0100 Subject: [PATCH 100/153] Bump cryptography from 40.0.2 to 41.0.1 (#15800) Bumps [cryptography](https://github.com/pyca/cryptography) from 40.0.2 to 41.0.1. 
- [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/40.0.2...41.0.1) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 50 +++++++++++++++++++++++++------------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/poetry.lock b/poetry.lock index 88f1e9548aca..d7b7a5aa2c22 100644 --- a/poetry.lock +++ b/poetry.lock @@ -465,30 +465,30 @@ files = [ [[package]] name = "cryptography" -version = "40.0.2" +version = "41.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"}, - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"}, - {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"}, - {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"}, - {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"}, + {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"}, + {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3"}, + {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db"}, + {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31"}, + {file = "cryptography-41.0.1-cp37-abi3-win32.whl", hash = "sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5"}, + {file = "cryptography-41.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5"}, + {file = "cryptography-41.0.1.tar.gz", hash = "sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006"}, ] [package.dependencies] @@ -497,12 +497,12 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff"] -sdist = 
["setuptools-rust (>=0.11.4)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -tox = ["tox"] [[package]] name = "defusedxml" From 6e65ca0b3619454bf7a9304f6deef8f08f45944e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Jun 2023 09:09:57 +0100 Subject: [PATCH 101/153] Bump types-setuptools from 67.8.0.0 to 68.0.0.0 (#15835) Bumps [types-setuptools](https://github.com/python/typeshed) from 67.8.0.0 to 68.0.0.0. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-setuptools dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index d7b7a5aa2c22..133ffad6c385 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3003,13 +3003,13 @@ types-urllib3 = "*" [[package]] name = "types-setuptools" -version = "67.8.0.0" +version = "68.0.0.0" description = "Typing stubs for setuptools" optional = false python-versions = "*" files = [ - {file = "types-setuptools-67.8.0.0.tar.gz", hash = "sha256:95c9ed61871d6c0e258433373a4e1753c0a7c3627a46f4d4058c7b5a08ab844f"}, - {file = "types_setuptools-67.8.0.0-py3-none-any.whl", hash = "sha256:6df73340d96b238a4188b7b7668814b37e8018168aef1eef94a3b1872e3f60ff"}, + {file = "types-setuptools-68.0.0.0.tar.gz", hash = "sha256:fc958b4123b155ffc069a66d3af5fe6c1f9d0600c35c0c8444b2ab4147112641"}, + {file = "types_setuptools-68.0.0.0-py3-none-any.whl", hash = "sha256:cc00e09ba8f535362cbe1ea8b8407d15d14b59c57f4190cceaf61a9e57616446"}, ] [[package]] From 53ea381ec3571b281079654f5b284034a1827919 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Jun 2023 09:14:20 +0100 Subject: [PATCH 102/153] Bump ruff from 0.0.272 to 0.0.275 (#15833) Bumps [ruff](https://github.com/astral-sh/ruff) from 0.0.272 to 0.0.275. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/BREAKING_CHANGES.md) - [Commits](https://github.com/astral-sh/ruff/compare/v0.0.272...v0.0.275) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 38 +++++++++++++++++++------------------- pyproject.toml | 2 +- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index 133ffad6c385..7dbea889eff9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2245,28 +2245,28 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.0.272" +version = "0.0.275" description = "An extremely fast Python linter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.0.272-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:ae9b57546e118660175d45d264b87e9b4c19405c75b587b6e4d21e6a17bf4fdf"}, - {file = "ruff-0.0.272-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:1609b864a8d7ee75a8c07578bdea0a7db75a144404e75ef3162e0042bfdc100d"}, - {file = "ruff-0.0.272-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee76b4f05fcfff37bd6ac209d1370520d509ea70b5a637bdf0a04d0c99e13dff"}, - {file = "ruff-0.0.272-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48eccf225615e106341a641f826b15224b8a4240b84269ead62f0afd6d7e2d95"}, - {file = "ruff-0.0.272-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:677284430ac539bb23421a2b431b4ebc588097ef3ef918d0e0a8d8ed31fea216"}, - {file = "ruff-0.0.272-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9c4bfb75456a8e1efe14c52fcefb89cfb8f2a0d31ed8d804b82c6cf2dc29c42c"}, - {file = "ruff-0.0.272-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86bc788245361a8148ff98667da938a01e1606b28a45e50ac977b09d3ad2c538"}, - {file = "ruff-0.0.272-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b2ea68d2aa69fff1b20b67636b1e3e22a6a39e476c880da1282c3e4bf6ee5a"}, - {file = "ruff-0.0.272-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd2bbe337a3f84958f796c77820d55ac2db1e6753f39d1d1baed44e07f13f96d"}, - {file = "ruff-0.0.272-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d5a208f8ef0e51d4746930589f54f9f92f84bb69a7d15b1de34ce80a7681bc00"}, - {file = "ruff-0.0.272-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:905ff8f3d6206ad56fcd70674453527b9011c8b0dc73ead27618426feff6908e"}, - {file = "ruff-0.0.272-py3-none-musllinux_1_2_i686.whl", hash = "sha256:19643d448f76b1eb8a764719072e9c885968971bfba872e14e7257e08bc2f2b7"}, - {file = "ruff-0.0.272-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:691d72a00a99707a4e0b2846690961157aef7b17b6b884f6b4420a9f25cd39b5"}, - {file = "ruff-0.0.272-py3-none-win32.whl", hash = "sha256:dc406e5d756d932da95f3af082814d2467943631a587339ee65e5a4f4fbe83eb"}, - {file = "ruff-0.0.272-py3-none-win_amd64.whl", hash = "sha256:a37ec80e238ead2969b746d7d1b6b0d31aa799498e9ba4281ab505b93e1f4b28"}, - {file = "ruff-0.0.272-py3-none-win_arm64.whl", hash = "sha256:06b8ee4eb8711ab119db51028dd9f5384b44728c23586424fd6e241a5b9c4a3b"}, - {file = "ruff-0.0.272.tar.gz", hash = "sha256:273a01dc8c3c4fd4c2af7ea7a67c8d39bb09bce466e640dd170034da75d14cab"}, + {file = "ruff-0.0.275-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5e6554a072e7ce81eb6f0bec1cebd3dcb0e358652c0f4900d7d630d61691e914"}, + {file = "ruff-0.0.275-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:1cc599022fe5ffb143a965b8d659eb64161ab8ab4433d208777eab018a1aab67"}, + {file = "ruff-0.0.275-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5206fc1cd8c1c1deadd2e6360c0dbcd690f1c845da588ca9d32e4a764a402c60"}, + {file = "ruff-0.0.275-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c4e6468da26f77b90cae35319d310999f471a8c352998e9b39937a23750149e"}, + {file = "ruff-0.0.275-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0dbdea02942131dbc15dd45f431d152224f15e1dd1859fcd0c0487b658f60f1a"}, + {file = "ruff-0.0.275-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = 
"sha256:22efd9f41af27ef8fb9779462c46c35c89134d33e326c889971e10b2eaf50c63"}, + {file = "ruff-0.0.275-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c09662112cfa22d7467a19252a546291fd0eae4f423e52b75a7a2000a1894db"}, + {file = "ruff-0.0.275-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80043726662144876a381efaab88841c88e8df8baa69559f96b22d4fa216bef1"}, + {file = "ruff-0.0.275-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5859ee543b01b7eb67835dfd505faa8bb7cc1550f0295c92c1401b45b42be399"}, + {file = "ruff-0.0.275-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c8ace4d40a57b5ea3c16555f25a6b16bc5d8b2779ae1912ce2633543d4e9b1da"}, + {file = "ruff-0.0.275-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8347fc16aa185aae275906c4ac5b770e00c896b6a0acd5ba521f158801911998"}, + {file = "ruff-0.0.275-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ec43658c64bfda44fd84bbea9da8c7a3b34f65448192d1c4dd63e9f4e7abfdd4"}, + {file = "ruff-0.0.275-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:508b13f7ca37274cceaba4fb3ea5da6ca192356323d92acf39462337c33ad14e"}, + {file = "ruff-0.0.275-py3-none-win32.whl", hash = "sha256:6afb1c4422f24f361e877937e2a44b3f8176774a476f5e33845ebfe887dd5ec2"}, + {file = "ruff-0.0.275-py3-none-win_amd64.whl", hash = "sha256:d9b264d78621bf7b698b6755d4913ab52c19bd28bee1a16001f954d64c1a1220"}, + {file = "ruff-0.0.275-py3-none-win_arm64.whl", hash = "sha256:a19ce3bea71023eee5f0f089dde4a4272d088d5ac0b675867e074983238ccc65"}, + {file = "ruff-0.0.275.tar.gz", hash = "sha256:a63a0b645da699ae5c758fce19188e901b3033ec54d862d93fcd042addf7f38d"}, ] [[package]] @@ -3294,4 +3294,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.7.1" -content-hash = "090924370b17fd265407b5a3f9cbc00997308f575b455399b39a48e3ca1a5a8e" +content-hash = "7f31754a1009d7b6c9a1bd7221a0b243ffd510f362c28f0da417aaac16757a87" diff --git a/pyproject.toml b/pyproject.toml index 90812de019e8..a44ecd65e4a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -311,7 +311,7 @@ all = [ # We pin black so that our tests don't start failing on new releases. isort = ">=5.10.1" black = ">=22.3.0" -ruff = "0.0.272" +ruff = "0.0.275" # Typechecking lxml-stubs = ">=0.4.0" From 52d8131e87f90114018cac6daf01b1dd64eac28b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Jun 2023 09:26:01 +0100 Subject: [PATCH 103/153] Bump types-opentracing from 2.4.10.4 to 2.4.10.5 (#15830) Bumps [types-opentracing](https://github.com/python/typeshed) from 2.4.10.4 to 2.4.10.5. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-opentracing dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7dbea889eff9..b1b6087d09fd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2931,13 +2931,13 @@ files = [ [[package]] name = "types-opentracing" -version = "2.4.10.4" +version = "2.4.10.5" description = "Typing stubs for opentracing" optional = false python-versions = "*" files = [ - {file = "types-opentracing-2.4.10.4.tar.gz", hash = "sha256:347040c9da4ada7d3c795659912c95d98c5651e242e8eaa0344815fee5bb97e2"}, - {file = "types_opentracing-2.4.10.4-py3-none-any.whl", hash = "sha256:73c9b958eea3df6c4906ebf3865608a562dd9981c1bbc75a373a583c613bed56"}, + {file = "types-opentracing-2.4.10.5.tar.gz", hash = "sha256:852d13ab1324832835d50c00cfd58b9267f0e79ec3189e5664c2a90c26880fd4"}, + {file = "types_opentracing-2.4.10.5-py3-none-any.whl", hash = "sha256:8f12ab4dce3e298a8e6655da9a6d52171e7a275357eae4cec22a1663d94023a7"}, ] [[package]] From 25c55a9d2267b09da3e18090ee538782d6771a27 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 26 Jun 2023 15:12:20 +0100 Subject: [PATCH 104/153] Add login spam checker API (#15838) --- changelog.d/15838.feature | 1 + docs/modules/spam_checker_callbacks.md | 36 ++++++ synapse/http/site.py | 11 ++ synapse/module_api/__init__.py | 3 + .../callbacks/spamchecker_callbacks.py | 80 +++++++++++++ synapse/rest/client/login.py | 52 ++++++++- tests/rest/client/test_login.py | 108 +++++++++++++++++- 7 files changed, 285 insertions(+), 6 deletions(-) create mode 100644 changelog.d/15838.feature diff --git a/changelog.d/15838.feature b/changelog.d/15838.feature new file mode 100644 index 000000000000..04c77bd72396 --- /dev/null +++ b/changelog.d/15838.feature @@ -0,0 +1 @@ +Add spam checker module API for logins. diff --git a/docs/modules/spam_checker_callbacks.md b/docs/modules/spam_checker_callbacks.md index 1a0c6ec954ca..ffdfe6082e1b 100644 --- a/docs/modules/spam_checker_callbacks.md +++ b/docs/modules/spam_checker_callbacks.md @@ -348,6 +348,42 @@ callback returns `False`, Synapse falls through to the next one. The value of th callback that does not return `False` will be used. If this happens, Synapse will not call any of the subsequent implementations of this callback. + +### `check_login_for_spam` + +_First introduced in Synapse v1.87.0_ + +```python +async def check_login_for_spam( + user_id: str, + device_id: Optional[str], + initial_display_name: Optional[str], + request_info: Collection[Tuple[Optional[str], str]], + auth_provider_id: Optional[str] = None, +) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"] +``` + +Called when a user logs in. + +The arguments passed to this callback are: + +* `user_id`: The user ID the user is logging in with +* `device_id`: The device ID the user is re-logging into. +* `initial_display_name`: The device display name, if any. +* `request_info`: A collection of tuples, which first item is a user agent, and which + second item is an IP address. These user agents and IP addresses are the ones that were + used during the login process. +* `auth_provider_id`: The identifier of the SSO authentication provider, if any. + +If multiple modules implement this callback, they will be considered in order. If a +callback returns `synapse.module_api.NOT_SPAM`, Synapse falls through to the next one. 
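As a concrete illustration of the callback contract, the sketch below shows a module that registers `check_login_for_spam` and refuses logins from a blocklist of IP addresses. The blocklist idea and the `blocked_ips` config key are invented for the example; the registration call, argument list and return values follow the documentation above.

```python
from typing import Collection, Optional, Tuple, Union

from typing_extensions import Literal

from synapse.module_api import NOT_SPAM, ModuleApi
from synapse.module_api.errors import Codes


class ExampleLoginSpamChecker:
    def __init__(self, config: dict, api: ModuleApi):
        # Invented example config: a set of IP addresses to refuse logins from.
        self._blocked_ips = set(config.get("blocked_ips", []))
        api.register_spam_checker_callbacks(
            check_login_for_spam=self.check_login_for_spam,
        )

    async def check_login_for_spam(
        self,
        user_id: str,
        device_id: Optional[str],
        initial_display_name: Optional[str],
        request_info: Collection[Tuple[Optional[str], str]],
        auth_provider_id: Optional[str] = None,
    ) -> Union[Literal["NOT_SPAM"], Codes]:
        # Each entry in request_info is a (user agent, IP address) pair seen
        # during the login process.
        for _user_agent, ip in request_info:
            if ip in self._blocked_ips:
                return Codes.FORBIDDEN
        return NOT_SPAM
```

A module like this is enabled by listing it under the homeserver's `modules` configuration, just like any other module that implements spam checker callbacks.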
+The value of the first callback that does not return `synapse.module_api.NOT_SPAM` will +be used. If this happens, Synapse will not call any of the subsequent implementations of +this callback. + +*Note:* This will not be called when a user registers. + + ## Example The example below is a module that implements the spam checker callback diff --git a/synapse/http/site.py b/synapse/http/site.py index c530966ef336..5b5a7c1e598f 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -521,6 +521,11 @@ def get_client_ip_if_available(self) -> str: else: return self.getClientAddress().host + def request_info(self) -> "RequestInfo": + h = self.getHeader(b"User-Agent") + user_agent = h.decode("ascii", "replace") if h else None + return RequestInfo(user_agent=user_agent, ip=self.get_client_ip_if_available()) + class XForwardedForRequest(SynapseRequest): """Request object which honours proxy headers @@ -661,3 +666,9 @@ def request_factory(channel: HTTPChannel, queued: bool) -> Request: def log(self, request: SynapseRequest) -> None: pass + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class RequestInfo: + user_agent: Optional[str] + ip: str diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 84b2aef62086..95f780011153 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -80,6 +80,7 @@ ) from synapse.module_api.callbacks.spamchecker_callbacks import ( CHECK_EVENT_FOR_SPAM_CALLBACK, + CHECK_LOGIN_FOR_SPAM_CALLBACK, CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK, CHECK_REGISTRATION_FOR_SPAM_CALLBACK, CHECK_USERNAME_FOR_SPAM_CALLBACK, @@ -302,6 +303,7 @@ def register_spam_checker_callbacks( CHECK_REGISTRATION_FOR_SPAM_CALLBACK ] = None, check_media_file_for_spam: Optional[CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK] = None, + check_login_for_spam: Optional[CHECK_LOGIN_FOR_SPAM_CALLBACK] = None, ) -> None: """Registers callbacks for spam checking capabilities. @@ -319,6 +321,7 @@ def register_spam_checker_callbacks( check_username_for_spam=check_username_for_spam, check_registration_for_spam=check_registration_for_spam, check_media_file_for_spam=check_media_file_for_spam, + check_login_for_spam=check_login_for_spam, ) def register_account_validity_callbacks( diff --git a/synapse/module_api/callbacks/spamchecker_callbacks.py b/synapse/module_api/callbacks/spamchecker_callbacks.py index 4456d1b81eb2..7cee442145b9 100644 --- a/synapse/module_api/callbacks/spamchecker_callbacks.py +++ b/synapse/module_api/callbacks/spamchecker_callbacks.py @@ -196,6 +196,26 @@ ] ], ] +CHECK_LOGIN_FOR_SPAM_CALLBACK = Callable[ + [ + str, + Optional[str], + Optional[str], + Collection[Tuple[Optional[str], str]], + Optional[str], + ], + Awaitable[ + Union[ + Literal["NOT_SPAM"], + Codes, + # Highly experimental, not officially part of the spamchecker API, may + # disappear without warning depending on the results of ongoing + # experiments. + # Use this to return additional information as part of an error. 
+ Tuple[Codes, JsonDict], + ] + ], +] def load_legacy_spam_checkers(hs: "synapse.server.HomeServer") -> None: @@ -315,6 +335,7 @@ def __init__(self, hs: "synapse.server.HomeServer") -> None: self._check_media_file_for_spam_callbacks: List[ CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK ] = [] + self._check_login_for_spam_callbacks: List[CHECK_LOGIN_FOR_SPAM_CALLBACK] = [] def register_callbacks( self, @@ -335,6 +356,7 @@ def register_callbacks( CHECK_REGISTRATION_FOR_SPAM_CALLBACK ] = None, check_media_file_for_spam: Optional[CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK] = None, + check_login_for_spam: Optional[CHECK_LOGIN_FOR_SPAM_CALLBACK] = None, ) -> None: """Register callbacks from module for each hook.""" if check_event_for_spam is not None: @@ -378,6 +400,9 @@ def register_callbacks( if check_media_file_for_spam is not None: self._check_media_file_for_spam_callbacks.append(check_media_file_for_spam) + if check_login_for_spam is not None: + self._check_login_for_spam_callbacks.append(check_login_for_spam) + @trace async def check_event_for_spam( self, event: "synapse.events.EventBase" @@ -819,3 +844,58 @@ async def check_media_file_for_spam( return synapse.api.errors.Codes.FORBIDDEN, {} return self.NOT_SPAM + + async def check_login_for_spam( + self, + user_id: str, + device_id: Optional[str], + initial_display_name: Optional[str], + request_info: Collection[Tuple[Optional[str], str]], + auth_provider_id: Optional[str] = None, + ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + """Checks if we should allow the given registration request. + + Args: + user_id: The request user ID + request_info: List of tuples of user agent and IP that + were used during the registration process. + auth_provider_id: The SSO IdP the user used, e.g "oidc", "saml", + "cas". If any. Note this does not include users registered + via a password provider. + + Returns: + Enum for how the request should be handled + """ + + for callback in self._check_login_for_spam_callbacks: + with Measure( + self.clock, "{}.{}".format(callback.__module__, callback.__qualname__) + ): + res = await delay_cancellation( + callback( + user_id, + device_id, + initial_display_name, + request_info, + auth_provider_id, + ) + ) + # Normalize return values to `Codes` or `"NOT_SPAM"`. 
+ if res is self.NOT_SPAM: + continue + elif isinstance(res, synapse.api.errors.Codes): + return res, {} + elif ( + isinstance(res, tuple) + and len(res) == 2 + and isinstance(res[0], synapse.api.errors.Codes) + and isinstance(res[1], dict) + ): + return res + else: + logger.warning( + "Module returned invalid value, rejecting login as spam" + ) + return synapse.api.errors.Codes.FORBIDDEN, {} + + return self.NOT_SPAM diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index 6493b00bb803..d724c6892067 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -50,7 +50,7 @@ parse_json_object_from_request, parse_string, ) -from synapse.http.site import SynapseRequest +from synapse.http.site import RequestInfo, SynapseRequest from synapse.rest.client._base import client_patterns from synapse.rest.well_known import WellKnownBuilder from synapse.types import JsonDict, UserID @@ -114,6 +114,7 @@ def __init__(self, hs: "HomeServer"): self.auth_handler = self.hs.get_auth_handler() self.registration_handler = hs.get_registration_handler() self._sso_handler = hs.get_sso_handler() + self._spam_checker = hs.get_module_api_callbacks().spam_checker self._well_known_builder = WellKnownBuilder(hs) self._address_ratelimiter = Ratelimiter( @@ -197,6 +198,8 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, LoginResponse]: self._refresh_tokens_enabled and client_requested_refresh_token ) + request_info = request.request_info() + try: if login_submission["type"] == LoginRestServlet.APPSERVICE_TYPE: requester = await self.auth.get_user_by_req(request) @@ -216,6 +219,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, LoginResponse]: login_submission, appservice, should_issue_refresh_token=should_issue_refresh_token, + request_info=request_info, ) elif ( self.jwt_enabled @@ -227,6 +231,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, LoginResponse]: result = await self._do_jwt_login( login_submission, should_issue_refresh_token=should_issue_refresh_token, + request_info=request_info, ) elif login_submission["type"] == LoginRestServlet.TOKEN_TYPE: await self._address_ratelimiter.ratelimit( @@ -235,6 +240,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, LoginResponse]: result = await self._do_token_login( login_submission, should_issue_refresh_token=should_issue_refresh_token, + request_info=request_info, ) else: await self._address_ratelimiter.ratelimit( @@ -243,6 +249,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, LoginResponse]: result = await self._do_other_login( login_submission, should_issue_refresh_token=should_issue_refresh_token, + request_info=request_info, ) except KeyError: raise SynapseError(400, "Missing JSON keys.") @@ -265,6 +272,8 @@ async def _do_appservice_login( login_submission: JsonDict, appservice: ApplicationService, should_issue_refresh_token: bool = False, + *, + request_info: RequestInfo, ) -> LoginResponse: identifier = login_submission.get("identifier") logger.info("Got appservice login request with identifier: %r", identifier) @@ -300,10 +309,15 @@ async def _do_appservice_login( # The user represented by an appservice's configured sender_localpart # is not actually created in Synapse. 
should_check_deactivated=qualified_user_id != appservice.sender, + request_info=request_info, ) async def _do_other_login( - self, login_submission: JsonDict, should_issue_refresh_token: bool = False + self, + login_submission: JsonDict, + should_issue_refresh_token: bool = False, + *, + request_info: RequestInfo, ) -> LoginResponse: """Handle non-token/saml/jwt logins @@ -333,6 +347,7 @@ async def _do_other_login( login_submission, callback, should_issue_refresh_token=should_issue_refresh_token, + request_info=request_info, ) return result @@ -347,6 +362,8 @@ async def _complete_login( should_issue_refresh_token: bool = False, auth_provider_session_id: Optional[str] = None, should_check_deactivated: bool = True, + *, + request_info: RequestInfo, ) -> LoginResponse: """Called when we've successfully authed the user and now need to actually login them in (e.g. create devices). This gets called on @@ -371,6 +388,7 @@ async def _complete_login( This exists purely for appservice's configured sender_localpart which doesn't have an associated user in the database. + request_info: The user agent/IP address of the user. Returns: Dictionary of account information after successful login. @@ -417,6 +435,22 @@ async def _complete_login( ) initial_display_name = login_submission.get("initial_device_display_name") + spam_check = await self._spam_checker.check_login_for_spam( + user_id, + device_id=device_id, + initial_display_name=initial_display_name, + request_info=[(request_info.user_agent, request_info.ip)], + auth_provider_id=auth_provider_id, + ) + if spam_check != self._spam_checker.NOT_SPAM: + logger.info("Blocking login due to spam checker") + raise SynapseError( + 403, + msg="Login was blocked by the server", + errcode=spam_check[0], + additional_fields=spam_check[1], + ) + ( device_id, access_token, @@ -451,7 +485,11 @@ async def _complete_login( return result async def _do_token_login( - self, login_submission: JsonDict, should_issue_refresh_token: bool = False + self, + login_submission: JsonDict, + should_issue_refresh_token: bool = False, + *, + request_info: RequestInfo, ) -> LoginResponse: """ Handle token login. @@ -474,10 +512,15 @@ async def _do_token_login( auth_provider_id=res.auth_provider_id, should_issue_refresh_token=should_issue_refresh_token, auth_provider_session_id=res.auth_provider_session_id, + request_info=request_info, ) async def _do_jwt_login( - self, login_submission: JsonDict, should_issue_refresh_token: bool = False + self, + login_submission: JsonDict, + should_issue_refresh_token: bool = False, + *, + request_info: RequestInfo, ) -> LoginResponse: """ Handle the custom JWT login. @@ -496,6 +539,7 @@ async def _do_jwt_login( login_submission, create_non_existent_users=True, should_issue_refresh_token=should_issue_refresh_token, + request_info=request_info, ) diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py index f3c3bc69a97a..ffbc13bb8df3 100644 --- a/tests/rest/client/test_login.py +++ b/tests/rest/client/test_login.py @@ -13,11 +13,12 @@ # limitations under the License. 
import time import urllib.parse -from typing import Any, Dict, List, Optional +from typing import Any, Collection, Dict, List, Optional, Tuple, Union from unittest.mock import Mock from urllib.parse import urlencode import pymacaroons +from typing_extensions import Literal from twisted.test.proto_helpers import MemoryReactor from twisted.web.resource import Resource @@ -26,11 +27,12 @@ from synapse.api.constants import ApprovalNoticeMedium, LoginType from synapse.api.errors import Codes from synapse.appservice import ApplicationService +from synapse.module_api import ModuleApi from synapse.rest.client import devices, login, logout, register from synapse.rest.client.account import WhoamiRestServlet from synapse.rest.synapse.client import build_synapse_client_resource_tree from synapse.server import HomeServer -from synapse.types import create_requester +from synapse.types import JsonDict, create_requester from synapse.util import Clock from tests import unittest @@ -88,6 +90,56 @@ ] +class TestSpamChecker: + def __init__(self, config: None, api: ModuleApi): + api.register_spam_checker_callbacks( + check_login_for_spam=self.check_login_for_spam, + ) + + @staticmethod + def parse_config(config: JsonDict) -> None: + return None + + async def check_login_for_spam( + self, + user_id: str, + device_id: Optional[str], + initial_display_name: Optional[str], + request_info: Collection[Tuple[Optional[str], str]], + auth_provider_id: Optional[str] = None, + ) -> Union[ + Literal["NOT_SPAM"], + Tuple["synapse.module_api.errors.Codes", JsonDict], + ]: + return "NOT_SPAM" + + +class DenyAllSpamChecker: + def __init__(self, config: None, api: ModuleApi): + api.register_spam_checker_callbacks( + check_login_for_spam=self.check_login_for_spam, + ) + + @staticmethod + def parse_config(config: JsonDict) -> None: + return None + + async def check_login_for_spam( + self, + user_id: str, + device_id: Optional[str], + initial_display_name: Optional[str], + request_info: Collection[Tuple[Optional[str], str]], + auth_provider_id: Optional[str] = None, + ) -> Union[ + Literal["NOT_SPAM"], + Tuple["synapse.module_api.errors.Codes", JsonDict], + ]: + # Return an odd set of values to ensure that they get correctly passed + # to the client. + return Codes.LIMIT_EXCEEDED, {"extra": "value"} + + class LoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, @@ -469,6 +521,58 @@ def test_get_login_flows_with_login_via_existing_enabled(self) -> None: ], ) + @override_config( + { + "modules": [ + { + "module": TestSpamChecker.__module__ + + "." + + TestSpamChecker.__qualname__ + } + ] + } + ) + def test_spam_checker_allow(self) -> None: + """Check that that adding a spam checker doesn't break login.""" + self.register_user("kermit", "monkey") + + body = {"type": "m.login.password", "user": "kermit", "password": "monkey"} + + channel = self.make_request( + "POST", + "/_matrix/client/r0/login", + body, + ) + self.assertEqual(channel.code, 200, channel.result) + + @override_config( + { + "modules": [ + { + "module": DenyAllSpamChecker.__module__ + + "." 
+ + DenyAllSpamChecker.__qualname__ + } + ] + } + ) + def test_spam_checker_deny(self) -> None: + """Check that login""" + + self.register_user("kermit", "monkey") + + body = {"type": "m.login.password", "user": "kermit", "password": "monkey"} + + channel = self.make_request( + "POST", + "/_matrix/client/r0/login", + body, + ) + self.assertEqual(channel.code, 403, channel.result) + self.assertDictContainsSubset( + {"errcode": Codes.LIMIT_EXCEEDED, "extra": "value"}, channel.json_body + ) + @skip_unless(has_saml2 and HAS_OIDC, "Requires SAML2 and OIDC") class MultiSSOTestCase(unittest.HomeserverTestCase): From 70dc44f6673781a3e40cce9f82b30feabc174367 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Jun 2023 15:36:07 +0100 Subject: [PATCH 105/153] Bump towncrier from 22.12.0 to 23.6.0 (#15831) Bumps [towncrier](https://github.com/twisted/towncrier) from 22.12.0 to 23.6.0. - [Release notes](https://github.com/twisted/towncrier/releases) - [Changelog](https://github.com/twisted/towncrier/blob/trunk/NEWS.rst) - [Commits](https://github.com/twisted/towncrier/compare/22.12.0...23.6.0) --- updated-dependencies: - dependency-name: towncrier dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index b1b6087d09fd..ee19c246f375 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2711,21 +2711,21 @@ files = [ [[package]] name = "towncrier" -version = "22.12.0" +version = "23.6.0" description = "Building newsfiles for your project." optional = false python-versions = ">=3.7" files = [ - {file = "towncrier-22.12.0-py3-none-any.whl", hash = "sha256:9767a899a4d6856950f3598acd9e8f08da2663c49fdcda5ea0f9e6ba2afc8eea"}, - {file = "towncrier-22.12.0.tar.gz", hash = "sha256:9c49d7e75f646a9aea02ae904c0bc1639c8fd14a01292d2b123b8d307564034d"}, + {file = "towncrier-23.6.0-py3-none-any.whl", hash = "sha256:da552f29192b3c2b04d630133f194c98e9f14f0558669d427708e203fea4d0a5"}, + {file = "towncrier-23.6.0.tar.gz", hash = "sha256:fc29bd5ab4727c8dacfbe636f7fb5dc53b99805b62da1c96b214836159ff70c1"}, ] [package.dependencies] click = "*" click-default-group = "*" +importlib-resources = {version = ">=5", markers = "python_version < \"3.10\""} incremental = "*" jinja2 = "*" -setuptools = "*" tomli = {version = "*", markers = "python_version < \"3.11\""} [package.extras] From 14c1bfd53482860e176b0f336107cb1096ba7755 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Jun 2023 15:36:33 +0100 Subject: [PATCH 106/153] Bump serde_json from 1.0.97 to 1.0.99 (#15832) Bumps [serde_json](https://github.com/serde-rs/json) from 1.0.97 to 1.0.99. - [Release notes](https://github.com/serde-rs/json/releases) - [Commits](https://github.com/serde-rs/json/compare/v1.0.97...v1.0.99) --- updated-dependencies: - dependency-name: serde_json dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 51ff26ec1baa..52f911277ebd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -340,9 +340,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.97" +version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdf3bf93142acad5821c99197022e170842cdbc1c30482b98750c688c640842a" +checksum = "46266871c240a00b8f503b877622fe33430b3c7d963bdc0f2adc511e54a1eae3" dependencies = [ "itoa", "ryu", From 78cfa55dad911e667b5a9b613e232eb72410382f Mon Sep 17 00:00:00 2001 From: Shay Date: Tue, 27 Jun 2023 01:41:42 -0700 Subject: [PATCH 107/153] Fix sqlite `user_filters` upgrade (#15817) --- changelog.d/15817.bugfix | 1 + .../78/02_validate_and_update_user_filters.py | 4 +- .../03_remove_unused_indexes_user_filters.py | 65 +++++++++++++++++++ .../04_add_full_user_id_index_user_filters.py | 25 +++++++ 4 files changed, 92 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15817.bugfix create mode 100644 synapse/storage/schema/main/delta/78/03_remove_unused_indexes_user_filters.py create mode 100644 synapse/storage/schema/main/delta/78/04_add_full_user_id_index_user_filters.py diff --git a/changelog.d/15817.bugfix b/changelog.d/15817.bugfix new file mode 100644 index 000000000000..2b025730ad5c --- /dev/null +++ b/changelog.d/15817.bugfix @@ -0,0 +1 @@ +Fix sqlite `user_filters` upgrade introduced in v1.86.0. diff --git a/synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py b/synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py index 8ef63335e7c3..e148ed26f29b 100644 --- a/synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py +++ b/synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py @@ -61,9 +61,7 @@ def run_upgrade( full_user_id text NOT NULL, user_id text NOT NULL, filter_id bigint NOT NULL, - filter_json bytea NOT NULL, - UNIQUE (full_user_id), - UNIQUE (user_id) + filter_json bytea NOT NULL ) """ cur.execute(create_sql) diff --git a/synapse/storage/schema/main/delta/78/03_remove_unused_indexes_user_filters.py b/synapse/storage/schema/main/delta/78/03_remove_unused_indexes_user_filters.py new file mode 100644 index 000000000000..f5ba1c3fd4ed --- /dev/null +++ b/synapse/storage/schema/main/delta/78/03_remove_unused_indexes_user_filters.py @@ -0,0 +1,65 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from synapse.config.homeserver import HomeServerConfig +from synapse.storage.database import LoggingTransaction +from synapse.storage.engines import BaseDatabaseEngine, Sqlite3Engine + + +def run_update( + cur: LoggingTransaction, + database_engine: BaseDatabaseEngine, + config: HomeServerConfig, +) -> None: + """ + Fix to drop unused indexes caused by incorrectly adding UNIQUE constraint to + columns `user_id` and `full_user_id` of table `user_filters` in previous migration. + """ + + if isinstance(database_engine, Sqlite3Engine): + cur.execute("DROP TABLE IF EXISTS temp_user_filters") + create_sql = """ + CREATE TABLE temp_user_filters ( + full_user_id text NOT NULL, + user_id text NOT NULL, + filter_id bigint NOT NULL, + filter_json bytea NOT NULL + ) + """ + cur.execute(create_sql) + + copy_sql = """ + INSERT INTO temp_user_filters ( + user_id, + filter_id, + filter_json, + full_user_id) + SELECT user_id, filter_id, filter_json, full_user_id FROM user_filters + """ + cur.execute(copy_sql) + + drop_sql = """ + DROP TABLE user_filters + """ + cur.execute(drop_sql) + + rename_sql = """ + ALTER TABLE temp_user_filters RENAME to user_filters + """ + cur.execute(rename_sql) + + index_sql = """ + CREATE UNIQUE INDEX IF NOT EXISTS user_filters_unique ON + user_filters (user_id, filter_id) + """ + cur.execute(index_sql) diff --git a/synapse/storage/schema/main/delta/78/04_add_full_user_id_index_user_filters.py b/synapse/storage/schema/main/delta/78/04_add_full_user_id_index_user_filters.py new file mode 100644 index 000000000000..97fecc2bd9f6 --- /dev/null +++ b/synapse/storage/schema/main/delta/78/04_add_full_user_id_index_user_filters.py @@ -0,0 +1,25 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
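The table-rebuild dance in the migration above is necessary because SQLite has no way to drop a UNIQUE constraint in place: the replacement table is created without the constraints, the rows are copied across, the tables are swapped, and the intended unique index is recreated. The following toy, self-contained demonstration of the same pattern uses an in-memory database and an illustrative row; it is not part of the migration itself.

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    -- Table with the over-strict UNIQUE constraints we want to remove.
    CREATE TABLE user_filters (
        full_user_id TEXT NOT NULL,
        user_id TEXT NOT NULL,
        filter_id BIGINT NOT NULL,
        filter_json BLOB NOT NULL,
        UNIQUE (full_user_id),
        UNIQUE (user_id)
    );
    INSERT INTO user_filters VALUES ('@a:example.org', 'a', 1, x'7b7d');

    -- Rebuild: new table without the constraints, copy, swap, re-index.
    CREATE TABLE temp_user_filters (
        full_user_id TEXT NOT NULL,
        user_id TEXT NOT NULL,
        filter_id BIGINT NOT NULL,
        filter_json BLOB NOT NULL
    );
    INSERT INTO temp_user_filters
        SELECT full_user_id, user_id, filter_id, filter_json FROM user_filters;
    DROP TABLE user_filters;
    ALTER TABLE temp_user_filters RENAME TO user_filters;
    CREATE UNIQUE INDEX IF NOT EXISTS user_filters_unique
        ON user_filters (user_id, filter_id);
    """
)

# The same user can now have several filters, which the old UNIQUE (user_id)
# constraint would have rejected.
conn.execute("INSERT INTO user_filters VALUES ('@a:example.org', 'a', 2, x'7b7d')")
assert conn.execute("SELECT COUNT(*) FROM user_filters").fetchone()[0] == 2
```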
+ +from synapse.storage.database import LoggingTransaction +from synapse.storage.engines import BaseDatabaseEngine, Sqlite3Engine + + +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: + if isinstance(database_engine, Sqlite3Engine): + idx_sql = """ + CREATE UNIQUE INDEX IF NOT EXISTS user_filters_full_user_id_unique ON + user_filters (full_user_id, filter_id) + """ + cur.execute(idx_sql) From 472c2c72f6af21504291691e89ac8296bbbef7ea Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 27 Jun 2023 10:29:20 -0500 Subject: [PATCH 108/153] Prepare changelog for v1.87.0rc1 --- CHANGES.md | 52 +++++++++++++++++++++++++++++++++++++++ changelog.d/15233.misc | 1 - changelog.d/15680.bugfix | 1 - changelog.d/15737.feature | 1 - changelog.d/15743.misc | 1 - changelog.d/15748.removal | 1 - changelog.d/15755.misc | 1 - changelog.d/15758.bugfix | 1 - changelog.d/15770.bugfix | 1 - changelog.d/15772.doc | 1 - changelog.d/15776.bugfix | 1 - changelog.d/15781.bugfix | 1 - changelog.d/15783.misc | 1 - changelog.d/15788.bugfix | 1 - changelog.d/15804.bugfix | 1 - changelog.d/15805.doc | 1 - changelog.d/15806.misc | 1 - changelog.d/15812.doc | 1 - changelog.d/15814.misc | 1 - changelog.d/15815.bugfix | 1 - changelog.d/15817.bugfix | 1 - changelog.d/15838.feature | 1 - debian/changelog | 6 +++++ pyproject.toml | 2 +- 24 files changed, 59 insertions(+), 22 deletions(-) delete mode 100644 changelog.d/15233.misc delete mode 100644 changelog.d/15680.bugfix delete mode 100644 changelog.d/15737.feature delete mode 100644 changelog.d/15743.misc delete mode 100644 changelog.d/15748.removal delete mode 100644 changelog.d/15755.misc delete mode 100644 changelog.d/15758.bugfix delete mode 100644 changelog.d/15770.bugfix delete mode 100644 changelog.d/15772.doc delete mode 100644 changelog.d/15776.bugfix delete mode 100644 changelog.d/15781.bugfix delete mode 100644 changelog.d/15783.misc delete mode 100644 changelog.d/15788.bugfix delete mode 100644 changelog.d/15804.bugfix delete mode 100644 changelog.d/15805.doc delete mode 100644 changelog.d/15806.misc delete mode 100644 changelog.d/15812.doc delete mode 100644 changelog.d/15814.misc delete mode 100644 changelog.d/15815.bugfix delete mode 100644 changelog.d/15817.bugfix delete mode 100644 changelog.d/15838.feature diff --git a/CHANGES.md b/CHANGES.md index ff4126044bd9..3def0a44cdd8 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,55 @@ +# Synapse 1.87.0rc1 (2023-06-27) + +### Features + +- Improve `/messages` response time by avoiding backfill when we already have messages to return. ([\#15737](https://github.com/matrix-org/synapse/issues/15737)) +- Add spam checker module API for logins. ([\#15838](https://github.com/matrix-org/synapse/issues/15838)) + +### Bugfixes + +- Fix a long-standing bug where media files were served in an unsafe manner. Contributed by @joshqou. ([\#15680](https://github.com/matrix-org/synapse/issues/15680)) +- Avoid invalidating a cache that was just prefilled. ([\#15758](https://github.com/matrix-org/synapse/issues/15758)) +- Fix requesting multiple keys at once over federation, related to [MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983). ([\#15770](https://github.com/matrix-org/synapse/issues/15770)) +- Fix joining rooms through aliases where the alias server isn't a real homeserver. Contributed by @tulir @ Beeper. 
([\#15776](https://github.com/matrix-org/synapse/issues/15776)) +- Fix a bug in push rules handling leading to an invalid (per spec) `is_user_mention` rule sent to clients. Also fix wrong rule names for `is_user_mention` and `is_room_mention`. ([\#15781](https://github.com/matrix-org/synapse/issues/15781)) +- Fix a bug introduced in 1.57.0 where the wrong table would be locked on updating database rows when using SQLite as the database backend. ([\#15788](https://github.com/matrix-org/synapse/issues/15788)) +- Fix Sytest environmental variable evaluation in CI. ([\#15804](https://github.com/matrix-org/synapse/issues/15804)) +- Fix forgotten rooms missing from initial sync after rejoining them. Contributed by Nico from Famedly. ([\#15815](https://github.com/matrix-org/synapse/issues/15815)) +- Fix sqlite `user_filters` upgrade introduced in v1.86.0. ([\#15817](https://github.com/matrix-org/synapse/issues/15817)) + +### Improved Documentation + +- Document `looping_call()` functionality that will wait for the given function to finish before scheduling another. ([\#15772](https://github.com/matrix-org/synapse/issues/15772)) +- Fix a typo in the [Admin API](https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/index.html). ([\#15805](https://github.com/matrix-org/synapse/issues/15805)) +- Fix typo in MSC number in faster remote room join architecture doc. ([\#15812](https://github.com/matrix-org/synapse/issues/15812)) + +### Deprecations and Removals + +- Remove experimental [MSC2716](https://github.com/matrix-org/matrix-spec-proposals/pull/2716) implementation to incrementally import history into existing rooms. ([\#15748](https://github.com/matrix-org/synapse/issues/15748)) + +### Internal Changes + +- Replace `EventContext` fields `prev_group` and `delta_ids` with field `state_group_deltas`. ([\#15233](https://github.com/matrix-org/synapse/issues/15233)) +- Regularly try to send transactions to other servers after they failed instead of waiting for a new event to be available before trying. ([\#15743](https://github.com/matrix-org/synapse/issues/15743)) +- Fix requesting multiple keys at once over federation, related to [MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983). ([\#15755](https://github.com/matrix-org/synapse/issues/15755)) +- Allow for the configuration of max request retries and min/max retry delays in the matrix federation client. ([\#15783](https://github.com/matrix-org/synapse/issues/15783)) +- Switch from `matrix://` to `matrix-federation://` scheme for internal Synapse routing of outbound federation traffic. ([\#15806](https://github.com/matrix-org/synapse/issues/15806)) +- Fix harmless exceptions being printed when running the port DB script. ([\#15814](https://github.com/matrix-org/synapse/issues/15814)) + +### Updates to locked dependencies + +* Bump attrs from 22.2.0 to 23.1.0. ([\#15801](https://github.com/matrix-org/synapse/issues/15801)) +* Bump cryptography from 40.0.2 to 41.0.1. ([\#15800](https://github.com/matrix-org/synapse/issues/15800)) +* Bump ijson from 3.2.0.post0 to 3.2.1. ([\#15802](https://github.com/matrix-org/synapse/issues/15802)) +* Bump phonenumbers from 8.13.13 to 8.13.14. ([\#15798](https://github.com/matrix-org/synapse/issues/15798)) +* Bump ruff from 0.0.265 to 0.0.272. ([\#15799](https://github.com/matrix-org/synapse/issues/15799)) +* Bump ruff from 0.0.272 to 0.0.275. ([\#15833](https://github.com/matrix-org/synapse/issues/15833)) +* Bump serde_json from 1.0.96 to 1.0.97. 
([\#15797](https://github.com/matrix-org/synapse/issues/15797)) +* Bump serde_json from 1.0.97 to 1.0.99. ([\#15832](https://github.com/matrix-org/synapse/issues/15832)) +* Bump towncrier from 22.12.0 to 23.6.0. ([\#15831](https://github.com/matrix-org/synapse/issues/15831)) +* Bump types-opentracing from 2.4.10.4 to 2.4.10.5. ([\#15830](https://github.com/matrix-org/synapse/issues/15830)) +* Bump types-setuptools from 67.8.0.0 to 68.0.0.0. ([\#15835](https://github.com/matrix-org/synapse/issues/15835)) + Synapse 1.86.0 (2023-06-20) =========================== diff --git a/changelog.d/15233.misc b/changelog.d/15233.misc deleted file mode 100644 index 1dff00bf3c8c..000000000000 --- a/changelog.d/15233.misc +++ /dev/null @@ -1 +0,0 @@ -Replace `EventContext` fields `prev_group` and `delta_ids` with field `state_group_deltas`. diff --git a/changelog.d/15680.bugfix b/changelog.d/15680.bugfix deleted file mode 100644 index 04ac19b4ec3a..000000000000 --- a/changelog.d/15680.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a long-standing bug where media files were served in an unsafe manner. Contributed by @joshqou. diff --git a/changelog.d/15737.feature b/changelog.d/15737.feature deleted file mode 100644 index 9a547b5ebdd0..000000000000 --- a/changelog.d/15737.feature +++ /dev/null @@ -1 +0,0 @@ -Improve `/messages` response time by avoiding backfill when we already have messages to return. diff --git a/changelog.d/15743.misc b/changelog.d/15743.misc deleted file mode 100644 index b95eed929e4b..000000000000 --- a/changelog.d/15743.misc +++ /dev/null @@ -1 +0,0 @@ -Regularly try to send transactions to other servers after they failed instead of waiting for a new event to be available before trying. diff --git a/changelog.d/15748.removal b/changelog.d/15748.removal deleted file mode 100644 index dcb978017839..000000000000 --- a/changelog.d/15748.removal +++ /dev/null @@ -1 +0,0 @@ -Remove experimental [MSC2716](https://github.com/matrix-org/matrix-spec-proposals/pull/2716) implementation to incrementally import history into existing rooms. diff --git a/changelog.d/15755.misc b/changelog.d/15755.misc deleted file mode 100644 index a65340d380e3..000000000000 --- a/changelog.d/15755.misc +++ /dev/null @@ -1 +0,0 @@ -Fix requesting multiple keys at once over federation, related to [MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983). diff --git a/changelog.d/15758.bugfix b/changelog.d/15758.bugfix deleted file mode 100644 index cabe25ca24ec..000000000000 --- a/changelog.d/15758.bugfix +++ /dev/null @@ -1 +0,0 @@ -Avoid invalidating a cache that was just prefilled. diff --git a/changelog.d/15770.bugfix b/changelog.d/15770.bugfix deleted file mode 100644 index a65340d380e3..000000000000 --- a/changelog.d/15770.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix requesting multiple keys at once over federation, related to [MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983). diff --git a/changelog.d/15772.doc b/changelog.d/15772.doc deleted file mode 100644 index 4d6c933c713b..000000000000 --- a/changelog.d/15772.doc +++ /dev/null @@ -1 +0,0 @@ -Document `looping_call()` functionality that will wait for the given function to finish before scheduling another. diff --git a/changelog.d/15776.bugfix b/changelog.d/15776.bugfix deleted file mode 100644 index f146a85f1a22..000000000000 --- a/changelog.d/15776.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix joining rooms through aliases where the alias server isn't a real homeserver. Contributed by @tulir @ Beeper. 
diff --git a/changelog.d/15781.bugfix b/changelog.d/15781.bugfix deleted file mode 100644 index 5faf59afee0c..000000000000 --- a/changelog.d/15781.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug in push rules handling leading to an invalid (per spec) `is_user_mention` rule sent to clients. Also fix wrong rule names for `is_user_mention` and `is_room_mention`. \ No newline at end of file diff --git a/changelog.d/15783.misc b/changelog.d/15783.misc deleted file mode 100644 index 0bebaa213d9e..000000000000 --- a/changelog.d/15783.misc +++ /dev/null @@ -1 +0,0 @@ -Allow for the configuration of max request retries and min/max retry delays in the matrix federation client. diff --git a/changelog.d/15788.bugfix b/changelog.d/15788.bugfix deleted file mode 100644 index d22aae7baf89..000000000000 --- a/changelog.d/15788.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug introduced in 1.57.0 where the wrong table would be locked on updating database rows when using SQLite as the database backend. \ No newline at end of file diff --git a/changelog.d/15804.bugfix b/changelog.d/15804.bugfix deleted file mode 100644 index 7c8b954397e4..000000000000 --- a/changelog.d/15804.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix Sytest environmental variable evaluation in CI. diff --git a/changelog.d/15805.doc b/changelog.d/15805.doc deleted file mode 100644 index 446f627cfc13..000000000000 --- a/changelog.d/15805.doc +++ /dev/null @@ -1 +0,0 @@ -Fix a typo in the [Admin API](https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/index.html). diff --git a/changelog.d/15806.misc b/changelog.d/15806.misc deleted file mode 100644 index 80d0eb2f8f46..000000000000 --- a/changelog.d/15806.misc +++ /dev/null @@ -1 +0,0 @@ -Switch from `matrix://` to `matrix-federation://` scheme for internal Synapse routing of outbound federation traffic. diff --git a/changelog.d/15812.doc b/changelog.d/15812.doc deleted file mode 100644 index de7b64e4a25c..000000000000 --- a/changelog.d/15812.doc +++ /dev/null @@ -1 +0,0 @@ -Fix typo in MSC number in faster remote room join architecture doc. diff --git a/changelog.d/15814.misc b/changelog.d/15814.misc deleted file mode 100644 index 8e1107212f17..000000000000 --- a/changelog.d/15814.misc +++ /dev/null @@ -1 +0,0 @@ -Fix harmless exceptions being printed when running the port DB script. diff --git a/changelog.d/15815.bugfix b/changelog.d/15815.bugfix deleted file mode 100644 index e20b5acac1ae..000000000000 --- a/changelog.d/15815.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix forgotten rooms missing from initial sync after rejoining them. Contributed by Nico from Famedly. diff --git a/changelog.d/15817.bugfix b/changelog.d/15817.bugfix deleted file mode 100644 index 2b025730ad5c..000000000000 --- a/changelog.d/15817.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix sqlite `user_filters` upgrade introduced in v1.86.0. diff --git a/changelog.d/15838.feature b/changelog.d/15838.feature deleted file mode 100644 index 04c77bd72396..000000000000 --- a/changelog.d/15838.feature +++ /dev/null @@ -1 +0,0 @@ -Add spam checker module API for logins. diff --git a/debian/changelog b/debian/changelog index 9d057c65ef46..2fa8d30fe1ed 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.87.0~rc1) stable; urgency=medium + + * New synapse release 1.87.0rc1. + + -- Synapse Packaging team Tue, 27 Jun 2023 15:27:04 +0000 + matrix-synapse-py3 (1.86.0) stable; urgency=medium * New Synapse release 1.86.0. 
diff --git a/pyproject.toml b/pyproject.toml index a44ecd65e4a8..fc47b1ef7101 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.86.0" +version = "1.87.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0" From 10ed3e233ee02d6a1faa744cf226c67042b60471 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 27 Jun 2023 10:34:11 -0500 Subject: [PATCH 109/153] Note last release with Python 3.7 support See https://github.com/matrix-org/synapse/issues/15836 --- CHANGES.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 3def0a44cdd8..dab35075b03e 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,5 +1,8 @@ # Synapse 1.87.0rc1 (2023-06-27) +Please note that this will be the last release of Synapse that is compatible with +Python 3.7 and earlier. + ### Features - Improve `/messages` response time by avoiding backfill when we already have messages to return. ([\#15737](https://github.com/matrix-org/synapse/issues/15737)) From 13fc89148c37f6c653dd237377ff165249fa73cc Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 28 Jun 2023 15:10:33 -0500 Subject: [PATCH 110/153] Split out 2022 changes from the changelog (#15846) Split out 2022 changes from the changelog so the rendered version in GitHub doesn't timeout as much. --- CHANGES.md | 2764 ------------------------------ changelog.d/15846.misc | 1 + docs/changelogs/CHANGES-2022.md | 2766 +++++++++++++++++++++++++++++++ 3 files changed, 2767 insertions(+), 2764 deletions(-) create mode 100644 changelog.d/15846.misc create mode 100644 docs/changelogs/CHANGES-2022.md diff --git a/CHANGES.md b/CHANGES.md index dab35075b03e..2765045a139a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1253,2768 +1253,4 @@ Internal Changes - Bump ruff from 0.0.206 to 0.0.215. ([\#14796](https://github.com/matrix-org/synapse/issues/14796)) -Synapse 1.74.0 (2022-12-20) -=========================== - -Improved Documentation ----------------------- - -- Add release note and update documentation regarding optional ICU support in user search. ([\#14712](https://github.com/matrix-org/synapse/issues/14712)) - - -Synapse 1.74.0rc1 (2022-12-13) -============================== - -Features --------- - -- Improve user search for international display names. ([\#14464](https://github.com/matrix-org/synapse/issues/14464)) -- Stop using deprecated `keyIds` parameter when calling `/_matrix/key/v2/server`. ([\#14490](https://github.com/matrix-org/synapse/issues/14490), [\#14525](https://github.com/matrix-org/synapse/issues/14525)) -- Add new `push.enabled` config option to allow opting out of push notification calculation. ([\#14551](https://github.com/matrix-org/synapse/issues/14551), [\#14619](https://github.com/matrix-org/synapse/issues/14619)) -- Advertise support for Matrix 1.5 on `/_matrix/client/versions`. ([\#14576](https://github.com/matrix-org/synapse/issues/14576)) -- Improve opentracing and logging for to-device message handling. ([\#14598](https://github.com/matrix-org/synapse/issues/14598)) -- Allow selecting "prejoin" events by state keys in addition to event types. ([\#14642](https://github.com/matrix-org/synapse/issues/14642)) - - -Bugfixes --------- - -- Fix a long-standing bug where a device list update might not be sent to clients in certain circumstances. 
([\#14435](https://github.com/matrix-org/synapse/issues/14435), [\#14592](https://github.com/matrix-org/synapse/issues/14592), [\#14604](https://github.com/matrix-org/synapse/issues/14604)) -- Suppress a spurious warning when `POST /rooms///`, `POST /join//` receive an empty HTTP request body. ([\#14600](https://github.com/matrix-org/synapse/issues/14600)) -- Return spec-compliant JSON errors when unknown endpoints are requested. ([\#14620](https://github.com/matrix-org/synapse/issues/14620), [\#14621](https://github.com/matrix-org/synapse/issues/14621)) -- Update html templates to load images over HTTPS. Contributed by @ashfame. ([\#14625](https://github.com/matrix-org/synapse/issues/14625)) -- Fix a long-standing bug where the user directory would return 1 more row than requested. ([\#14631](https://github.com/matrix-org/synapse/issues/14631)) -- Reject invalid read receipt requests with empty room or event IDs. Contributed by Nick @ Beeper (@fizzadar). ([\#14632](https://github.com/matrix-org/synapse/issues/14632)) -- Fix a bug introduced in Synapse 1.67.0 where not specifying a config file or a server URL would lead to the `register_new_matrix_user` script failing. ([\#14637](https://github.com/matrix-org/synapse/issues/14637)) -- Fix a long-standing bug where the user directory and room/user stats might be out of sync. ([\#14639](https://github.com/matrix-org/synapse/issues/14639), [\#14643](https://github.com/matrix-org/synapse/issues/14643)) -- Fix a bug introduced in Synapse 1.72.0 where the background updates to add non-thread unique indexes on receipts would fail if they were previously interrupted. ([\#14650](https://github.com/matrix-org/synapse/issues/14650)) -- Improve validation of field size limits in events. ([\#14664](https://github.com/matrix-org/synapse/issues/14664)) -- Fix bugs introduced in Synapse 1.55.0 and 1.69.0 where application services would not be notified of events in the correct rooms, due to stale caches. ([\#14670](https://github.com/matrix-org/synapse/issues/14670)) - - -Improved Documentation ----------------------- - -- Update worker settings for `pusher` and `federation_sender` functionality. ([\#14493](https://github.com/matrix-org/synapse/issues/14493)) -- Add links to third party package repositories, and point to the bug which highlights Ubuntu's out-of-date packages. ([\#14517](https://github.com/matrix-org/synapse/issues/14517)) -- Remove old, incorrect minimum postgres version note and replace with a link to the [Dependency Deprecation Policy](https://matrix-org.github.io/synapse/v1.73/deprecation_policy.html). ([\#14590](https://github.com/matrix-org/synapse/issues/14590)) -- Add Single-Sign On setup instructions for Mastodon-based instances. ([\#14594](https://github.com/matrix-org/synapse/issues/14594)) -- Change `turn_allow_guests` example value to lowercase `true`. ([\#14634](https://github.com/matrix-org/synapse/issues/14634)) - - -Internal Changes ----------------- - -- Optimise push badge count calculations. Contributed by Nick @ Beeper (@fizzadar). ([\#14255](https://github.com/matrix-org/synapse/issues/14255)) -- Faster remote room joins: stream the un-partial-stating of rooms over replication. ([\#14473](https://github.com/matrix-org/synapse/issues/14473), [\#14474](https://github.com/matrix-org/synapse/issues/14474)) -- Share the `ClientRestResource` for both workers and the main process. 
([\#14528](https://github.com/matrix-org/synapse/issues/14528)) -- Add `--editable` flag to `complement.sh` which uses an editable install of Synapse for faster turn-around times whilst developing iteratively. ([\#14548](https://github.com/matrix-org/synapse/issues/14548)) -- Faster joins: use servers list approximation to send read receipts when in partial state instead of waiting for the full state of the room. ([\#14549](https://github.com/matrix-org/synapse/issues/14549)) -- Modernize unit tests configuration related to workers. ([\#14568](https://github.com/matrix-org/synapse/issues/14568)) -- Bump jsonschema from 4.17.0 to 4.17.3. ([\#14591](https://github.com/matrix-org/synapse/issues/14591)) -- Fix Rust lint CI. ([\#14602](https://github.com/matrix-org/synapse/issues/14602)) -- Bump JasonEtco/create-an-issue from 2.5.0 to 2.8.1. ([\#14607](https://github.com/matrix-org/synapse/issues/14607)) -- Alter some unit test environment parameters to decrease time spent running tests. ([\#14610](https://github.com/matrix-org/synapse/issues/14610)) -- Switch to Go recommended installation method for `gotestfmt` template in CI. ([\#14611](https://github.com/matrix-org/synapse/issues/14611)) -- Bump phonenumbers from 8.13.0 to 8.13.1. ([\#14612](https://github.com/matrix-org/synapse/issues/14612)) -- Bump types-setuptools from 65.5.0.3 to 65.6.0.1. ([\#14613](https://github.com/matrix-org/synapse/issues/14613)) -- Bump twine from 4.0.1 to 4.0.2. ([\#14614](https://github.com/matrix-org/synapse/issues/14614)) -- Bump types-requests from 2.28.11.2 to 2.28.11.5. ([\#14615](https://github.com/matrix-org/synapse/issues/14615)) -- Bump cryptography from 38.0.3 to 38.0.4. ([\#14616](https://github.com/matrix-org/synapse/issues/14616)) -- Remove useless cargo install with apt from Dockerfile. ([\#14636](https://github.com/matrix-org/synapse/issues/14636)) -- Bump certifi from 2021.10.8 to 2022.12.7. ([\#14645](https://github.com/matrix-org/synapse/issues/14645)) -- Bump flake8-bugbear from 22.10.27 to 22.12.6. ([\#14656](https://github.com/matrix-org/synapse/issues/14656)) -- Bump packaging from 21.3 to 22.0. ([\#14657](https://github.com/matrix-org/synapse/issues/14657)) -- Bump types-pillow from 9.3.0.1 to 9.3.0.4. ([\#14658](https://github.com/matrix-org/synapse/issues/14658)) -- Bump serde from 1.0.148 to 1.0.150. ([\#14659](https://github.com/matrix-org/synapse/issues/14659)) -- Bump phonenumbers from 8.13.1 to 8.13.2. ([\#14660](https://github.com/matrix-org/synapse/issues/14660)) -- Bump authlib from 1.1.0 to 1.2.0. ([\#14661](https://github.com/matrix-org/synapse/issues/14661)) -- Move `StateFilter` to `synapse.types`. ([\#14668](https://github.com/matrix-org/synapse/issues/14668)) -- Improve type hints. ([\#14597](https://github.com/matrix-org/synapse/issues/14597), [\#14646](https://github.com/matrix-org/synapse/issues/14646), [\#14671](https://github.com/matrix-org/synapse/issues/14671)) - - -Synapse 1.73.0 (2022-12-06) -=========================== - -Please note that legacy Prometheus metric names have been removed in this release; see [the upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.73/docs/upgrade.md#legacy-prometheus-metric-names-have-now-been-removed) for more details. - -No significant changes since 1.73.0rc2. - - -Synapse 1.73.0rc2 (2022-12-01) -============================== - -Bugfixes --------- - -- Fix a regression in Synapse 1.73.0rc1 where Synapse's main process would stop responding to HTTP requests when a user with a large number of devices logs in. 
([\#14582](https://github.com/matrix-org/synapse/issues/14582)) - - -Synapse 1.73.0rc1 (2022-11-29) -============================== - -Features --------- - -- Speed-up `/messages` with `filter_events_for_client` optimizations. ([\#14527](https://github.com/matrix-org/synapse/issues/14527)) -- Improve DB performance by reducing amount of data that gets read in `device_lists_changes_in_room`. ([\#14534](https://github.com/matrix-org/synapse/issues/14534)) -- Add support for handling avatar in SSO OIDC login. Contributed by @ashfame. ([\#13917](https://github.com/matrix-org/synapse/issues/13917)) -- Move MSC3030 `/timestamp_to_event` endpoints to stable `v1` location (`/_matrix/client/v1/rooms//timestamp_to_event?ts=&dir=`, `/_matrix/federation/v1/timestamp_to_event/?ts=&dir=`). ([\#14471](https://github.com/matrix-org/synapse/issues/14471)) -- Reduce database load of [Client-Server endpoints](https://spec.matrix.org/v1.5/client-server-api/#aggregations) which return bundled aggregations. ([\#14491](https://github.com/matrix-org/synapse/issues/14491), [\#14508](https://github.com/matrix-org/synapse/issues/14508), [\#14510](https://github.com/matrix-org/synapse/issues/14510)) -- Add unstable support for an Extensible Events room version (`org.matrix.msc1767.10`) via [MSC1767](https://github.com/matrix-org/matrix-spec-proposals/pull/1767), [MSC3931](https://github.com/matrix-org/matrix-spec-proposals/pull/3931), [MSC3932](https://github.com/matrix-org/matrix-spec-proposals/pull/3932), and [MSC3933](https://github.com/matrix-org/matrix-spec-proposals/pull/3933). ([\#14520](https://github.com/matrix-org/synapse/issues/14520), [\#14521](https://github.com/matrix-org/synapse/issues/14521), [\#14524](https://github.com/matrix-org/synapse/issues/14524)) -- Prune user's old devices on login if they have too many. ([\#14038](https://github.com/matrix-org/synapse/issues/14038), [\#14580](https://github.com/matrix-org/synapse/issues/14580)) - - -Bugfixes --------- - -- Fix a long-standing bug where paginating from the start of a room did not work. Contributed by @gnunicorn. ([\#14149](https://github.com/matrix-org/synapse/issues/14149)) -- Fix a bug introduced in Synapse 1.58.0 where a user with presence state `org.matrix.msc3026.busy` would mistakenly be set to `online` when calling `/sync` or `/events` on a worker process. ([\#14393](https://github.com/matrix-org/synapse/issues/14393)) -- Fix a bug introduced in Synapse 1.70.0 where a receipt's thread ID was not sent over federation. ([\#14466](https://github.com/matrix-org/synapse/issues/14466)) -- Fix a long-standing bug where the [List media admin API](https://matrix-org.github.io/synapse/latest/admin_api/media_admin_api.html#list-all-media-in-a-room) would fail when processing an image with broken thumbnail information. ([\#14537](https://github.com/matrix-org/synapse/issues/14537)) -- Fix a bug introduced in Synapse 1.67.0 where two logging context warnings would be logged on startup. ([\#14574](https://github.com/matrix-org/synapse/issues/14574)) -- In application service transactions that include the experimental `org.matrix.msc3202.device_one_time_key_counts` key, include a duplicate key of `org.matrix.msc3202.device_one_time_keys_count` to match the name proposed by [MSC3202](https://github.com/matrix-org/matrix-spec-proposals/pull/3202). ([\#14565](https://github.com/matrix-org/synapse/issues/14565)) -- Fix a bug introduced in Synapse 0.9 where Synapse would fail to fetch server keys whose IDs contain a forward slash. 
([\#14490](https://github.com/matrix-org/synapse/issues/14490)) - - -Improved Documentation ----------------------- - -- Fixed link to 'Synapse administration endpoints'. ([\#14499](https://github.com/matrix-org/synapse/issues/14499)) - - -Deprecations and Removals -------------------------- - -- Remove legacy Prometheus metrics names. They were deprecated in Synapse v1.69.0 and disabled by default in Synapse v1.71.0. ([\#14538](https://github.com/matrix-org/synapse/issues/14538)) - - -Internal Changes ----------------- - -- Improve type hinting throughout Synapse. ([\#14055](https://github.com/matrix-org/synapse/issues/14055), [\#14412](https://github.com/matrix-org/synapse/issues/14412), [\#14529](https://github.com/matrix-org/synapse/issues/14529), [\#14452](https://github.com/matrix-org/synapse/issues/14452)). -- Remove old stream ID tracking code. Contributed by Nick @Beeper (@fizzadar). ([\#14376](https://github.com/matrix-org/synapse/issues/14376), [\#14468](https://github.com/matrix-org/synapse/issues/14468)) -- Remove the `worker_main_http_uri` configuration setting. This is now handled via internal replication. ([\#14400](https://github.com/matrix-org/synapse/issues/14400), [\#14476](https://github.com/matrix-org/synapse/issues/14476)) -- Refactor `federation_sender` and `pusher` configuration loading. ([\#14496](https://github.com/matrix-org/synapse/issues/14496)) -([\#14509](https://github.com/matrix-org/synapse/issues/14509), [\#14573](https://github.com/matrix-org/synapse/issues/14573)) -- Faster joins: do not wait for full state when creating events to send. ([\#14403](https://github.com/matrix-org/synapse/issues/14403)) -- Faster joins: filter out non local events when a room doesn't have its full state. ([\#14404](https://github.com/matrix-org/synapse/issues/14404)) -- Faster joins: send events to initial list of servers if we don't have the full state yet. ([\#14408](https://github.com/matrix-org/synapse/issues/14408)) -- Faster joins: use servers list approximation received during `send_join` (potentially updated with received membership events) in `assert_host_in_room`. ([\#14515](https://github.com/matrix-org/synapse/issues/14515)) -- Fix type logic in TCP replication code that prevented correctly ignoring blank commands. ([\#14449](https://github.com/matrix-org/synapse/issues/14449)) -- Remove option to skip locking of tables when performing emulated upserts, to avoid a class of bugs in future. ([\#14469](https://github.com/matrix-org/synapse/issues/14469)) -- `scripts-dev/federation_client`: Fix routing on servers with `.well-known` files. ([\#14479](https://github.com/matrix-org/synapse/issues/14479)) -- Reduce default third party invite rate limit to 216 invites per day. ([\#14487](https://github.com/matrix-org/synapse/issues/14487)) -- Refactor conversion of device list changes in room to outbound pokes to track unconverted rows using a `(stream ID, room ID)` position instead of updating the `converted_to_destinations` flag on every row. ([\#14516](https://github.com/matrix-org/synapse/issues/14516)) -- Add more prompts to the bug report form. ([\#14522](https://github.com/matrix-org/synapse/issues/14522)) -- Extend editorconfig rules on indent and line length to `.pyi` files. ([\#14526](https://github.com/matrix-org/synapse/issues/14526)) -- Run Rust CI when `Cargo.lock` changes. This is particularly useful for dependabot updates. ([\#14571](https://github.com/matrix-org/synapse/issues/14571)) -- Fix a possible variable shadow in `create_new_client_event`. 
([\#14575](https://github.com/matrix-org/synapse/issues/14575)) -- Bump various dependencies in the `poetry.lock` file and in CI scripts. ([\#14557](https://github.com/matrix-org/synapse/issues/14557), [\#14559](https://github.com/matrix-org/synapse/issues/14559), [\#14560](https://github.com/matrix-org/synapse/issues/14560), [\#14500](https://github.com/matrix-org/synapse/issues/14500), [\#14501](https://github.com/matrix-org/synapse/issues/14501), [\#14502](https://github.com/matrix-org/synapse/issues/14502), [\#14503](https://github.com/matrix-org/synapse/issues/14503), [\#14504](https://github.com/matrix-org/synapse/issues/14504), [\#14505](https://github.com/matrix-org/synapse/issues/14505)). - - -Synapse 1.72.0 (2022-11-22) -=========================== - -Please note that Synapse now only supports PostgreSQL 11+, because PostgreSQL 10 has reached end-of-life, c.f. our [Deprecation Policy](https://github.com/matrix-org/synapse/blob/develop/docs/deprecation_policy.md). - -Bugfixes --------- - -- Update forgotten references to legacy metrics in the included Grafana dashboard. ([\#14477](https://github.com/matrix-org/synapse/issues/14477)) - - -Synapse 1.72.0rc1 (2022-11-16) -============================== - -Features --------- - -- Add experimental support for [MSC3912](https://github.com/matrix-org/matrix-spec-proposals/pull/3912): Relation-based redactions. ([\#14260](https://github.com/matrix-org/synapse/issues/14260)) -- Build Debian packages for Ubuntu 22.10 (Kinetic Kudu). ([\#14396](https://github.com/matrix-org/synapse/issues/14396)) -- Add an [Admin API](https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/index.html) endpoint for user lookup based on third-party ID (3PID). Contributed by @ashfame. ([\#14405](https://github.com/matrix-org/synapse/issues/14405)) -- Faster joins: include heroes' membership events in the partial join response, for rooms without a name or canonical alias. ([\#14442](https://github.com/matrix-org/synapse/issues/14442)) - - -Bugfixes --------- - -- Faster joins: do not block creation of or queries for room aliases during the resync. ([\#14292](https://github.com/matrix-org/synapse/issues/14292)) -- Fix a bug introduced in Synapse 1.64.0rc1 which could cause log spam when fetching events from other homeservers. ([\#14347](https://github.com/matrix-org/synapse/issues/14347)) -- Fix a bug introduced in 1.66 which would not send certain pushrules to clients. Contributed by Nico. ([\#14356](https://github.com/matrix-org/synapse/issues/14356)) -- Fix a bug introduced in v1.71.0rc1 where the power level event was incorrectly created during initial room creation. ([\#14361](https://github.com/matrix-org/synapse/issues/14361)) -- Fix the refresh token endpoint to be under /r0 and /v3 instead of /v1. Contributed by Tulir @ Beeper. ([\#14364](https://github.com/matrix-org/synapse/issues/14364)) -- Fix a long-standing bug where Synapse would raise an error when encountering an unrecognised field in a `/sync` filter, instead of ignoring it for forward compatibility. ([\#14369](https://github.com/matrix-org/synapse/issues/14369)) -- Fix a background database update, introduced in Synapse 1.64.0, which could cause poor database performance. ([\#14374](https://github.com/matrix-org/synapse/issues/14374)) -- Fix PostgreSQL sometimes using table scans for queries against the `event_search` table, taking a long time and a large amount of IO. 
([\#14409](https://github.com/matrix-org/synapse/issues/14409)) -- Fix rendering of some HTML templates (including emails). Introduced in v1.71.0. ([\#14448](https://github.com/matrix-org/synapse/issues/14448)) -- Fix a bug introduced in Synapse 1.70.0 where the background updates to add non-thread unique indexes on receipts could fail when upgrading from 1.67.0 or earlier. ([\#14453](https://github.com/matrix-org/synapse/issues/14453)) - - -Updates to the Docker image ---------------------------- - -- Add all Stream Writer worker types to `configure_workers_and_start.py`. ([\#14197](https://github.com/matrix-org/synapse/issues/14197)) -- Remove references to legacy worker types in the multi-worker Dockerfile. ([\#14294](https://github.com/matrix-org/synapse/issues/14294)) - - -Improved Documentation ----------------------- - -- Upload documentation PRs to Netlify. ([\#12947](https://github.com/matrix-org/synapse/issues/12947), [\#14370](https://github.com/matrix-org/synapse/issues/14370)) -- Add addtional TURN server configuration example based on [eturnal](https://github.com/processone/eturnal) and adjust general TURN server doc structure. ([\#14293](https://github.com/matrix-org/synapse/issues/14293)) -- Add example on how to load balance /sync requests. Contributed by [aceArt](https://aceart.de). ([\#14297](https://github.com/matrix-org/synapse/issues/14297)) -- Edit sample Nginx reverse proxy configuration to use HTTP/1.1. Contributed by Brad Jones. ([\#14414](https://github.com/matrix-org/synapse/issues/14414)) - - -Deprecations and Removals -------------------------- - -- Remove support for PostgreSQL 10. ([\#14392](https://github.com/matrix-org/synapse/issues/14392), [\#14397](https://github.com/matrix-org/synapse/issues/14397)) - - -Internal Changes ----------------- - -- Run unit tests against Python 3.11. ([\#13812](https://github.com/matrix-org/synapse/issues/13812)) -- Add TLS support for generic worker endpoints. ([\#14128](https://github.com/matrix-org/synapse/issues/14128), [\#14455](https://github.com/matrix-org/synapse/issues/14455)) -- Switch to a maintained action for installing Rust in CI. ([\#14313](https://github.com/matrix-org/synapse/issues/14313)) -- Add override ability to `complement.sh` command line script to request certain types of workers. ([\#14324](https://github.com/matrix-org/synapse/issues/14324)) -- Enabling testing of [MSC3874](https://github.com/matrix-org/matrix-spec-proposals/pull/3874) (filtering of `/messages` by relation type) in complement. ([\#14339](https://github.com/matrix-org/synapse/issues/14339)) -- Concisely log a failure to resolve state due to missing `prev_events`. ([\#14346](https://github.com/matrix-org/synapse/issues/14346)) -- Use a maintained Github action to install Rust. ([\#14351](https://github.com/matrix-org/synapse/issues/14351)) -- Cleanup old worker datastore classes. Contributed by Nick @ Beeper (@fizzadar). ([\#14375](https://github.com/matrix-org/synapse/issues/14375)) -- Test against PostgreSQL 15 in CI. ([\#14394](https://github.com/matrix-org/synapse/issues/14394)) -- Remove unreachable code. ([\#14410](https://github.com/matrix-org/synapse/issues/14410)) -- Clean-up event persistence code. ([\#14411](https://github.com/matrix-org/synapse/issues/14411)) -- Update docstring to clarify that `get_partial_state_events_batch` does not just give you completely arbitrary partial-state events. 
([\#14417](https://github.com/matrix-org/synapse/issues/14417)) -- Fix mypy errors introduced by bumping the locked version of `attrs` and `gitpython`. ([\#14433](https://github.com/matrix-org/synapse/issues/14433)) -- Make Dependabot only bump Rust deps in the lock file. ([\#14434](https://github.com/matrix-org/synapse/issues/14434)) -- Fix an incorrect stub return type for `PushRuleEvaluator.run`. ([\#14451](https://github.com/matrix-org/synapse/issues/14451)) -- Improve performance of `/context` in large rooms. ([\#14461](https://github.com/matrix-org/synapse/issues/14461)) - - -Synapse 1.71.0 (2022-11-08) -=========================== - -Please note that, as announced in the release notes for Synapse 1.69.0, legacy Prometheus metric names are now disabled by default. -They will be removed altogether in Synapse 1.73.0. -If not already done, server administrators should update their dashboards and alerting rules to avoid using the deprecated metric names. -See the [upgrade notes](https://matrix-org.github.io/synapse/v1.71/upgrade.html#upgrading-to-v1710) for more details. - -**Note:** in line with our [deprecation policy](https://matrix-org.github.io/synapse/latest/deprecation_policy.html) for platform dependencies, this will be the last release to support PostgreSQL 10, which reaches upstream end-of-life on November 10th, 2022. Future releases of Synapse will require PostgreSQL 11+. - -No significant changes since 1.71.0rc2. - - -Synapse 1.71.0rc2 (2022-11-04) -============================== - -Improved Documentation ----------------------- - -- Document the changes to monthly active user metrics due to deprecation of legacy Prometheus metric names. ([\#14358](https://github.com/matrix-org/synapse/issues/14358), [\#14360](https://github.com/matrix-org/synapse/issues/14360)) - - -Deprecations and Removals -------------------------- - -- Disable legacy Prometheus metric names by default. They can still be re-enabled for now, but they will be removed altogether in Synapse 1.73.0. ([\#14353](https://github.com/matrix-org/synapse/issues/14353)) - - -Internal Changes ----------------- - -- Run unit tests against Python 3.11. ([\#13812](https://github.com/matrix-org/synapse/issues/13812)) - - -Synapse 1.71.0rc1 (2022-11-01) -============================== - -Features --------- - -- Support back-channel logouts from OpenID Connect providers. ([\#11414](https://github.com/matrix-org/synapse/issues/11414)) -- Allow use of Postgres and SQLlite full-text search operators in search queries. ([\#11635](https://github.com/matrix-org/synapse/issues/11635), [\#14310](https://github.com/matrix-org/synapse/issues/14310), [\#14311](https://github.com/matrix-org/synapse/issues/14311)) -- Implement [MSC3664](https://github.com/matrix-org/matrix-doc/pull/3664), Pushrules for relations. Contributed by Nico. ([\#11804](https://github.com/matrix-org/synapse/issues/11804)) -- Improve aesthetics of HTML templates. Note that these changes do not retroactively apply to templates which have been [customised](https://matrix-org.github.io/synapse/latest/templates.html#templates) by server admins. ([\#13652](https://github.com/matrix-org/synapse/issues/13652)) -- Enable write-ahead logging for SQLite installations. Contributed by [@asymmetric](https://github.com/asymmetric). ([\#13897](https://github.com/matrix-org/synapse/issues/13897)) -- Show erasure status when [listing users](https://matrix-org.github.io/synapse/latest/admin_api/user_admin_api.html#query-user-account) in the Admin API. 
([\#14205](https://github.com/matrix-org/synapse/issues/14205)) -- Provide a specific error code when a `/sync` request provides a filter which doesn't represent a JSON object. ([\#14262](https://github.com/matrix-org/synapse/issues/14262)) - - -Bugfixes --------- - -- Fix a long-standing bug where the `update_synapse_database` script could not be run with multiple databases. Contributed by @thefinn93 @ Beeper. ([\#13422](https://github.com/matrix-org/synapse/issues/13422)) -- Fix a bug which prevented setting an avatar on homeservers which have an explicit port in their `server_name` and have `max_avatar_size` and/or `allowed_avatar_mimetypes` configuration. Contributed by @ashfame. ([\#13927](https://github.com/matrix-org/synapse/issues/13927)) -- Check appservice user interest against the local users instead of all users in the room to align with [MSC3905](https://github.com/matrix-org/matrix-spec-proposals/pull/3905). ([\#13958](https://github.com/matrix-org/synapse/issues/13958)) -- Fix a long-standing bug where Synapse would accidentally include extra information in the response to [`PUT /_matrix/federation/v2/invite/{roomId}/{eventId}`](https://spec.matrix.org/v1.4/server-server-api/#put_matrixfederationv2inviteroomideventid). ([\#14064](https://github.com/matrix-org/synapse/issues/14064)) -- Fix a bug introduced in Synapse 1.64.0 where presence updates could be missing from `/sync` responses. ([\#14243](https://github.com/matrix-org/synapse/issues/14243)) -- Fix a bug introduced in Synapse 1.60.0 which caused an error to be logged when Synapse received a SIGHUP signal if debug logging was enabled. ([\#14258](https://github.com/matrix-org/synapse/issues/14258)) -- Prevent history insertion ([MSC2716](https://github.com/matrix-org/matrix-spec-proposals/pull/2716)) during an partial join ([MSC3706](https://github.com/matrix-org/matrix-spec-proposals/pull/3706)). ([\#14291](https://github.com/matrix-org/synapse/issues/14291)) -- Fix a bug introduced in Synapse 1.34.0 where device names would be returned via a federation user key query request when `allow_device_name_lookup_over_federation` was set to `false`. ([\#14304](https://github.com/matrix-org/synapse/issues/14304)) -- Fix a bug introduced in Synapse 0.34.0 where logs could include error spam when background processes are measured as taking a negative amount of time. ([\#14323](https://github.com/matrix-org/synapse/issues/14323)) -- Fix a bug introduced in Synapse 1.70.0 where clients were unable to PUT new [dehydrated devices](https://github.com/matrix-org/matrix-spec-proposals/pull/2697). ([\#14336](https://github.com/matrix-org/synapse/issues/14336)) - - -Improved Documentation ----------------------- - -- Explain how to disable the use of [`trusted_key_servers`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#trusted_key_servers). ([\#13999](https://github.com/matrix-org/synapse/issues/13999)) -- Add workers settings to [configuration manual](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#individual-worker-configuration). ([\#14086](https://github.com/matrix-org/synapse/issues/14086)) -- Correct the name of the config option [`encryption_enabled_by_default_for_room_type`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#encryption_enabled_by_default_for_room_type). 
([\#14110](https://github.com/matrix-org/synapse/issues/14110)) -- Update docstrings of `SynapseError` and `FederationError` to bettter describe what they are used for and the effects of using them are. ([\#14191](https://github.com/matrix-org/synapse/issues/14191)) - - -Internal Changes ----------------- - -- Remove unused `@lru_cache` decorator. ([\#13595](https://github.com/matrix-org/synapse/issues/13595)) -- Save login tokens in database and prevent login token reuse. ([\#13844](https://github.com/matrix-org/synapse/issues/13844)) -- Refactor OIDC tests to better mimic an actual OIDC provider. ([\#13910](https://github.com/matrix-org/synapse/issues/13910)) -- Fix type annotation causing import time error in the Complement forking launcher. ([\#14084](https://github.com/matrix-org/synapse/issues/14084)) -- Refactor [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint to loop over federation destinations with standard pattern and error handling. ([\#14096](https://github.com/matrix-org/synapse/issues/14096)) -- Add initial power level event to batch of bulk persisted events when creating a new room. ([\#14228](https://github.com/matrix-org/synapse/issues/14228)) -- Refactor `/key/` endpoints to use `RestServlet` classes. ([\#14229](https://github.com/matrix-org/synapse/issues/14229)) -- Switch to using the `matrix-org/backend-meta` version of `triage-incoming` for new issues in CI. ([\#14230](https://github.com/matrix-org/synapse/issues/14230)) -- Build wheels on macos 11, not 10.15. ([\#14249](https://github.com/matrix-org/synapse/issues/14249)) -- Add debugging to help diagnose lost device list updates. ([\#14268](https://github.com/matrix-org/synapse/issues/14268)) -- Add Rust cache to CI for `trial` runs. ([\#14287](https://github.com/matrix-org/synapse/issues/14287)) -- Improve type hinting of `RawHeaders`. ([\#14303](https://github.com/matrix-org/synapse/issues/14303)) -- Use Poetry 1.2.0 in the Twisted Trunk CI job. ([\#14305](https://github.com/matrix-org/synapse/issues/14305)) - -
-Dependency updates - -Runtime: - -- Bump anyhow from 1.0.65 to 1.0.66. ([\#14278](https://github.com/matrix-org/synapse/issues/14278)) -- Bump jinja2 from 3.0.3 to 3.1.2. ([\#14271](https://github.com/matrix-org/synapse/issues/14271)) -- Bump prometheus-client from 0.14.0 to 0.15.0. ([\#14274](https://github.com/matrix-org/synapse/issues/14274)) -- Bump psycopg2 from 2.9.4 to 2.9.5. ([\#14331](https://github.com/matrix-org/synapse/issues/14331)) -- Bump pysaml2 from 7.1.2 to 7.2.1. ([\#14270](https://github.com/matrix-org/synapse/issues/14270)) -- Bump sentry-sdk from 1.5.11 to 1.10.1. ([\#14330](https://github.com/matrix-org/synapse/issues/14330)) -- Bump serde from 1.0.145 to 1.0.147. ([\#14277](https://github.com/matrix-org/synapse/issues/14277)) -- Bump serde_json from 1.0.86 to 1.0.87. ([\#14279](https://github.com/matrix-org/synapse/issues/14279)) - -Tooling and CI: - -- Bump black from 22.3.0 to 22.10.0. ([\#14328](https://github.com/matrix-org/synapse/issues/14328)) -- Bump flake8-bugbear from 21.3.2 to 22.9.23. ([\#14042](https://github.com/matrix-org/synapse/issues/14042)) -- Bump peaceiris/actions-gh-pages from 3.8.0 to 3.9.0. ([\#14276](https://github.com/matrix-org/synapse/issues/14276)) -- Bump peaceiris/actions-mdbook from 1.1.14 to 1.2.0. ([\#14275](https://github.com/matrix-org/synapse/issues/14275)) -- Bump setuptools-rust from 1.5.1 to 1.5.2. ([\#14273](https://github.com/matrix-org/synapse/issues/14273)) -- Bump twine from 3.8.0 to 4.0.1. ([\#14332](https://github.com/matrix-org/synapse/issues/14332)) -- Bump types-opentracing from 2.4.7 to 2.4.10. ([\#14133](https://github.com/matrix-org/synapse/issues/14133)) -- Bump types-requests from 2.28.11 to 2.28.11.2. ([\#14272](https://github.com/matrix-org/synapse/issues/14272)) -
- -Synapse 1.70.1 (2022-10-28) -=========================== - -This release fixes some regressions that were discovered in 1.70.0. - -[#14300](https://github.com/matrix-org/synapse/issues/14300) -was previously reported to be a regression in 1.70.0 as well. However, we have -since concluded that it was limited to the reporter and thus have not needed -to include any fix for it in 1.70.1. - - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.70.0rc1 where the access tokens sent to application services as headers were malformed. Application services which were obtaining access tokens from query parameters were not affected. ([\#14301](https://github.com/matrix-org/synapse/issues/14301)) -- Fix room creation being rate limited too aggressively since Synapse v1.69.0. ([\#14314](https://github.com/matrix-org/synapse/issues/14314)) - - -Synapse 1.70.0 (2022-10-26) -=========================== - -No significant changes since 1.70.0rc2. - - -Synapse 1.70.0rc2 (2022-10-25) -============================== - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.70.0rc1 where the information returned from the `/threads` API could be stale when threaded events are redacted. ([\#14248](https://github.com/matrix-org/synapse/issues/14248)) -- Fix a bug introduced in Synapse 1.70.0rc1 leading to broken outbound federation when using Python 3.7. ([\#14280](https://github.com/matrix-org/synapse/issues/14280)) -- Fix a bug introduced in Synapse 1.70.0rc1 where edits to non-message events were aggregated by the homeserver. ([\#14283](https://github.com/matrix-org/synapse/issues/14283)) - - -Internal Changes ----------------- - -- Build ABI3 wheels for CPython. ([\#14253](https://github.com/matrix-org/synapse/issues/14253)) -- For the aarch64 architecture, only build wheels for CPython manylinux. ([\#14259](https://github.com/matrix-org/synapse/issues/14259)) - - -Synapse 1.70.0rc1 (2022-10-19) -============================== - -Features --------- - -- Support for [MSC3856](https://github.com/matrix-org/matrix-spec-proposals/pull/3856): threads list API. ([\#13394](https://github.com/matrix-org/synapse/issues/13394), [\#14171](https://github.com/matrix-org/synapse/issues/14171), [\#14175](https://github.com/matrix-org/synapse/issues/14175)) -- Support for thread-specific notifications & receipts ([MSC3771](https://github.com/matrix-org/matrix-spec-proposals/pull/3771) and [MSC3773](https://github.com/matrix-org/matrix-spec-proposals/pull/3773)). ([\#13776](https://github.com/matrix-org/synapse/issues/13776), [\#13824](https://github.com/matrix-org/synapse/issues/13824), [\#13877](https://github.com/matrix-org/synapse/issues/13877), [\#13878](https://github.com/matrix-org/synapse/issues/13878), [\#14050](https://github.com/matrix-org/synapse/issues/14050), [\#14140](https://github.com/matrix-org/synapse/issues/14140), [\#14159](https://github.com/matrix-org/synapse/issues/14159), [\#14163](https://github.com/matrix-org/synapse/issues/14163), [\#14174](https://github.com/matrix-org/synapse/issues/14174), [\#14222](https://github.com/matrix-org/synapse/issues/14222)) -- Stop fetching missing `prev_events` after we already know their signature is invalid. ([\#13816](https://github.com/matrix-org/synapse/issues/13816)) -- Send application service access tokens as a header (and query parameter). Implements [MSC2832](https://github.com/matrix-org/matrix-spec-proposals/pull/2832). ([\#13996](https://github.com/matrix-org/synapse/issues/13996)) -- Ignore server ACL changes when generating pushes. 
Implements [MSC3786](https://github.com/matrix-org/matrix-spec-proposals/pull/3786). ([\#13997](https://github.com/matrix-org/synapse/issues/13997)) -- Experimental support for redirecting to an implementation of a [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886) HTTP rendezvous service. ([\#14018](https://github.com/matrix-org/synapse/issues/14018)) -- The `/relations` endpoint can now be used on workers. ([\#14028](https://github.com/matrix-org/synapse/issues/14028)) -- Advertise support for Matrix 1.3 and 1.4 on `/_matrix/client/versions`. ([\#14032](https://github.com/matrix-org/synapse/issues/14032), [\#14184](https://github.com/matrix-org/synapse/issues/14184)) -- Improve validation of request bodies for the [Device Management](https://spec.matrix.org/v1.4/client-server-api/#device-management) and [MSC2697 Device Dehyrdation](https://github.com/matrix-org/matrix-spec-proposals/pull/2697) client-server API endpoints. ([\#14054](https://github.com/matrix-org/synapse/issues/14054)) -- Experimental support for [MSC3874](https://github.com/matrix-org/matrix-spec-proposals/pull/3874): Filtering threads from the `/messages` endpoint. ([\#14148](https://github.com/matrix-org/synapse/issues/14148)) -- Improve the validation of the following PUT endpoints: [`/directory/room/{roomAlias}`](https://spec.matrix.org/v1.4/client-server-api/#put_matrixclientv3directoryroomroomalias), [`/directory/list/room/{roomId}`](https://spec.matrix.org/v1.4/client-server-api/#put_matrixclientv3directorylistroomroomid) and [`/directory/list/appservice/{networkId}/{roomId}`](https://spec.matrix.org/v1.4/application-service-api/#put_matrixclientv3directorylistappservicenetworkidroomid). ([\#14179](https://github.com/matrix-org/synapse/issues/14179)) -- Build and publish binary wheels for `aarch64` platforms. ([\#14212](https://github.com/matrix-org/synapse/issues/14212)) - - -Bugfixes --------- - -- Prevent device names from appearing in device list updates in some situations when `allow_device_name_lookup_over_federation` is `false`. (This is not comprehensive: see [\#13114](https://github.com/matrix-org/synapse/issues/13114).) ([\#10015](https://github.com/matrix-org/synapse/issues/10015)) -- Fix a long-standing bug where redactions were not being sent over federation if we did not have the original event. ([\#13813](https://github.com/matrix-org/synapse/issues/13813)) -- Fix a long-standing bug where edits of non-`m.room.message` events would not be correctly bundled or have their new content applied. ([\#14034](https://github.com/matrix-org/synapse/issues/14034)) -- Fix a bug introduced in Synapse 1.53.0 when querying `/publicRooms` with both a `room_type` filter and a `third_party_instance_id`. ([\#14053](https://github.com/matrix-org/synapse/issues/14053)) -- Fix a bug introduced in Synapse 1.35.0 where errors parsing a `/send_join` or `/state` response would produce excessive, low-quality Sentry events. ([\#14065](https://github.com/matrix-org/synapse/issues/14065)) -- Fix a long-standing bug where Synapse would error on the optional 'invite_room_state' field not being provided to [`PUT /_matrix/federation/v2/invite/{roomId}/{eventId}`](https://spec.matrix.org/v1.4/server-server-api/#put_matrixfederationv2inviteroomideventid). ([\#14083](https://github.com/matrix-org/synapse/issues/14083)) -- Fix a bug where invalid oEmbed fields would cause the entire response to be discarded. Introduced in Synapse 1.18.0. 
([\#14089](https://github.com/matrix-org/synapse/issues/14089)) -- Fix a bug introduced in Synapse 1.37.0 in which an incorrect key name was used for sending and receiving room metadata when knocking on a room. ([\#14102](https://github.com/matrix-org/synapse/issues/14102)) -- Fix a bug introduced in v1.69.0rc1 where the joined hosts for a given event were not being properly cached. ([\#14125](https://github.com/matrix-org/synapse/issues/14125)) -- Fix a bug introduced in Synapse 1.30.0 where purging and rejoining a room without restarting in-between would result in a broken room. ([\#14161](https://github.com/matrix-org/synapse/issues/14161), [\#14164](https://github.com/matrix-org/synapse/issues/14164)) -- Fix [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint returning potentially inaccurate closest events with `outliers` present. ([\#14215](https://github.com/matrix-org/synapse/issues/14215)) - - -Updates to the Docker image ---------------------------- - -- Update the version of frozendict in Docker images and Debian packages from 2.3.3 to 2.3.4, which may fix memory leak problems. ([\#13955](https://github.com/matrix-org/synapse/issues/13955)) -- Use the `minimal` Rust profile when building Synapse. ([\#14141](https://github.com/matrix-org/synapse/issues/14141)) -- Prevent a class of database sharding errors when using `Dockerfile-workers` to spawn multiple instances of the same worker. Contributed by Jason Little. ([\#14165](https://github.com/matrix-org/synapse/issues/14165)) -- Set `LD_PRELOAD` to use jemalloc memory allocator in Dockerfile-workers. ([\#14182](https://github.com/matrix-org/synapse/issues/14182)) -- Fix pre-startup logging being lost when using the `Dockerfile-workers` image. ([\#14195](https://github.com/matrix-org/synapse/issues/14195)) - - -Improved Documentation ----------------------- - -- Add sample worker files for `pusher` and `federation_sender`. ([\#14077](https://github.com/matrix-org/synapse/issues/14077)) -- Improve the listener example on the metrics documentation. ([\#14078](https://github.com/matrix-org/synapse/issues/14078)) -- Expand Google OpenID Connect example config to map email attribute. Contributed by @ptman. ([\#14081](https://github.com/matrix-org/synapse/issues/14081)) -- The changelog entry ending in a full stop or exclamation mark is not optional. ([\#14087](https://github.com/matrix-org/synapse/issues/14087)) -- Fix links to jemalloc documentation, which were broken in [#13491](https://github.com/matrix-org/synapse/pull/14124). ([\#14093](https://github.com/matrix-org/synapse/issues/14093)) -- Remove not needed `replication` listener in docker compose example. ([\#14107](https://github.com/matrix-org/synapse/issues/14107)) -- Fix name of `alias_creation_rules` option in the config manual documentation. ([\#14124](https://github.com/matrix-org/synapse/issues/14124)) -- Clarify comment on event contexts. ([\#14145](https://github.com/matrix-org/synapse/issues/14145)) -- Fix dead link to the [Admin Registration API](https://matrix-org.github.io/synapse/latest/admin_api/register_api.html). ([\#14189](https://github.com/matrix-org/synapse/issues/14189)) - - -Deprecations and Removals -------------------------- - -- Remove the experimental implementation of [MSC3772](https://github.com/matrix-org/matrix-spec-proposals/pull/3772). 
([\#14094](https://github.com/matrix-org/synapse/issues/14094)) -- Remove the unstable identifier for [MSC3715](https://github.com/matrix-org/matrix-doc/pull/3715). ([\#14106](https://github.com/matrix-org/synapse/issues/14106), [\#14146](https://github.com/matrix-org/synapse/issues/14146)) - - -Internal Changes ----------------- - -- Optimise queries used to get a users rooms during sync. Contributed by Nick @ Beeper (@fizzadar). ([\#13991](https://github.com/matrix-org/synapse/issues/13991)) -- Update authlib from 0.15.5 to 1.1.0. ([\#14006](https://github.com/matrix-org/synapse/issues/14006)) -- Make `parse_server_name` consistent in handling invalid server names. ([\#14007](https://github.com/matrix-org/synapse/issues/14007)) -- Don't repeatedly wake up the same users for batched events. ([\#14033](https://github.com/matrix-org/synapse/issues/14033)) -- Complement test image: capture logs from nginx. ([\#14063](https://github.com/matrix-org/synapse/issues/14063)) -- Don't create noisy Sentry events when a requester drops connection to the metrics server mid-request. ([\#14072](https://github.com/matrix-org/synapse/issues/14072)) -- Run the integration test suites with the asyncio reactor enabled in CI. ([\#14092](https://github.com/matrix-org/synapse/issues/14092)) -- Add debug logs to figure out why an event was filtered out of the client response. ([\#14095](https://github.com/matrix-org/synapse/issues/14095)) -- Indicate what endpoint came back with a JSON response we were unable to parse. ([\#14097](https://github.com/matrix-org/synapse/issues/14097)) -- Break up calls to fetch rooms for many users. Contributed by Nick @ Beeper (@fizzadar). ([\#14109](https://github.com/matrix-org/synapse/issues/14109)) -- Faster joins: prioritise the server we joined by when restarting a partial join resync. ([\#14126](https://github.com/matrix-org/synapse/issues/14126)) -- Cache Rust build cache when building docker images. ([\#14130](https://github.com/matrix-org/synapse/issues/14130)) -- Enable dependabot for Rust dependencies. ([\#14132](https://github.com/matrix-org/synapse/issues/14132)) -- Bump typing-extensions from 4.1.1 to 4.4.0. ([\#14134](https://github.com/matrix-org/synapse/issues/14134)) -- Use the `minimal` Rust profile when building Synapse. ([\#14141](https://github.com/matrix-org/synapse/issues/14141)) -- Remove unused configuration code. ([\#14142](https://github.com/matrix-org/synapse/issues/14142)) -- Prepare for the [`gotestfmt` repository move](https://github.com/GoTestTools/gotestfmt/discussions/46). ([\#14144](https://github.com/matrix-org/synapse/issues/14144)) -- Invalidate rooms for user caches on replicated event, fix sync cache race in synapse workers. Contributed by Nick @ Beeper (@fizzadar). ([\#14155](https://github.com/matrix-org/synapse/issues/14155)) -- Enable url previews when testing with complement. ([\#14198](https://github.com/matrix-org/synapse/issues/14198)) -- When authenticating batched events, check for auth events in batch as well as DB. ([\#14214](https://github.com/matrix-org/synapse/issues/14214)) -- Update CI config to avoid GitHub Actions deprecation warnings. ([\#14216](https://github.com/matrix-org/synapse/issues/14216), [\#14224](https://github.com/matrix-org/synapse/issues/14224)) -- Update dependency requirements to allow building with poetry-core 1.3.2. 
([\#14217](https://github.com/matrix-org/synapse/issues/14217)) -- Rename the `cache_memory` extra to `cache-memory`, for compatability with poetry-core 1.3.0 and [PEP 685](https://peps.python.org/pep-0685/). From-source installations using this extra will need to install using the new name. ([\#14221](https://github.com/matrix-org/synapse/issues/14221)) -- Specify dev-dependencies using lower bounds, to reduce the likelihood of a dependabot merge conflict. The lockfile continues to pin to specific versions. ([\#14227](https://github.com/matrix-org/synapse/issues/14227)) - - -Synapse 1.69.0 (2022-10-17) -=========================== - -Please note that legacy Prometheus metric names are now deprecated and will be removed in Synapse 1.73.0. -Server administrators should update their dashboards and alerting rules to avoid using the deprecated metric names. -See the [upgrade notes](https://matrix-org.github.io/synapse/v1.69/upgrade.html#upgrading-to-v1690) for more details. - - -No significant changes since 1.69.0rc4. - - -Synapse 1.69.0rc4 (2022-10-14) -============================== - -Bugfixes --------- - -- Fix poor performance of the `event_push_backfill_thread_id` background update, which was introduced in Synapse 1.68.0rc1. ([\#14172](https://github.com/matrix-org/synapse/issues/14172), [\#14181](https://github.com/matrix-org/synapse/issues/14181)) - - -Updates to the Docker image ---------------------------- - -- Fix docker build OOMing in CI for arm64 builds. ([\#14173](https://github.com/matrix-org/synapse/issues/14173)) - - -Synapse 1.69.0rc3 (2022-10-12) -============================== - -Bugfixes --------- - -- Fix an issue with Docker images causing the Rust dependencies to not be pinned correctly. Introduced in v1.68.0 ([\#14129](https://github.com/matrix-org/synapse/issues/14129)) -- Fix a bug introduced in Synapse 1.69.0rc1 which would cause registration replication requests to fail if the worker sending the request is not running Synapse 1.69. ([\#14135](https://github.com/matrix-org/synapse/issues/14135)) -- Fix error in background update when rotating existing notifications. Introduced in v1.69.0rc2. ([\#14138](https://github.com/matrix-org/synapse/issues/14138)) - - -Internal Changes ----------------- - -- Rename the `url_preview` extra to `url-preview`, for compatability with poetry-core 1.3.0 and [PEP 685](https://peps.python.org/pep-0685/). From-source installations using this extra will need to install using the new name. ([\#14085](https://github.com/matrix-org/synapse/issues/14085)) - - -Synapse 1.69.0rc2 (2022-10-06) -============================== - -Deprecations and Removals -------------------------- - -- Deprecate the `generate_short_term_login_token` method in favor of an async `create_login_token` method in the Module API. ([\#13842](https://github.com/matrix-org/synapse/issues/13842)) - - -Internal Changes ----------------- - -- Ensure Synapse v1.69 works with upcoming database changes in v1.70. ([\#14045](https://github.com/matrix-org/synapse/issues/14045)) -- Fix a bug introduced in Synapse v1.68.0 where messages could not be sent in rooms with non-integer `notifications` power level. ([\#14073](https://github.com/matrix-org/synapse/issues/14073)) -- Temporarily pin build-system requirements to workaround an incompatibility with poetry-core 1.3.0. This will be reverted before the v1.69.0 release proper, see [\#14079](https://github.com/matrix-org/synapse/issues/14079). 
([\#14080](https://github.com/matrix-org/synapse/issues/14080)) - - -Synapse 1.69.0rc1 (2022-10-04) -============================== - -Features --------- - -- Allow application services to set the `origin_server_ts` of a state event by providing the query parameter `ts` in [`PUT /_matrix/client/r0/rooms/{roomId}/state/{eventType}/{stateKey}`](https://spec.matrix.org/v1.4/client-server-api/#put_matrixclientv3roomsroomidstateeventtypestatekey), per [MSC3316](https://github.com/matrix-org/matrix-doc/pull/3316). Contributed by @lukasdenk. ([\#11866](https://github.com/matrix-org/synapse/issues/11866)) -- Allow server admins to require a manual approval process before new accounts can be used (using [MSC3866](https://github.com/matrix-org/matrix-spec-proposals/pull/3866)). ([\#13556](https://github.com/matrix-org/synapse/issues/13556)) -- Exponentially backoff from backfilling the same event over and over. ([\#13635](https://github.com/matrix-org/synapse/issues/13635), [\#13936](https://github.com/matrix-org/synapse/issues/13936)) -- Add cache invalidation across workers to module API. ([\#13667](https://github.com/matrix-org/synapse/issues/13667), [\#13947](https://github.com/matrix-org/synapse/issues/13947)) -- Experimental implementation of [MSC3882](https://github.com/matrix-org/matrix-spec-proposals/pull/3882) to allow an existing device/session to generate a login token for use on a new device/session. ([\#13722](https://github.com/matrix-org/synapse/issues/13722), [\#13868](https://github.com/matrix-org/synapse/issues/13868)) -- Experimental support for thread-specific receipts ([MSC3771](https://github.com/matrix-org/matrix-spec-proposals/pull/3771)). ([\#13782](https://github.com/matrix-org/synapse/issues/13782), [\#13893](https://github.com/matrix-org/synapse/issues/13893), [\#13932](https://github.com/matrix-org/synapse/issues/13932), [\#13937](https://github.com/matrix-org/synapse/issues/13937), [\#13939](https://github.com/matrix-org/synapse/issues/13939)) -- Add experimental support for [MSC3881: Remotely toggle push notifications for another client](https://github.com/matrix-org/matrix-spec-proposals/pull/3881). ([\#13799](https://github.com/matrix-org/synapse/issues/13799), [\#13831](https://github.com/matrix-org/synapse/issues/13831), [\#13860](https://github.com/matrix-org/synapse/issues/13860)) -- Keep track when an event pulled over federation fails its signature check so we can intelligently back-off in the future. ([\#13815](https://github.com/matrix-org/synapse/issues/13815)) -- Improve validation for the unspecced, internal-only `_matrix/client/unstable/add_threepid/msisdn/submit_token` endpoint. ([\#13832](https://github.com/matrix-org/synapse/issues/13832)) -- Faster remote room joins: record _when_ we first partial-join to a room. ([\#13892](https://github.com/matrix-org/synapse/issues/13892)) -- Support a `dir` parameter on the `/relations` endpoint per [MSC3715](https://github.com/matrix-org/matrix-doc/pull/3715). ([\#13920](https://github.com/matrix-org/synapse/issues/13920)) -- Ask mail servers receiving emails from Synapse to not send automatic replies (e.g. out-of-office responses). ([\#13957](https://github.com/matrix-org/synapse/issues/13957)) - - -Bugfixes --------- - -- Send push notifications for invites received over federation. 
([\#13719](https://github.com/matrix-org/synapse/issues/13719), [\#14014](https://github.com/matrix-org/synapse/issues/14014)) -- Fix a long-standing bug where typing events would be accepted from remote servers not present in a room. Also fix a bug where incoming typing events would cause other incoming events to get stuck during a fast join. ([\#13830](https://github.com/matrix-org/synapse/issues/13830)) -- Fix a bug introduced in Synapse v1.53.0 where the experimental implementation of [MSC3715](https://github.com/matrix-org/matrix-spec-proposals/pull/3715) would give incorrect results when paginating forward. ([\#13840](https://github.com/matrix-org/synapse/issues/13840)) -- Fix access token leak to logs from proxy agent. ([\#13855](https://github.com/matrix-org/synapse/issues/13855)) -- Fix `have_seen_event` cache not being invalidated after we persist an event, which caused inefficiencies such as extra `/state` federation calls. ([\#13863](https://github.com/matrix-org/synapse/issues/13863)) -- Faster room joins: Fix a bug introduced in 1.66.0 where an error would be logged when syncing after joining a room. ([\#13872](https://github.com/matrix-org/synapse/issues/13872)) -- Fix a bug introduced in 1.66.0 where some required fields in the push rules sent to clients were not present anymore. Contributed by Nico. ([\#13904](https://github.com/matrix-org/synapse/issues/13904)) -- Fix packaging to include `Cargo.lock` in `sdist`. ([\#13909](https://github.com/matrix-org/synapse/issues/13909)) -- Fix a long-standing bug where device updates could cause delays sending out to-device messages over federation. ([\#13922](https://github.com/matrix-org/synapse/issues/13922)) -- Fix a bug introduced in v1.68.0 where Synapse would require `setuptools_rust` at runtime, even though the package is only required at build time. ([\#13952](https://github.com/matrix-org/synapse/issues/13952)) -- Fix a long-standing bug where `POST /_matrix/client/v3/keys/query` requests could result in excessively large SQL queries. ([\#13956](https://github.com/matrix-org/synapse/issues/13956)) -- Fix a performance regression in the `get_users_in_room` database query. Introduced in v1.67.0. ([\#13972](https://github.com/matrix-org/synapse/issues/13972)) -- Fix a bug introduced in v1.68.0 where the Rust extension wasn't built in `release` mode when using `poetry install`. ([\#14009](https://github.com/matrix-org/synapse/issues/14009)) -- Do not return an unspecified `original_event` field when using the stable `/relations` endpoint. Introduced in Synapse v1.57.0. ([\#14025](https://github.com/matrix-org/synapse/issues/14025)) -- Correctly handle a race with device lists when a remote user leaves during a partial join. ([\#13885](https://github.com/matrix-org/synapse/issues/13885)) -- Correctly handle sending local device list updates to remote servers during a partial join. ([\#13934](https://github.com/matrix-org/synapse/issues/13934)) - - -Improved Documentation ----------------------- - -- Add `worker_main_http_uri` for the worker generator bash script. ([\#13772](https://github.com/matrix-org/synapse/issues/13772)) -- Update URL for the NixOS module for Synapse. ([\#13818](https://github.com/matrix-org/synapse/issues/13818)) -- Fix a mistake in sso_mapping_providers.md: `map_user_attributes` is expected to return `display_name`, not `displayname`.
([\#13836](https://github.com/matrix-org/synapse/issues/13836)) -- Fix a cross-link from the registration admin API to the `registration_shared_secret` configuration documentation. ([\#13870](https://github.com/matrix-org/synapse/issues/13870)) -- Update the man page for the `hash_password` script to correct the default number of bcrypt rounds performed. ([\#13911](https://github.com/matrix-org/synapse/issues/13911), [\#13930](https://github.com/matrix-org/synapse/issues/13930)) -- Emphasize the right reasons when to use `(room_id, event_id)` in a database schema. ([\#13915](https://github.com/matrix-org/synapse/issues/13915)) -- Add instruction to contributing guide for running unit tests in parallel. Contributed by @ashfame. ([\#13928](https://github.com/matrix-org/synapse/issues/13928)) -- Clarify that the `auto_join_rooms` config option can also be used with Space aliases. ([\#13931](https://github.com/matrix-org/synapse/issues/13931)) -- Add some cross references to worker documentation. ([\#13974](https://github.com/matrix-org/synapse/issues/13974)) -- Linkify urls in config documentation. ([\#14003](https://github.com/matrix-org/synapse/issues/14003)) - - -Deprecations and Removals -------------------------- - -- Remove the `complete_sso_login` method from the Module API which was deprecated in Synapse 1.13.0. ([\#13843](https://github.com/matrix-org/synapse/issues/13843)) -- Announce that legacy metric names are deprecated, will be turned off by default in Synapse v1.71.0 and removed altogether in Synapse v1.73.0. See the upgrade notes for more information. ([\#14024](https://github.com/matrix-org/synapse/issues/14024)) - - -Internal Changes ----------------- - -- Speed up creation of DM rooms. ([\#13487](https://github.com/matrix-org/synapse/issues/13487), [\#13800](https://github.com/matrix-org/synapse/issues/13800)) -- Port push rules to using Rust. ([\#13768](https://github.com/matrix-org/synapse/issues/13768), [\#13838](https://github.com/matrix-org/synapse/issues/13838), [\#13889](https://github.com/matrix-org/synapse/issues/13889)) -- Optimise get rooms for user calls. Contributed by Nick @ Beeper (@fizzadar). ([\#13787](https://github.com/matrix-org/synapse/issues/13787)) -- Update the script which makes full schema dumps. ([\#13792](https://github.com/matrix-org/synapse/issues/13792)) -- Use shared methods for cache invalidation when persisting events, remove duplicate codepaths. Contributed by Nick @ Beeper (@fizzadar). ([\#13796](https://github.com/matrix-org/synapse/issues/13796)) -- Improve the `synapse.api.auth.Auth` mock used in unit tests. ([\#13809](https://github.com/matrix-org/synapse/issues/13809)) -- Faster Remote Room Joins: tell remote homeservers that we are unable to authorise them if they query a room which has partial state on our server. ([\#13823](https://github.com/matrix-org/synapse/issues/13823)) -- Carry IdP Session IDs through user-mapping sessions. ([\#13839](https://github.com/matrix-org/synapse/issues/13839)) -- Fix the release script not publishing binary wheels. ([\#13850](https://github.com/matrix-org/synapse/issues/13850)) -- Raise issue if complement fails with latest deps. ([\#13859](https://github.com/matrix-org/synapse/issues/13859)) -- Correct the comments in the complement dockerfile. ([\#13867](https://github.com/matrix-org/synapse/issues/13867)) -- Create a new snapshot of the database schema. 
([\#13873](https://github.com/matrix-org/synapse/issues/13873)) -- Faster room joins: Send device list updates to most servers in rooms with partial state. ([\#13874](https://github.com/matrix-org/synapse/issues/13874), [\#14013](https://github.com/matrix-org/synapse/issues/14013)) -- Add comments to the Prometheus recording rules to make it clear which set of rules you need for Grafana or Prometheus Console. ([\#13876](https://github.com/matrix-org/synapse/issues/13876)) -- Only pull relevant backfill points from the database based on the current depth and limit (instead of all) every time we want to `/backfill`. ([\#13879](https://github.com/matrix-org/synapse/issues/13879)) -- Faster room joins: Avoid waiting for full state when processing `/keys/changes` requests. ([\#13888](https://github.com/matrix-org/synapse/issues/13888)) -- Improve backfill robustness by trying more servers when we get a `4xx` error back. ([\#13890](https://github.com/matrix-org/synapse/issues/13890)) -- Fix mypy errors with canonicaljson 1.6.3. ([\#13905](https://github.com/matrix-org/synapse/issues/13905)) -- Faster remote room joins: correctly handle remote device list updates during a partial join. ([\#13913](https://github.com/matrix-org/synapse/issues/13913)) -- Complement image: propagate SIGTERM to all workers. ([\#13914](https://github.com/matrix-org/synapse/issues/13914)) -- Update an inaccurate comment in Synapse's upsert database helper. ([\#13924](https://github.com/matrix-org/synapse/issues/13924)) -- Update mypy (0.950 -> 0.981) and mypy-zope (0.3.7 -> 0.3.11). ([\#13925](https://github.com/matrix-org/synapse/issues/13925), [\#13993](https://github.com/matrix-org/synapse/issues/13993)) -- Use dedicated `get_local_users_in_room(room_id)` function to find local users when calculating users to copy over during a room upgrade. ([\#13960](https://github.com/matrix-org/synapse/issues/13960)) -- Refactor language in user directory `_track_user_joined_room` code to make it more clear that we use both local and remote users. ([\#13966](https://github.com/matrix-org/synapse/issues/13966)) -- Revert catch-all exceptions being recorded as event pull attempt failures (only handle what we know about). ([\#13969](https://github.com/matrix-org/synapse/issues/13969)) -- Speed up calculating push actions in large rooms. ([\#13973](https://github.com/matrix-org/synapse/issues/13973), [\#13992](https://github.com/matrix-org/synapse/issues/13992)) -- Enable update notifications from GitHub's Dependabot. ([\#13976](https://github.com/matrix-org/synapse/issues/13976)) -- Prototype a workflow to automatically add changelogs to Dependabot PRs. ([\#13998](https://github.com/matrix-org/synapse/issues/13998), [\#14011](https://github.com/matrix-org/synapse/issues/14011), [\#14017](https://github.com/matrix-org/synapse/issues/14017), [\#14021](https://github.com/matrix-org/synapse/issues/14021), [\#14027](https://github.com/matrix-org/synapse/issues/14027)) -- Fix type annotations to be compatible with new annotations in development versions of Twisted. ([\#14012](https://github.com/matrix-org/synapse/issues/14012)) -- Clear out stale entries in the `event_push_actions_staging` table. ([\#14020](https://github.com/matrix-org/synapse/issues/14020)) -- Bump versions of GitHub Actions.
([\#13978](https://github.com/matrix-org/synapse/issues/13978), [\#13979](https://github.com/matrix-org/synapse/issues/13979), [\#13980](https://github.com/matrix-org/synapse/issues/13980), [\#13982](https://github.com/matrix-org/synapse/issues/13982), [\#14015](https://github.com/matrix-org/synapse/issues/14015), [\#14019](https://github.com/matrix-org/synapse/issues/14019), [\#14022](https://github.com/matrix-org/synapse/issues/14022), [\#14023](https://github.com/matrix-org/synapse/issues/14023)) - - -Synapse 1.68.0 (2022-09-27) -=========================== - -Please note that Synapse will now refuse to start if configured to use a version of SQLite older than 3.27. - -In addition, please note that installing Synapse from a source checkout now requires a recent Rust compiler. -Those using packages will not be affected. On most platforms, installing with `pip install matrix-synapse` will not be affected. -See the [upgrade notes](https://matrix-org.github.io/synapse/v1.68/upgrade.html#upgrading-to-v1680). - -Bugfixes --------- - -- Fix packaging to include `Cargo.lock` in `sdist`. ([\#13909](https://github.com/matrix-org/synapse/issues/13909)) - - -Synapse 1.68.0rc2 (2022-09-23) -============================== - -Bugfixes --------- - -- Fix building from packaged sdist. Broken in v1.68.0rc1. ([\#13866](https://github.com/matrix-org/synapse/issues/13866)) - - -Internal Changes ----------------- - -- Fix the release script not publishing binary wheels. ([\#13850](https://github.com/matrix-org/synapse/issues/13850)) -- Lower minimum supported rustc version to 1.58.1. ([\#13857](https://github.com/matrix-org/synapse/issues/13857)) -- Lock Rust dependencies' versions. ([\#13858](https://github.com/matrix-org/synapse/issues/13858)) - - -Synapse 1.68.0rc1 (2022-09-20) -============================== - -Features --------- - -- Keep track of when we fail to process a pulled event over federation so we can intelligently back off in the future. ([\#13589](https://github.com/matrix-org/synapse/issues/13589), [\#13814](https://github.com/matrix-org/synapse/issues/13814)) -- Add an [admin API endpoint to fetch messages within a particular window of time](https://matrix-org.github.io/synapse/v1.68/admin_api/rooms.html#room-messages-api). ([\#13672](https://github.com/matrix-org/synapse/issues/13672)) -- Add an [admin API endpoint to find a user based on their external ID in an auth provider](https://matrix-org.github.io/synapse/v1.68/admin_api/user_admin_api.html#find-a-user-based-on-their-id-in-an-auth-provider). ([\#13810](https://github.com/matrix-org/synapse/issues/13810)) -- Cancel the processing of key query requests when they time out. 
([\#13680](https://github.com/matrix-org/synapse/issues/13680)) -- Improve validation of request bodies for the following client-server API endpoints: [`/account/3pid/msisdn/requestToken`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidmsisdnrequesttoken), [`/org.matrix.msc3720/account_status`](https://github.com/matrix-org/matrix-spec-proposals/blob/babolivier/user_status/proposals/3720-account-status.md#post-_matrixclientv1account_status), [`/account/3pid/add`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidadd), [`/account/3pid/bind`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidbind), [`/account/3pid/delete`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3piddelete) and [`/account/3pid/unbind`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidunbind). ([\#13687](https://github.com/matrix-org/synapse/issues/13687), [\#13736](https://github.com/matrix-org/synapse/issues/13736)) -- Document the timestamp when a user accepts the consent, if [consent tracking](https://matrix-org.github.io/synapse/latest/consent_tracking.html) is used. ([\#13741](https://github.com/matrix-org/synapse/issues/13741)) -- Add a `listeners[x].request_id_header` configuration option to specify which request header to extract and use as the request ID in order to correlate requests from a reverse proxy. ([\#13801](https://github.com/matrix-org/synapse/issues/13801)) - - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.41.0 where the `/hierarchy` API returned non-standard information (a `room_id` field under each entry in `children_state`). ([\#13506](https://github.com/matrix-org/synapse/issues/13506)) -- Fix a long-standing bug where previously rejected events could end up in room state because they pass auth checks given the current state of the room. ([\#13723](https://github.com/matrix-org/synapse/issues/13723)) -- Fix a long-standing bug where Synapse fails to start if a signing key file contains an empty line. ([\#13738](https://github.com/matrix-org/synapse/issues/13738)) -- Fix a long-standing bug where Synapse would fail to handle malformed user IDs or room aliases gracefully in certain cases. ([\#13746](https://github.com/matrix-org/synapse/issues/13746)) -- Fix a long-standing bug where device lists would remain cached when remote users left and rejoined the last room shared with the local homeserver. ([\#13749](https://github.com/matrix-org/synapse/issues/13749), [\#13826](https://github.com/matrix-org/synapse/issues/13826)) -- Fix a long-standing bug that could cause stale caches in some rare cases on the first startup of Synapse with replication. ([\#13766](https://github.com/matrix-org/synapse/issues/13766)) -- Fix a long-standing spec compliance bug where Synapse would accept a trailing slash on the end of `/get_missing_events` federation requests. ([\#13789](https://github.com/matrix-org/synapse/issues/13789)) -- Delete associated data from the `event_failed_pull_attempts`, `insertion_events` and `insertion_event_extremities` tables when purging the room. ([\#13825](https://github.com/matrix-org/synapse/issues/13825)) - - -Improved Documentation ----------------------- - -- Note that `libpq` is required on ARM-based Macs.
([\#13480](https://github.com/matrix-org/synapse/issues/13480)) -- Fix a mistake in the config manual introduced in Synapse 1.22.0: the `event_cache_size` _is_ scaled by `caches.global_factor`. ([\#13726](https://github.com/matrix-org/synapse/issues/13726)) -- Fix a typo in the documentation for the login ratelimiting configuration. ([\#13727](https://github.com/matrix-org/synapse/issues/13727)) -- Define Synapse's compatibility policy for SQLite versions. ([\#13728](https://github.com/matrix-org/synapse/issues/13728)) -- Add docs for the common fix of deleting the `matrix_synapse.egg-info/` directory to resolve Python dependency problems. ([\#13785](https://github.com/matrix-org/synapse/issues/13785)) -- Update request log format documentation to mention the format used when the authenticated user is controlling another user. ([\#13794](https://github.com/matrix-org/synapse/issues/13794)) - - -Deprecations and Removals -------------------------- - -- Synapse will now refuse to start if configured to use SQLite < 3.27. ([\#13760](https://github.com/matrix-org/synapse/issues/13760)) -- Don't include redundant `prev_state` in new events. Contributed by Denis Kariakin (@dakariakin). ([\#13791](https://github.com/matrix-org/synapse/issues/13791)) - - -Internal Changes ----------------- - -- Add a stub Rust crate. ([\#12595](https://github.com/matrix-org/synapse/issues/12595), [\#13734](https://github.com/matrix-org/synapse/issues/13734), [\#13735](https://github.com/matrix-org/synapse/issues/13735), [\#13743](https://github.com/matrix-org/synapse/issues/13743), [\#13763](https://github.com/matrix-org/synapse/issues/13763), [\#13769](https://github.com/matrix-org/synapse/issues/13769), [\#13778](https://github.com/matrix-org/synapse/issues/13778)) -- Bump the minimum dependency of `matrix_common` to 1.3.0 to make use of the `MXCUri` class. Use `MXCUri` to simplify media retention test code. ([\#13162](https://github.com/matrix-org/synapse/issues/13162)) -- Add and populate the `event_stream_ordering` column on the `receipts` table for future optimisation of push action processing. Contributed by Nick @ Beeper (@fizzadar). ([\#13703](https://github.com/matrix-org/synapse/issues/13703)) -- Rename the `EventFormatVersions` enum values so that they line up with room version numbers. ([\#13706](https://github.com/matrix-org/synapse/issues/13706)) -- Update trial old deps CI to use Poetry 1.2.0. ([\#13707](https://github.com/matrix-org/synapse/issues/13707), [\#13725](https://github.com/matrix-org/synapse/issues/13725)) -- Add experimental configuration option to allow disabling legacy Prometheus metric names. ([\#13714](https://github.com/matrix-org/synapse/issues/13714), [\#13717](https://github.com/matrix-org/synapse/issues/13717), [\#13718](https://github.com/matrix-org/synapse/issues/13718)) -- Fix typechecking with latest types-jsonschema. ([\#13724](https://github.com/matrix-org/synapse/issues/13724)) -- Strip number suffix from instance name to consolidate services that traces are spread over. ([\#13729](https://github.com/matrix-org/synapse/issues/13729)) -- Instrument `get_metadata_for_events` for understandable traces in Jaeger. ([\#13730](https://github.com/matrix-org/synapse/issues/13730)) -- Remove old queries to join room memberships to current state events. Contributed by Nick @ Beeper (@fizzadar). ([\#13745](https://github.com/matrix-org/synapse/issues/13745)) -- Avoid raising an error due to malformed user IDs in `get_current_hosts_in_room`.
Malformed user IDs cannot currently join a room, so this error would not be hit. ([\#13748](https://github.com/matrix-org/synapse/issues/13748)) -- Update the docstrings for `get_users_in_room` and `get_current_hosts_in_room` to explain the impact of partial state. ([\#13750](https://github.com/matrix-org/synapse/issues/13750)) -- Use an additional database query when persisting receipts. ([\#13752](https://github.com/matrix-org/synapse/issues/13752)) -- Preparatory work for storing thread IDs for notifications and receipts. ([\#13753](https://github.com/matrix-org/synapse/issues/13753)) -- Re-type hint some collections as read-only. ([\#13754](https://github.com/matrix-org/synapse/issues/13754)) -- Remove unused Prometheus recording rules from `synapse-v2.rules` and add comments describing where the rest are used. ([\#13756](https://github.com/matrix-org/synapse/issues/13756)) -- Add a check for editable installs if the Rust library needs rebuilding. ([\#13759](https://github.com/matrix-org/synapse/issues/13759)) -- Tag traces with the instance name to be able to easily jump into the right logs and filter traces by instance. ([\#13761](https://github.com/matrix-org/synapse/issues/13761)) -- Concurrently fetch room push actions when calculating badge counts. Contributed by Nick @ Beeper (@fizzadar). ([\#13765](https://github.com/matrix-org/synapse/issues/13765)) -- Update the script which makes full schema dumps. ([\#13770](https://github.com/matrix-org/synapse/issues/13770)) -- Deduplicate `is_server_notices_room`. ([\#13780](https://github.com/matrix-org/synapse/issues/13780)) -- Simplify the dependency DAG in the tests workflow. ([\#13784](https://github.com/matrix-org/synapse/issues/13784)) -- Remove an old, incorrect migration file. ([\#13788](https://github.com/matrix-org/synapse/issues/13788)) -- Remove unused method in `synapse.api.auth.Auth`. ([\#13795](https://github.com/matrix-org/synapse/issues/13795)) -- Fix a memory leak when running the unit tests. ([\#13798](https://github.com/matrix-org/synapse/issues/13798)) -- Use partial indices on SQLite. ([\#13802](https://github.com/matrix-org/synapse/issues/13802)) -- Check that portdb generates the same postgres schema as that in the source tree. ([\#13808](https://github.com/matrix-org/synapse/issues/13808)) -- Fix Docker build when Rust .so has been built locally first. ([\#13811](https://github.com/matrix-org/synapse/issues/13811)) -- Complement: Initialise the Postgres database directly inside the target image instead of the base Postgres image to fix building using Buildah. ([\#13819](https://github.com/matrix-org/synapse/issues/13819)) -- Support providing an index predicate clause when doing upserts. ([\#13822](https://github.com/matrix-org/synapse/issues/13822)) -- Minor speedups to linting in CI. ([\#13827](https://github.com/matrix-org/synapse/issues/13827)) - - -Synapse 1.67.0 (2022-09-13) -=========================== - -This release removes using the deprecated direct TCP replication configuration -for workers. Server admins should use Redis instead. See the [upgrade -notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670). - -The minimum version of `poetry` supported for managing source checkouts is now -1.2.0. - -**Notice:** from the next major release (1.68.0) installing Synapse from a source -checkout will require a recent Rust compiler. Those using packages or -`pip install matrix-synapse` will not be affected. 
See the [upgrade -notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670). - -**Notice:** from the next major release (1.68.0), running Synapse with a SQLite -database will require SQLite version 3.27.0 or higher. (The [current minimum - version is SQLite 3.22.0](https://github.com/matrix-org/synapse/blob/release-v1.67/synapse/storage/engines/sqlite.py#L69-L78).) -See [#12983](https://github.com/matrix-org/synapse/issues/12983) and the [upgrade notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670) for more details. - - -No significant changes since 1.67.0rc1. - - -Synapse 1.67.0rc1 (2022-09-06) -============================== - -Features --------- - -- Support setting the registration shared secret in a file, via a new `registration_shared_secret_path` configuration option. ([\#13614](https://github.com/matrix-org/synapse/issues/13614)) -- Change the default startup behaviour so that any missing "additional" configuration files (signing key, etc) are generated automatically. ([\#13615](https://github.com/matrix-org/synapse/issues/13615)) -- Improve performance of sending messages in rooms with thousands of local users. ([\#13634](https://github.com/matrix-org/synapse/issues/13634)) - - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.13 where the [List Rooms admin API](https://matrix-org.github.io/synapse/develop/admin_api/rooms.html#list-room-api) would return integers instead of booleans for the `federatable` and `public` fields when using a Sqlite database. ([\#13509](https://github.com/matrix-org/synapse/issues/13509)) -- Fix bug that user cannot `/forget` rooms after the last member has left the room. ([\#13546](https://github.com/matrix-org/synapse/issues/13546)) -- Faster Room Joins: fix `/make_knock` blocking indefinitely when the room in question is a partial-stated room. ([\#13583](https://github.com/matrix-org/synapse/issues/13583)) -- Fix loading the current stream position behind the actual position. ([\#13585](https://github.com/matrix-org/synapse/issues/13585)) -- Fix a longstanding bug in `register_new_matrix_user` which meant it was always necessary to explicitly give a server URL. ([\#13616](https://github.com/matrix-org/synapse/issues/13616)) -- Fix the running of [MSC1763](https://github.com/matrix-org/matrix-spec-proposals/pull/1763) retention purge_jobs in deployments with background jobs running on a worker by forcing them back onto the main worker. Contributed by Brad @ Beeper. ([\#13632](https://github.com/matrix-org/synapse/issues/13632)) -- Fix a long-standing bug that downloaded media for URL previews was not deleted while database background updates were running. ([\#13657](https://github.com/matrix-org/synapse/issues/13657)) -- Fix [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint to return the correct next event when the events have the same timestamp. ([\#13658](https://github.com/matrix-org/synapse/issues/13658)) -- Fix bug where we wedge media plugins if clients disconnect early. Introduced in v1.22.0. ([\#13660](https://github.com/matrix-org/synapse/issues/13660)) -- Fix a long-standing bug which meant that keys for unwhitelisted servers were not returned by `/_matrix/key/v2/query`. 
([\#13683](https://github.com/matrix-org/synapse/issues/13683)) -- Fix a bug introduced in Synapse 1.20.0 that would cause the unstable unread counts from [MSC2654](https://github.com/matrix-org/matrix-spec-proposals/pull/2654) to be calculated even if the feature is disabled. ([\#13694](https://github.com/matrix-org/synapse/issues/13694)) - - -Updates to the Docker image ---------------------------- - -- Update docker image to use a stable version of poetry. ([\#13688](https://github.com/matrix-org/synapse/issues/13688)) - - -Improved Documentation ----------------------- - -- Improve the description of the ["chain cover index"](https://matrix-org.github.io/synapse/latest/auth_chain_difference_algorithm.html) used internally by Synapse. ([\#13602](https://github.com/matrix-org/synapse/issues/13602)) -- Document how ["monthly active users"](https://matrix-org.github.io/synapse/latest/usage/administration/monthly_active_users.html) is calculated and used. ([\#13617](https://github.com/matrix-org/synapse/issues/13617)) -- Improve documentation around user registration. ([\#13640](https://github.com/matrix-org/synapse/issues/13640)) -- Remove documentation of legacy `frontend_proxy` worker app. ([\#13645](https://github.com/matrix-org/synapse/issues/13645)) -- Clarify documentation that HTTP replication traffic can be protected with a shared secret. ([\#13656](https://github.com/matrix-org/synapse/issues/13656)) -- Remove unintentional colons from [config manual](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html) headers. ([\#13665](https://github.com/matrix-org/synapse/issues/13665)) -- Update docs to make enabling metrics more clear. ([\#13678](https://github.com/matrix-org/synapse/issues/13678)) -- Clarify `(room_id, event_id)` global uniqueness and how we should scope our database schemas. ([\#13701](https://github.com/matrix-org/synapse/issues/13701)) - - -Deprecations and Removals -------------------------- - -- Drop support for calling `/_matrix/client/v3/rooms/{roomId}/invite` without an `id_access_token`, which was not permitted by the spec. Contributed by @Vetchu. ([\#13241](https://github.com/matrix-org/synapse/issues/13241)) -- Remove redundant `_get_joined_users_from_context` cache. Contributed by Nick @ Beeper (@fizzadar). ([\#13569](https://github.com/matrix-org/synapse/issues/13569)) -- Remove the ability to use direct TCP replication with workers. Direct TCP replication was deprecated in Synapse 1.18.0. Workers now require using Redis. ([\#13647](https://github.com/matrix-org/synapse/issues/13647)) -- Remove support for unstable [private read receipts](https://github.com/matrix-org/matrix-spec-proposals/pull/2285). ([\#13653](https://github.com/matrix-org/synapse/issues/13653), [\#13692](https://github.com/matrix-org/synapse/issues/13692)) - - -Internal Changes ----------------- - -- Extend the release script to wait for GitHub Actions to finish and to be usable as a guide for the whole process. ([\#13483](https://github.com/matrix-org/synapse/issues/13483)) -- Add experimental configuration option to allow disabling legacy Prometheus metric names. ([\#13540](https://github.com/matrix-org/synapse/issues/13540)) -- Cache user IDs instead of profiles to reduce cache memory usage. Contributed by Nick @ Beeper (@fizzadar). ([\#13573](https://github.com/matrix-org/synapse/issues/13573), [\#13600](https://github.com/matrix-org/synapse/issues/13600)) -- Optimize how Synapse calculates domains to fetch from during backfill. 
([\#13575](https://github.com/matrix-org/synapse/issues/13575)) -- Comment about a better future where we can get the state diff between two events. ([\#13586](https://github.com/matrix-org/synapse/issues/13586)) -- Instrument `_check_sigs_and_hash_and_fetch` to trace time spent in child concurrent calls for understandable traces in Jaeger. ([\#13588](https://github.com/matrix-org/synapse/issues/13588)) -- Improve performance of `@cachedList`. ([\#13591](https://github.com/matrix-org/synapse/issues/13591)) -- Minor speed up of fetching large numbers of push rules. ([\#13592](https://github.com/matrix-org/synapse/issues/13592)) -- Optimise push action fetching queries. Contributed by Nick @ Beeper (@fizzadar). ([\#13597](https://github.com/matrix-org/synapse/issues/13597)) -- Rename `event_map` to `unpersisted_events` when computing the auth differences. ([\#13603](https://github.com/matrix-org/synapse/issues/13603)) -- Refactor `get_users_in_room(room_id)` mis-use with dedicated `get_current_hosts_in_room(room_id)` function. ([\#13605](https://github.com/matrix-org/synapse/issues/13605)) -- Use dedicated `get_local_users_in_room(room_id)` function to find local users when calculating `join_authorised_via_users_server` of a `/make_join` request. ([\#13606](https://github.com/matrix-org/synapse/issues/13606)) -- Refactor `get_users_in_room(room_id)` mis-use to lookup single local user with dedicated `check_local_user_in_room(...)` function. ([\#13608](https://github.com/matrix-org/synapse/issues/13608)) -- Drop unused column `application_services_state.last_txn`. ([\#13627](https://github.com/matrix-org/synapse/issues/13627)) -- Improve readability of Complement CI logs by printing failure results last. ([\#13639](https://github.com/matrix-org/synapse/issues/13639)) -- Generalise the `@cancellable` annotation so it can be used on functions other than just servlet methods. ([\#13662](https://github.com/matrix-org/synapse/issues/13662)) -- Introduce a `CommonUsageMetrics` class to share some usage metrics between the Prometheus exporter and the phone home stats. ([\#13671](https://github.com/matrix-org/synapse/issues/13671)) -- Add some logging to help track down #13444. ([\#13679](https://github.com/matrix-org/synapse/issues/13679)) -- Update poetry lock file for v1.2.0. ([\#13689](https://github.com/matrix-org/synapse/issues/13689)) -- Add cache to `is_partial_state_room`. ([\#13693](https://github.com/matrix-org/synapse/issues/13693)) -- Update the Grafana dashboard that is included with Synapse in the `contrib` directory. ([\#13697](https://github.com/matrix-org/synapse/issues/13697)) -- Only run trial CI on all python versions on non-PRs. ([\#13698](https://github.com/matrix-org/synapse/issues/13698)) -- Fix typechecking with latest types-jsonschema. ([\#13712](https://github.com/matrix-org/synapse/issues/13712)) -- Reduce number of CI checks we run for PRs. ([\#13713](https://github.com/matrix-org/synapse/issues/13713)) - - -Synapse 1.66.0 (2022-08-31) -=========================== - -No significant changes since 1.66.0rc2. - -This release removes the ability for homeservers to delegate email ownership -verification and password reset confirmation to identity servers. This removal -was originally planned for Synapse 1.64, but was later deferred until now. See -the [upgrade notes](https://matrix-org.github.io/synapse/v1.66/upgrade.html#upgrading-to-v1660) for more details. 
- -Deployments with multiple workers should note that the direct TCP replication -configuration was deprecated in Synapse 1.18.0 and will be removed in Synapse -v1.67.0. In particular, the TCP `replication` [listener](https://matrix-org.github.io/synapse/v1.66/usage/configuration/config_documentation.html#listeners) -type (not to be confused with the `replication` resource on the `http` listener -type) and the `worker_replication_port` config option will be removed. - -To migrate to Redis, add the [`redis` config](https://matrix-org.github.io/synapse/v1.66/workers.html#shared-configuration), -then remove the TCP `replication` listener from the config of the master and -`worker_replication_port` from the worker config. Note that an HTTP listener with a -`replication` resource is still required. See the -[worker documentation](https://matrix-org.github.io/synapse/v1.66/workers.html) -for more details. - - -Synapse 1.66.0rc2 (2022-08-30) -============================== - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.66.0rc1 where the new rate limit metrics were misreported (`synapse_rate_limit_sleep_affected_hosts`, `synapse_rate_limit_reject_affected_hosts`). ([\#13649](https://github.com/matrix-org/synapse/issues/13649)) - - -Synapse 1.66.0rc1 (2022-08-23) -============================== - -Features --------- - -- Improve validation of request bodies for the following client-server API endpoints: [`/account/password`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3accountpassword), [`/account/password/email/requestToken`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3accountpasswordemailrequesttoken), [`/account/deactivate`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3accountdeactivate) and [`/account/3pid/email/requestToken`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidemailrequesttoken). ([\#13188](https://github.com/matrix-org/synapse/issues/13188), [\#13563](https://github.com/matrix-org/synapse/issues/13563)) -- Add forgotten status to [Room Details Admin API](https://matrix-org.github.io/synapse/latest/admin_api/rooms.html#room-details-api). ([\#13503](https://github.com/matrix-org/synapse/issues/13503)) -- Add an experimental implementation for [MSC3852 (Expose user agents on `Device`)](https://github.com/matrix-org/matrix-spec-proposals/pull/3852). ([\#13549](https://github.com/matrix-org/synapse/issues/13549)) -- Add `org.matrix.msc2716v4` experimental room version with updated content fields. Part of [MSC2716 (Importing history)](https://github.com/matrix-org/matrix-spec-proposals/pull/2716). ([\#13551](https://github.com/matrix-org/synapse/issues/13551)) -- Add support for compression to federation responses. ([\#13537](https://github.com/matrix-org/synapse/issues/13537)) -- Improve performance of sending messages in rooms with thousands of local users. ([\#13522](https://github.com/matrix-org/synapse/issues/13522), [\#13547](https://github.com/matrix-org/synapse/issues/13547)) - - -Bugfixes --------- - -- Faster room joins: make `/joined_members` block whilst the room is partial stated. ([\#13514](https://github.com/matrix-org/synapse/issues/13514)) -- Fix a bug introduced in Synapse 1.21.0 where the [`/event_reports` Admin API](https://matrix-org.github.io/synapse/develop/admin_api/event_reports.html) could return a total count which was larger than the number of results you can actually query for.
([\#13525](https://github.com/matrix-org/synapse/issues/13525)) -- Fix a bug introduced in Synapse 1.52.0 where sending server notices fails if `max_avatar_size` or `allowed_avatar_mimetypes` is set and not `system_mxid_avatar_url`. ([\#13566](https://github.com/matrix-org/synapse/issues/13566)) -- Fix a bug where the `opentracing.force_tracing_for_users` config option would not apply to [`/sendToDevice`](https://spec.matrix.org/v1.3/client-server-api/#put_matrixclientv3sendtodeviceeventtypetxnid) and [`/keys/upload`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3keysupload) requests. ([\#13574](https://github.com/matrix-org/synapse/issues/13574)) - - -Improved Documentation ----------------------- - -- Add `openssl` example for generating registration HMAC digest. ([\#13472](https://github.com/matrix-org/synapse/issues/13472)) -- Tidy up Synapse's README. ([\#13491](https://github.com/matrix-org/synapse/issues/13491)) -- Document that event purging related to the `redaction_retention_period` config option is executed only every 5 minutes. ([\#13492](https://github.com/matrix-org/synapse/issues/13492)) -- Add a warning to retention documentation regarding the possibility of database corruption. ([\#13497](https://github.com/matrix-org/synapse/issues/13497)) -- Document that the `DOCKER_BUILDKIT=1` flag is needed to build the docker image. ([\#13515](https://github.com/matrix-org/synapse/issues/13515)) -- Add missing links in `user_consent` section of configuration manual. ([\#13536](https://github.com/matrix-org/synapse/issues/13536)) -- Fix the doc and some warnings that were referring to the nonexistent `custom_templates_directory` setting (instead of `custom_template_directory`). ([\#13538](https://github.com/matrix-org/synapse/issues/13538)) - - -Deprecations and Removals -------------------------- - -- Remove the ability for homeservers to delegate email ownership verification - and password reset confirmation to identity servers. See [upgrade notes](https://matrix-org.github.io/synapse/v1.66/upgrade.html#upgrading-to-v1660) for more details. - -Internal Changes ----------------- - -### Faster room joins - -- Update the rejected state of events during de-partial-stating. ([\#13459](https://github.com/matrix-org/synapse/issues/13459)) -- Avoid blocking lazy-loading `/sync`s during partial joins due to remote memberships. Pull remote memberships from auth events instead of the room state. ([\#13477](https://github.com/matrix-org/synapse/issues/13477)) -- Refuse to start when faster joins is enabled on a deployment with workers, since worker configurations are not currently supported. ([\#13531](https://github.com/matrix-org/synapse/issues/13531)) - -### Metrics and tracing - -- Allow use of both `@trace` and `@tag_args` stacked on the same function. ([\#13453](https://github.com/matrix-org/synapse/issues/13453)) -- Instrument the federation/backfill part of `/messages` for understandable traces in Jaeger. ([\#13489](https://github.com/matrix-org/synapse/issues/13489)) -- Instrument `FederationStateIdsServlet` (`/state_ids`) for understandable traces in Jaeger. ([\#13499](https://github.com/matrix-org/synapse/issues/13499), [\#13554](https://github.com/matrix-org/synapse/issues/13554)) -- Track HTTP response times over 10 seconds from `/messages` (`synapse_room_message_list_rest_servlet_response_time_seconds`). ([\#13533](https://github.com/matrix-org/synapse/issues/13533)) -- Add metrics to track how the rate limiter is affecting requests (sleep/reject). 
([\#13534](https://github.com/matrix-org/synapse/issues/13534), [\#13541](https://github.com/matrix-org/synapse/issues/13541)) -- Add metrics to time how long it takes us to do backfill processing (`synapse_federation_backfill_processing_before_time_seconds`, `synapse_federation_backfill_processing_after_time_seconds`). ([\#13535](https://github.com/matrix-org/synapse/issues/13535), [\#13584](https://github.com/matrix-org/synapse/issues/13584)) -- Add metrics to track rate limiter queue timing (`synapse_rate_limit_queue_wait_time_seconds`). ([\#13544](https://github.com/matrix-org/synapse/issues/13544)) -- Update metrics to track `/messages` response time by room size. ([\#13545](https://github.com/matrix-org/synapse/issues/13545)) - -### Everything else - -- Refactor methods in `synapse.api.auth.Auth` to use `Requester` objects everywhere instead of user IDs. ([\#13024](https://github.com/matrix-org/synapse/issues/13024)) -- Clean-up tests for notifications. ([\#13471](https://github.com/matrix-org/synapse/issues/13471)) -- Add some miscellaneous comments to document sync, especially around `compute_state_delta`. ([\#13474](https://github.com/matrix-org/synapse/issues/13474)) -- Use literals in place of `HTTPStatus` constants in tests. ([\#13479](https://github.com/matrix-org/synapse/issues/13479), [\#13488](https://github.com/matrix-org/synapse/issues/13488)) -- Add comments about how event push actions are rotated. ([\#13485](https://github.com/matrix-org/synapse/issues/13485)) -- Modify HTML template content to better support mobile devices' screen sizes. ([\#13493](https://github.com/matrix-org/synapse/issues/13493)) -- Add a linter script which will reject non-strict types in Pydantic models. ([\#13502](https://github.com/matrix-org/synapse/issues/13502)) -- Reduce the number of tests using legacy TCP replication. ([\#13543](https://github.com/matrix-org/synapse/issues/13543)) -- Allow specifying additional request fields when using the `HomeServerTestCase.login` helper method. ([\#13549](https://github.com/matrix-org/synapse/issues/13549)) -- Make `HomeServerTestCase` load any configured homeserver modules automatically. ([\#13558](https://github.com/matrix-org/synapse/issues/13558)) - - -Synapse 1.65.0 (2022-08-16) -=========================== - -No significant changes since 1.65.0rc2. - - -Synapse 1.65.0rc2 (2022-08-11) -============================== - -Internal Changes ----------------- - -- Revert 'Remove the unspecced `room_id` field in the `/hierarchy` response. ([\#13365](https://github.com/matrix-org/synapse/issues/13365))' to give more time for clients to update. ([\#13501](https://github.com/matrix-org/synapse/issues/13501)) - - -Synapse 1.65.0rc1 (2022-08-09) -============================== - -Features --------- - -- Add support for stable prefixes for [MSC2285 (private read receipts)](https://github.com/matrix-org/matrix-spec-proposals/pull/2285). ([\#13273](https://github.com/matrix-org/synapse/issues/13273)) -- Add new unstable error codes `ORG.MATRIX.MSC3848.ALREADY_JOINED`, `ORG.MATRIX.MSC3848.NOT_JOINED`, and `ORG.MATRIX.MSC3848.INSUFFICIENT_POWER` described in [MSC3848](https://github.com/matrix-org/matrix-spec-proposals/pull/3848). ([\#13343](https://github.com/matrix-org/synapse/issues/13343)) -- Use stable prefixes for [MSC3827](https://github.com/matrix-org/matrix-spec-proposals/pull/3827). ([\#13370](https://github.com/matrix-org/synapse/issues/13370)) -- Add a new module API method to translate a room alias into a room ID. 
([\#13428](https://github.com/matrix-org/synapse/issues/13428)) -- Add a new module API method to create a room. ([\#13429](https://github.com/matrix-org/synapse/issues/13429)) -- Add remote join capability to the module API's `update_room_membership` method (in a backwards compatible manner). ([\#13441](https://github.com/matrix-org/synapse/issues/13441)) - - -Bugfixes --------- - -- Update the version of the LDAP3 auth provider module included in the `matrixdotorg/synapse` DockerHub images and the Debian packages hosted on packages.matrix.org to 0.2.2. This version fixes a regression in the module. ([\#13470](https://github.com/matrix-org/synapse/issues/13470)) -- Fix a bug introduced in Synapse 1.41.0 where the `/hierarchy` API returned non-standard information (a `room_id` field under each entry in `children_state`) (this was reverted in v1.65.0rc2, see changelog notes above). ([\#13365](https://github.com/matrix-org/synapse/issues/13365)) -- Fix a bug introduced in Synapse 0.24.0 that would respond with the wrong error status code to `/joined_members` requests when the requester is not a current member of the room. Contributed by @andrewdoh. ([\#13374](https://github.com/matrix-org/synapse/issues/13374)) -- Fix bug in handling of typing events for appservices. Contributed by Nick @ Beeper (@fizzadar). ([\#13392](https://github.com/matrix-org/synapse/issues/13392)) -- Fix a bug introduced in Synapse 1.57.0 where rooms listed in `exclude_rooms_from_sync` in the configuration file would not be properly excluded from incremental syncs. ([\#13408](https://github.com/matrix-org/synapse/issues/13408)) -- Fix a bug in the experimental faster-room-joins support which could cause it to get stuck in an infinite loop. ([\#13353](https://github.com/matrix-org/synapse/issues/13353)) -- Faster room joins: fix a bug which caused rejected events to become un-rejected during state syncing. ([\#13413](https://github.com/matrix-org/synapse/issues/13413)) -- Faster room joins: fix error when running out of servers to sync partial state with, so that Synapse raises the intended error instead. ([\#13432](https://github.com/matrix-org/synapse/issues/13432)) - - -Updates to the Docker image ---------------------------- - -- Make Docker images build on armv7 by installing cryptography dependencies in the 'requirements' stage. Contributed by Jasper Spaans. ([\#13372](https://github.com/matrix-org/synapse/issues/13372)) - - -Improved Documentation ----------------------- - -- Update the 'registration tokens' page to acknowledge that the relevant MSC was merged into version 1.2 of the Matrix specification. Contributed by @moan0s. ([\#11897](https://github.com/matrix-org/synapse/issues/11897)) -- Document which HTTP resources support gzip compression. ([\#13221](https://github.com/matrix-org/synapse/issues/13221)) -- Add steps describing how to elevate an existing user to administrator by manipulating the database. ([\#13230](https://github.com/matrix-org/synapse/issues/13230)) -- Fix wrong headline for `url_preview_accept_language` in documentation. ([\#13437](https://github.com/matrix-org/synapse/issues/13437)) -- Remove redundant 'Contents' section from the Configuration Manual. Contributed by @dklimpel. ([\#13438](https://github.com/matrix-org/synapse/issues/13438)) -- Update documentation for config setting `macaroon_secret_key`. ([\#13443](https://github.com/matrix-org/synapse/issues/13443)) -- Update outdated information on `sso_mapping_providers` documentation. 
([\#13449](https://github.com/matrix-org/synapse/issues/13449)) -- Fix example code in module documentation of `password_auth_provider_callbacks`. ([\#13450](https://github.com/matrix-org/synapse/issues/13450)) -- Make the configuration for the cache clearer. ([\#13481](https://github.com/matrix-org/synapse/issues/13481)) - - -Internal Changes ----------------- - -- Extend the release script to automatically push a new SyTest branch, rather than having that be a manual process. ([\#12978](https://github.com/matrix-org/synapse/issues/12978)) -- Make minor clarifications to the error messages given when we fail to join a room via any server. ([\#13160](https://github.com/matrix-org/synapse/issues/13160)) -- Enable Complement CI tests in the 'latest deps' test run. ([\#13213](https://github.com/matrix-org/synapse/issues/13213)) -- Fix long-standing bugged logic which was never hit in `get_pdu` asking every remote destination even after it finds an event. ([\#13346](https://github.com/matrix-org/synapse/issues/13346)) -- Faster room joins: avoid blocking when pulling events with partially missing prev events. ([\#13355](https://github.com/matrix-org/synapse/issues/13355)) -- Instrument `/messages` for understandable traces in Jaeger. ([\#13368](https://github.com/matrix-org/synapse/issues/13368)) -- Remove an unused argument to `get_relations_for_event`. ([\#13383](https://github.com/matrix-org/synapse/issues/13383)) -- Add a `merge-back` command to the release script, which automates merging the correct branches after a release. ([\#13393](https://github.com/matrix-org/synapse/issues/13393)) -- Adding missing type hints to tests. ([\#13397](https://github.com/matrix-org/synapse/issues/13397)) -- Faster Room Joins: don't leave a stuck room partial state flag if the join fails. ([\#13403](https://github.com/matrix-org/synapse/issues/13403)) -- Refactor `_resolve_state_at_missing_prevs` to compute an `EventContext` instead. ([\#13404](https://github.com/matrix-org/synapse/issues/13404), [\#13431](https://github.com/matrix-org/synapse/issues/13431)) -- Faster Room Joins: prevent Synapse from answering federated join requests for a room which it has not fully joined yet. ([\#13416](https://github.com/matrix-org/synapse/issues/13416)) -- Re-enable running Complement tests against Synapse with workers. ([\#13420](https://github.com/matrix-org/synapse/issues/13420)) -- Prevent unnecessary lookups to any external `get_event` cache. Contributed by Nick @ Beeper (@fizzadar). ([\#13435](https://github.com/matrix-org/synapse/issues/13435)) -- Add some tracing to give more insight into local room joins. ([\#13439](https://github.com/matrix-org/synapse/issues/13439)) -- Rename class `RateLimitConfig` to `RatelimitSettings` and `FederationRateLimitConfig` to `FederationRatelimitSettings`. ([\#13442](https://github.com/matrix-org/synapse/issues/13442)) -- Add some comments about how event push actions are stored. ([\#13445](https://github.com/matrix-org/synapse/issues/13445), [\#13455](https://github.com/matrix-org/synapse/issues/13455)) -- Improve rebuild speed for the "synapse-workers" docker image. ([\#13447](https://github.com/matrix-org/synapse/issues/13447)) -- Fix `@tag_args` being off-by-one with the arguments when tagging a span (tracing). ([\#13452](https://github.com/matrix-org/synapse/issues/13452)) -- Update type of `EventContext.rejected`. ([\#13460](https://github.com/matrix-org/synapse/issues/13460)) -- Use literals in place of `HTTPStatus` constants in tests. 
([\#13463](https://github.com/matrix-org/synapse/issues/13463), [\#13469](https://github.com/matrix-org/synapse/issues/13469)) -- Correct a misnamed argument in state res v2 internals. ([\#13467](https://github.com/matrix-org/synapse/issues/13467)) - - -Synapse 1.64.0 (2022-08-02) -=========================== - -No significant changes since 1.64.0rc2. - - -Deprecation Warning -------------------- - -Synapse 1.66.0 will remove the ability to delegate the tasks of verifying email address ownership, and password reset confirmation, to an identity server. - -If you require your homeserver to verify e-mail addresses or to support password resets via e-mail, please configure your homeserver with SMTP access so that it can send e-mails on its own behalf. -[Consult the configuration documentation for more information.](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#email) - - -Synapse 1.64.0rc2 (2022-07-29) -============================== - -This RC reintroduces support for `account_threepid_delegates.email`, which was removed in 1.64.0rc1. It remains deprecated and will be removed altogether in Synapse 1.66.0. ([\#13406](https://github.com/matrix-org/synapse/issues/13406)) - - -Synapse 1.64.0rc1 (2022-07-26) -============================== - -This RC removed the ability to delegate the tasks of verifying email address ownership, and password reset confirmation, to an identity server. - -We have also stopped building `.deb` packages for Ubuntu 21.10 as it is no longer an active version of Ubuntu. - - -Features --------- - -- Improve error messages when media thumbnails cannot be served. ([\#13038](https://github.com/matrix-org/synapse/issues/13038)) -- Allow pagination from remote event after discovering it from [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event`. ([\#13205](https://github.com/matrix-org/synapse/issues/13205)) -- Add a `room_type` field in the responses for the list room and room details admin APIs. Contributed by @andrewdoh. ([\#13208](https://github.com/matrix-org/synapse/issues/13208)) -- Add support for room version 10. ([\#13220](https://github.com/matrix-org/synapse/issues/13220)) -- Add per-room rate limiting for room joins. For each room, Synapse now monitors the rate of join events in that room, and throttles additional joins if that rate grows too large. ([\#13253](https://github.com/matrix-org/synapse/issues/13253), [\#13254](https://github.com/matrix-org/synapse/issues/13254), [\#13255](https://github.com/matrix-org/synapse/issues/13255), [\#13276](https://github.com/matrix-org/synapse/issues/13276)) -- Support Implicit TLS (TLS without using a STARTTLS upgrade, typically on port 465) for sending emails, enabled by the new option `force_tls`. Contributed by Jan Schär. ([\#13317](https://github.com/matrix-org/synapse/issues/13317)) - - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.15.0 where adding a user through the Synapse Admin API with a phone number would fail if the `enable_email_notifs` and `email_notifs_for_new_users` options were enabled. Contributed by @thomasweston12. ([\#13263](https://github.com/matrix-org/synapse/issues/13263)) -- Fix a bug introduced in Synapse 1.40.0 where a user invited to a restricted room would be briefly unable to join. ([\#13270](https://github.com/matrix-org/synapse/issues/13270)) -- Fix a long-standing bug where, in rare instances, Synapse could store the incorrect state for a room after a state resolution. 
([\#13278](https://github.com/matrix-org/synapse/issues/13278)) -- Fix a bug introduced in v1.18.0 where the `synapse_pushers` metric would overcount pushers when they are replaced. ([\#13296](https://github.com/matrix-org/synapse/issues/13296)) -- Disable autocorrection and autocapitalisation on the username text field shown during registration when using SSO. ([\#13350](https://github.com/matrix-org/synapse/issues/13350)) -- Update locked version of `frozendict` to 2.3.3, which has fixes for memory leaks affecting `/sync`. ([\#13284](https://github.com/matrix-org/synapse/issues/13284), [\#13352](https://github.com/matrix-org/synapse/issues/13352)) - - -Improved Documentation ----------------------- - -- Provide an example of using the Admin API. Contributed by @jejo86. ([\#13231](https://github.com/matrix-org/synapse/issues/13231)) -- Move the documentation for how URL previews work to the URL preview module. ([\#13233](https://github.com/matrix-org/synapse/issues/13233), [\#13261](https://github.com/matrix-org/synapse/issues/13261)) -- Add another `contrib` script to help set up worker processes. Contributed by @villepeh. ([\#13271](https://github.com/matrix-org/synapse/issues/13271)) -- Document that certain config options were added or changed in Synapse 1.62. Contributed by @behrmann. ([\#13314](https://github.com/matrix-org/synapse/issues/13314)) -- Document the new `rc_invites.per_issuer` throttling option added in Synapse 1.63. ([\#13333](https://github.com/matrix-org/synapse/issues/13333)) -- Mention that BuildKit is needed when building Docker images for tests. ([\#13338](https://github.com/matrix-org/synapse/issues/13338)) -- Improve Caddy reverse proxy documentation. ([\#13344](https://github.com/matrix-org/synapse/issues/13344)) - - -Deprecations and Removals -------------------------- - -- Drop tables that were formerly used for groups/communities. ([\#12967](https://github.com/matrix-org/synapse/issues/12967)) -- Drop support for delegating email verification to an external server. ([\#13192](https://github.com/matrix-org/synapse/issues/13192)) -- Drop support for calling `/_matrix/client/v3/account/3pid/bind` without an `id_access_token`, which was not permitted by the spec. Contributed by @Vetchu. ([\#13239](https://github.com/matrix-org/synapse/issues/13239)) -- Stop building `.deb` packages for Ubuntu 21.10 (Impish Indri), which has reached end of life. ([\#13326](https://github.com/matrix-org/synapse/issues/13326)) - - -Internal Changes ----------------- - -- Use lower transaction isolation level when purging rooms to avoid serialization errors. Contributed by Nick @ Beeper. ([\#12942](https://github.com/matrix-org/synapse/issues/12942)) -- Remove code which incorrectly attempted to reconcile state with remote servers when processing incoming events. ([\#12943](https://github.com/matrix-org/synapse/issues/12943)) -- Make the AS login method call `Auth.get_user_by_req` for checking the AS token. ([\#13094](https://github.com/matrix-org/synapse/issues/13094)) -- Always use a version of canonicaljson that supports the C implementation of frozendict. ([\#13172](https://github.com/matrix-org/synapse/issues/13172)) -- Add prometheus counters for ephemeral events and to device messages pushed to app services. Contributed by Brad @ Beeper. ([\#13175](https://github.com/matrix-org/synapse/issues/13175)) -- Refactor receipts servlet logic to avoid duplicated code. 
([\#13198](https://github.com/matrix-org/synapse/issues/13198)) -- Preparation for database schema simplifications: populate `state_key` and `rejection_reason` for existing rows in the `events` table. ([\#13215](https://github.com/matrix-org/synapse/issues/13215)) -- Remove unused database table `event_reference_hashes`. ([\#13218](https://github.com/matrix-org/synapse/issues/13218)) -- Further reduce queries used sending events when creating new rooms. Contributed by Nick @ Beeper (@fizzadar). ([\#13224](https://github.com/matrix-org/synapse/issues/13224)) -- Call the v2 identity service `/3pid/unbind` endpoint, rather than v1. Contributed by @Vetchu. ([\#13240](https://github.com/matrix-org/synapse/issues/13240)) -- Use an asynchronous cache wrapper for the get event cache. Contributed by Nick @ Beeper (@fizzadar). ([\#13242](https://github.com/matrix-org/synapse/issues/13242), [\#13308](https://github.com/matrix-org/synapse/issues/13308)) -- Optimise federation sender and appservice pusher event stream processing queries. Contributed by Nick @ Beeper (@fizzadar). ([\#13251](https://github.com/matrix-org/synapse/issues/13251)) -- Log the stack when waiting for an entire room to be un-partial stated. ([\#13257](https://github.com/matrix-org/synapse/issues/13257)) -- Fix spurious warning when fetching state after a missing prev event. ([\#13258](https://github.com/matrix-org/synapse/issues/13258)) -- Clean-up tests for notifications. ([\#13260](https://github.com/matrix-org/synapse/issues/13260)) -- Do not fail build if complement with workers fails. ([\#13266](https://github.com/matrix-org/synapse/issues/13266)) -- Don't pull out state in `compute_event_context` for unconflicted state. ([\#13267](https://github.com/matrix-org/synapse/issues/13267), [\#13274](https://github.com/matrix-org/synapse/issues/13274)) -- Reduce the rebuild time for the complement-synapse docker image. ([\#13279](https://github.com/matrix-org/synapse/issues/13279)) -- Don't pull out the full state when creating an event. ([\#13281](https://github.com/matrix-org/synapse/issues/13281), [\#13307](https://github.com/matrix-org/synapse/issues/13307)) -- Upgrade from Poetry 1.1.12 to 1.1.14, to fix bugs when locking packages. ([\#13285](https://github.com/matrix-org/synapse/issues/13285)) -- Make `DictionaryCache` expire full entries if they haven't been queried in a while, even if specific keys have been queried recently. ([\#13292](https://github.com/matrix-org/synapse/issues/13292)) -- Use `HTTPStatus` constants in place of literals in tests. ([\#13297](https://github.com/matrix-org/synapse/issues/13297)) -- Improve performance of query `_get_subset_users_in_room_with_profiles`. ([\#13299](https://github.com/matrix-org/synapse/issues/13299)) -- Up batch size of `bulk_get_push_rules` and `_get_joined_profiles_from_event_ids`. ([\#13300](https://github.com/matrix-org/synapse/issues/13300)) -- Remove unnecessary `json.dumps` from tests. ([\#13303](https://github.com/matrix-org/synapse/issues/13303)) -- Reduce memory usage of sending dummy events. ([\#13310](https://github.com/matrix-org/synapse/issues/13310)) -- Prevent formatting changes of [#3679](https://github.com/matrix-org/synapse/pull/3679) from appearing in `git blame`. ([\#13311](https://github.com/matrix-org/synapse/issues/13311)) -- Change `get_users_in_room` and `get_rooms_for_user` caches to enable pruning of old entries. 
([\#13313](https://github.com/matrix-org/synapse/issues/13313)) -- Validate federation destinations and log an error if a destination is invalid. ([\#13318](https://github.com/matrix-org/synapse/issues/13318)) -- Fix `FederationClient.get_pdu()` returning events from the cache as `outliers` instead of original events we saw over federation. ([\#13320](https://github.com/matrix-org/synapse/issues/13320)) -- Reduce memory usage of state caches. ([\#13323](https://github.com/matrix-org/synapse/issues/13323)) -- Reduce the amount of state we store in the `state_cache`. ([\#13324](https://github.com/matrix-org/synapse/issues/13324)) -- Add missing type hints to open tracing module. ([\#13328](https://github.com/matrix-org/synapse/issues/13328), [\#13345](https://github.com/matrix-org/synapse/issues/13345), [\#13362](https://github.com/matrix-org/synapse/issues/13362)) -- Remove old base slaved store and de-duplicate cache ID generators. Contributed by Nick @ Beeper (@fizzadar). ([\#13329](https://github.com/matrix-org/synapse/issues/13329), [\#13349](https://github.com/matrix-org/synapse/issues/13349)) -- When reporting metrics is enabled, use ~8x less data to describe DB transaction metrics. ([\#13342](https://github.com/matrix-org/synapse/issues/13342)) -- Faster room joins: skip soft fail checks while Synapse only has partial room state, since the current membership of event senders may not be accurately known. ([\#13354](https://github.com/matrix-org/synapse/issues/13354)) - - -Synapse 1.63.1 (2022-07-20) -=========================== - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.63.0 where push actions were incorrectly calculated for appservice users. This caused performance issues on servers with large numbers of appservices. ([\#13332](https://github.com/matrix-org/synapse/issues/13332)) - - -Synapse 1.63.0 (2022-07-19) -=========================== - -Improved Documentation ----------------------- - -- Clarify that homeserver server names are included in the reported data when the `report_stats` config option is enabled. ([\#13321](https://github.com/matrix-org/synapse/issues/13321)) - - -Synapse 1.63.0rc1 (2022-07-12) -============================== - -Features --------- - -- Add a rate limit for local users sending invites. ([\#13125](https://github.com/matrix-org/synapse/issues/13125)) -- Implement [MSC3827](https://github.com/matrix-org/matrix-spec-proposals/pull/3827): Filtering of `/publicRooms` by room type. ([\#13031](https://github.com/matrix-org/synapse/issues/13031)) -- Improve validation logic in the account data REST endpoints. ([\#13148](https://github.com/matrix-org/synapse/issues/13148)) - - -Bugfixes --------- - -- Fix a long-standing bug where application services were not able to join remote federated rooms without a profile. ([\#13131](https://github.com/matrix-org/synapse/issues/13131)) -- Fix a long-standing bug where `_get_state_map_for_room` might raise errors when third party event rules callbacks are present. ([\#13174](https://github.com/matrix-org/synapse/issues/13174)) -- Fix a long-standing bug where the `synapse_port_db` script could fail to copy rows with negative row ids. ([\#13226](https://github.com/matrix-org/synapse/issues/13226)) -- Fix a bug introduced in 1.54.0 where appservices would not receive room-less EDUs, like presence, when both [MSC2409](https://github.com/matrix-org/matrix-spec-proposals/pull/2409) and [MSC3202](https://github.com/matrix-org/matrix-spec-proposals/pull/3202) are enabled. 
([\#13236](https://github.com/matrix-org/synapse/issues/13236)) -- Fix a bug introduced in 1.62.0 where rows were not deleted from `event_push_actions` table on large servers. ([\#13194](https://github.com/matrix-org/synapse/issues/13194)) -- Fix a bug introduced in 1.62.0 where notification counts would get stuck after a highlighted message. ([\#13223](https://github.com/matrix-org/synapse/issues/13223)) -- Fix an exception when using the experimental [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint to look for remote federated imported events before room creation. ([\#13197](https://github.com/matrix-org/synapse/issues/13197)) -- Fix [MSC3202](https://github.com/matrix-org/matrix-spec-proposals/pull/3202)-enabled appservices not receiving to-device messages, preventing messages from being decrypted. ([\#13235](https://github.com/matrix-org/synapse/issues/13235)) - - -Updates to the Docker image ---------------------------- - -- Bump the version of `lxml` in matrix.org Docker images and Debian packages from 4.8.0 to 4.9.1. ([\#13207](https://github.com/matrix-org/synapse/issues/13207)) - - -Improved Documentation ----------------------- - -- Add an explanation of the `--report-stats` argument to the docs. ([\#13029](https://github.com/matrix-org/synapse/issues/13029)) -- Add a helpful example bash script to the contrib directory for creating multiple worker configuration files of the same type. Contributed by @villepeh. ([\#13032](https://github.com/matrix-org/synapse/issues/13032)) -- Add missing links to config options. ([\#13166](https://github.com/matrix-org/synapse/issues/13166)) -- Add documentation for homeserver usage statistics collection. ([\#13086](https://github.com/matrix-org/synapse/issues/13086)) -- Add documentation for the existing `databases` option in the homeserver configuration manual. ([\#13212](https://github.com/matrix-org/synapse/issues/13212)) -- Clean up references to sample configuration and redirect users to the configuration manual instead. ([\#13077](https://github.com/matrix-org/synapse/issues/13077), [\#13139](https://github.com/matrix-org/synapse/issues/13139)) -- Document how the Synapse team does reviews. ([\#13132](https://github.com/matrix-org/synapse/issues/13132)) -- Fix wrong section header for `allow_public_rooms_over_federation` in the homeserver config documentation. ([\#13116](https://github.com/matrix-org/synapse/issues/13116)) - - -Deprecations and Removals -------------------------- - -- Remove the obsolete `RoomEventsStoreTestCase`, which had been unused for 8 years. Contributed by @arkamar. ([\#13200](https://github.com/matrix-org/synapse/issues/13200)) - - -Internal Changes ----------------- - -- Add type annotations to `synapse.logging`, `tests.server` and `tests.utils`. ([\#13028](https://github.com/matrix-org/synapse/issues/13028), [\#13103](https://github.com/matrix-org/synapse/issues/13103), [\#13159](https://github.com/matrix-org/synapse/issues/13159), [\#13136](https://github.com/matrix-org/synapse/issues/13136)) -- Enforce type annotations for `tests.test_server`. ([\#13135](https://github.com/matrix-org/synapse/issues/13135)) -- Support temporary experimental return values for spam checker module callbacks. ([\#13044](https://github.com/matrix-org/synapse/issues/13044)) -- Add support to `complement.sh` for skipping the docker build.
([\#13143](https://github.com/matrix-org/synapse/issues/13143), [\#13158](https://github.com/matrix-org/synapse/issues/13158)) -- Add support to `complement.sh` for setting the log level using the `SYNAPSE_TEST_LOG_LEVEL` environment variable. ([\#13152](https://github.com/matrix-org/synapse/issues/13152)) -- Enable Complement testing in the 'Twisted Trunk' CI runs. ([\#13079](https://github.com/matrix-org/synapse/issues/13079), [\#13157](https://github.com/matrix-org/synapse/issues/13157)) -- Improve startup times in Complement test runs against workers, particularly in CPU-constrained environments. ([\#13127](https://github.com/matrix-org/synapse/issues/13127)) -- Update config used by Complement to allow device name lookup over federation. ([\#13167](https://github.com/matrix-org/synapse/issues/13167)) -- Faster room joins: handle race between persisting an event and un-partial stating a room. ([\#13100](https://github.com/matrix-org/synapse/issues/13100)) -- Faster room joins: fix race in recalculation of current room state. ([\#13151](https://github.com/matrix-org/synapse/issues/13151)) -- Faster room joins: skip waiting for full state when processing incoming events over federation. ([\#13144](https://github.com/matrix-org/synapse/issues/13144)) -- Raise a `DependencyError` on missing dependencies instead of a `ConfigError`. ([\#13113](https://github.com/matrix-org/synapse/issues/13113)) -- Avoid stripping line breaks from SQL sent to the database. ([\#13129](https://github.com/matrix-org/synapse/issues/13129)) -- Apply ratelimiting earlier in processing of `/send` requests. ([\#13134](https://github.com/matrix-org/synapse/issues/13134)) -- Improve exception handling when processing events received over federation. ([\#13145](https://github.com/matrix-org/synapse/issues/13145)) -- Check that `auto_vacuum` is disabled when porting a SQLite database to Postgres, as `VACUUM`s must not be performed between runs of the script. ([\#13195](https://github.com/matrix-org/synapse/issues/13195)) -- Reduce DB usage of `/sync` when a large number of unread messages have recently been sent in a room. ([\#13119](https://github.com/matrix-org/synapse/issues/13119), [\#13153](https://github.com/matrix-org/synapse/issues/13153)) -- Reduce memory consumption when processing incoming events in large rooms. ([\#13078](https://github.com/matrix-org/synapse/issues/13078), [\#13222](https://github.com/matrix-org/synapse/issues/13222)) -- Reduce number of queries used to get profile information. Contributed by Nick @ Beeper (@fizzadar). ([\#13209](https://github.com/matrix-org/synapse/issues/13209)) -- Reduce number of events queried during room creation. Contributed by Nick @ Beeper (@fizzadar). ([\#13210](https://github.com/matrix-org/synapse/issues/13210)) -- More aggressively rotate push actions. ([\#13211](https://github.com/matrix-org/synapse/issues/13211)) -- Add `max_line_length` setting for Python files to the `.editorconfig`. Contributed by @sumnerevans @ Beeper. ([\#13228](https://github.com/matrix-org/synapse/issues/13228)) - -Synapse 1.62.0 (2022-07-05) -=========================== - -No significant changes since 1.62.0rc3. - -Authors of spam-checker plugins should consult the [upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.62/docs/upgrade.md#upgrading-to-v1620) to learn about the enriched signatures for spam checker callbacks, which are supported with this release of Synapse. - -## Security advisory - -The following issue is fixed in 1.62.0. 
- -* [GHSA-jhjh-776m-4765](https://github.com/matrix-org/synapse/security/advisories/GHSA-jhjh-776m-4765) / [CVE-2022-31152](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-31152) - - Synapse instances prior to 1.62.0 did not implement the Matrix [event authorization rules](https://spec.matrix.org/v1.3/rooms/v10/#authorization-rules) correctly. An attacker could craft events which would be accepted by Synapse but not a spec-conformant server, potentially causing divergence in the room state between servers. - - Homeservers with federation disabled via the [`federation_domain_whitelist`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#federation_domain_whitelist) config option are unaffected. - - Administrators of homeservers with federation enabled are advised to upgrade to v1.62.0 or higher. - - Fixed by [#13087](https://github.com/matrix-org/synapse/pull/13087) and [#13088](https://github.com/matrix-org/synapse/pull/13088). - -Synapse 1.62.0rc3 (2022-07-04) -============================== - -Bugfixes --------- - -- Update the version of the [ldap3 plugin](https://github.com/matrix-org/matrix-synapse-ldap3/) included in the `matrixdotorg/synapse` DockerHub images and the Debian packages hosted on `packages.matrix.org` to 0.2.1. This fixes [a bug](https://github.com/matrix-org/matrix-synapse-ldap3/pull/163) with usernames containing uppercase characters. ([\#13156](https://github.com/matrix-org/synapse/issues/13156)) -- Fix a bug introduced in Synapse 1.62.0rc1 affecting unread counts for users on small servers. ([\#13168](https://github.com/matrix-org/synapse/issues/13168)) - - -Synapse 1.62.0rc2 (2022-07-01) -============================== - -Bugfixes --------- - -- Fix unread counts for users on large servers. Introduced in v1.62.0rc1. ([\#13140](https://github.com/matrix-org/synapse/issues/13140)) -- Fix DB performance when deleting old push notifications. Introduced in v1.62.0rc1. ([\#13141](https://github.com/matrix-org/synapse/issues/13141)) - - -Synapse 1.62.0rc1 (2022-06-28) -============================== - -Features --------- - -- Port the spam-checker API callbacks to a new, richer API. This is part of an ongoing change to let spam-checker modules inform users of the reason their event or operation is rejected. ([\#12857](https://github.com/matrix-org/synapse/issues/12857), [\#13047](https://github.com/matrix-org/synapse/issues/13047)) -- Allow server admins to customise the response of the `/.well-known/matrix/client` endpoint. ([\#13035](https://github.com/matrix-org/synapse/issues/13035)) -- Add metrics measuring the CPU and DB time spent in state resolution. ([\#13036](https://github.com/matrix-org/synapse/issues/13036)) -- Speed up fetching of device list changes in `/sync` and `/keys/changes`. ([\#13045](https://github.com/matrix-org/synapse/issues/13045), [\#13098](https://github.com/matrix-org/synapse/issues/13098)) -- Improve URL previews for sites which only provide Twitter Card metadata, e.g. LWN.net. ([\#13056](https://github.com/matrix-org/synapse/issues/13056)) - - -Bugfixes --------- - -- Update [MSC3786](https://github.com/matrix-org/matrix-spec-proposals/pull/3786) implementation to check `state_key`. ([\#12939](https://github.com/matrix-org/synapse/issues/12939)) -- Fix a bug introduced in Synapse 1.58 where Synapse would not report full version information when installed from a git checkout. This is a best-effort affair and not guaranteed to be stable. 
([\#12973](https://github.com/matrix-org/synapse/issues/12973)) -- Fix a bug introduced in Synapse 1.60 where Synapse would fail to start if the `sqlite3` module was not available. ([\#12979](https://github.com/matrix-org/synapse/issues/12979)) -- Fix a bug where non-standard information was required when requesting the `/hierarchy` API over federation. Introduced in Synapse 1.41.0. ([\#12991](https://github.com/matrix-org/synapse/issues/12991)) -- Fix a long-standing bug which meant that rate limiting was not restrictive enough in some cases. ([\#13018](https://github.com/matrix-org/synapse/issues/13018)) -- Fix a bug introduced in Synapse 1.58 where profile requests for a malformed user ID would cause an internal error. Synapse now returns 400 Bad Request in this situation. ([\#13041](https://github.com/matrix-org/synapse/issues/13041)) -- Fix some inconsistencies in the event authentication code. ([\#13087](https://github.com/matrix-org/synapse/issues/13087), [\#13088](https://github.com/matrix-org/synapse/issues/13088)) -- Fix a long-standing bug where room directory requests would cause an internal server error if given a malformed room alias. ([\#13106](https://github.com/matrix-org/synapse/issues/13106)) - - -Improved Documentation ----------------------- - -- Add documentation for how to configure Synapse with Workers using Docker Compose. Includes example worker config and docker-compose.yaml. Contributed by @Thumbscrew. ([\#12737](https://github.com/matrix-org/synapse/issues/12737)) -- Ensure the [Poetry cheat sheet](https://matrix-org.github.io/synapse/develop/development/dependencies.html) is available in the online documentation. ([\#13022](https://github.com/matrix-org/synapse/issues/13022)) -- Mention removed community/group worker endpoints in upgrade.md. Contributed by @olmari. ([\#13023](https://github.com/matrix-org/synapse/issues/13023)) -- Add instructions for running Complement with `gotestfmt`-formatted output locally. ([\#13073](https://github.com/matrix-org/synapse/issues/13073)) -- Update OpenTracing docs to reference the configuration manual rather than the configuration file. ([\#13076](https://github.com/matrix-org/synapse/issues/13076)) -- Update information on downstream Debian packages. ([\#13095](https://github.com/matrix-org/synapse/issues/13095)) -- Remove documentation for the Delete Group Admin API which no longer exists. ([\#13112](https://github.com/matrix-org/synapse/issues/13112)) - - -Deprecations and Removals -------------------------- - -- Remove the unspecced `DELETE /directory/list/room/{roomId}` endpoint, which hid rooms from the [public room directory](https://spec.matrix.org/v1.3/client-server-api/#listing-rooms). Instead, `PUT` to the same URL with a visibility of `"private"`. ([\#13123](https://github.com/matrix-org/synapse/issues/13123)) - - -Internal Changes ----------------- - -- Add tests for cancellation of `GET /rooms/$room_id/members` and `GET /rooms/$room_id/state` requests. ([\#12674](https://github.com/matrix-org/synapse/issues/12674)) -- Report login failures due to unknown third party identifiers in the same way as failures due to invalid passwords. This prevents an attacker from using the error response to determine if the identifier exists. Contributed by Daniel Aloni. ([\#12738](https://github.com/matrix-org/synapse/issues/12738)) -- Merge the Complement testing Docker images into a single, multi-purpose image.
([\#12881](https://github.com/matrix-org/synapse/issues/12881), [\#13075](https://github.com/matrix-org/synapse/issues/13075)) -- Simplify the database schema for `event_edges`. ([\#12893](https://github.com/matrix-org/synapse/issues/12893)) -- Clean up the test code for client disconnection. ([\#12929](https://github.com/matrix-org/synapse/issues/12929)) -- Remove code generating comments in configuration. ([\#12941](https://github.com/matrix-org/synapse/issues/12941)) -- Add `Cross-Origin-Resource-Policy: cross-origin` header to content repository's thumbnail and download endpoints. ([\#12944](https://github.com/matrix-org/synapse/issues/12944)) -- Replace noop background updates with `DELETE` delta. ([\#12954](https://github.com/matrix-org/synapse/issues/12954), [\#13050](https://github.com/matrix-org/synapse/issues/13050)) -- Use lower isolation level when inserting read receipts to avoid serialization errors. Contributed by Nick @ Beeper. ([\#12957](https://github.com/matrix-org/synapse/issues/12957)) -- Reduce the amount of state we pull from the DB. ([\#12963](https://github.com/matrix-org/synapse/issues/12963)) -- Enable testing against PostgreSQL databases in Complement CI. ([\#12965](https://github.com/matrix-org/synapse/issues/12965), [\#13034](https://github.com/matrix-org/synapse/issues/13034)) -- Fix an inaccurate comment. ([\#12969](https://github.com/matrix-org/synapse/issues/12969)) -- Remove the `delete_device` method and always call `delete_devices`. ([\#12970](https://github.com/matrix-org/synapse/issues/12970)) -- Use a GitHub form for issues rather than a hard-to-read, easy-to-ignore template. ([\#12982](https://github.com/matrix-org/synapse/issues/12982)) -- Move [MSC3715](https://github.com/matrix-org/matrix-spec-proposals/pull/3715) behind an experimental config flag. ([\#12984](https://github.com/matrix-org/synapse/issues/12984)) -- Add type hints to tests. ([\#12985](https://github.com/matrix-org/synapse/issues/12985), [\#13099](https://github.com/matrix-org/synapse/issues/13099)) -- Refactor macaroon tokens generation and move the unsubscribe link in notification emails to `/_synapse/client/unsubscribe`. ([\#12986](https://github.com/matrix-org/synapse/issues/12986)) -- Fix documentation for running complement tests. ([\#12990](https://github.com/matrix-org/synapse/issues/12990)) -- Faster joins: add issue links to the TODO comments in the code. ([\#13004](https://github.com/matrix-org/synapse/issues/13004)) -- Reduce DB usage of `/sync` when a large number of unread messages have recently been sent in a room. ([\#13005](https://github.com/matrix-org/synapse/issues/13005), [\#13096](https://github.com/matrix-org/synapse/issues/13096), [\#13118](https://github.com/matrix-org/synapse/issues/13118)) -- Replaced usage of PyJWT with methods from Authlib in `org.matrix.login.jwt`. Contributed by Hannes Lerchl. ([\#13011](https://github.com/matrix-org/synapse/issues/13011)) -- Modernize the `contrib/graph/` scripts. ([\#13013](https://github.com/matrix-org/synapse/issues/13013)) -- Remove redundant `room_version` parameters from event auth functions. ([\#13017](https://github.com/matrix-org/synapse/issues/13017)) -- Decouple `synapse.api.auth_blocking.AuthBlocking` from `synapse.api.auth.Auth`. ([\#13021](https://github.com/matrix-org/synapse/issues/13021)) -- Add type annotations to `synapse.storage.databases.main.devices`. ([\#13025](https://github.com/matrix-org/synapse/issues/13025)) -- Set default `sync_response_cache_duration` to two minutes. 
([\#13042](https://github.com/matrix-org/synapse/issues/13042)) -- Rename CI test runs. ([\#13046](https://github.com/matrix-org/synapse/issues/13046)) -- Increase timeout of complement CI test runs. ([\#13048](https://github.com/matrix-org/synapse/issues/13048)) -- Refactor entry points so that they all have a `main` function. ([\#13052](https://github.com/matrix-org/synapse/issues/13052)) -- Refactor the Dockerfile-workers configuration script to use Jinja2 templates in Synapse workers' Supervisord blocks. ([\#13054](https://github.com/matrix-org/synapse/issues/13054)) -- Add headers to individual options in config documentation to allow for linking. ([\#13055](https://github.com/matrix-org/synapse/issues/13055)) -- Make Complement CI logs easier to read. ([\#13057](https://github.com/matrix-org/synapse/issues/13057), [\#13058](https://github.com/matrix-org/synapse/issues/13058), [\#13069](https://github.com/matrix-org/synapse/issues/13069)) -- Don't instantiate modules with keyword arguments. ([\#13060](https://github.com/matrix-org/synapse/issues/13060)) -- Fix type checking errors against Twisted trunk. ([\#13061](https://github.com/matrix-org/synapse/issues/13061)) -- Allow MSC3030 `timestamp_to_event` calls from anyone on world-readable rooms. ([\#13062](https://github.com/matrix-org/synapse/issues/13062)) -- Add a CI job to check that schema deltas are in the correct folder. ([\#13063](https://github.com/matrix-org/synapse/issues/13063)) -- Avoid rechecking event auth rules which are independent of room state. ([\#13065](https://github.com/matrix-org/synapse/issues/13065)) -- Reduce the duplication of code that invokes the rate limiter. ([\#13070](https://github.com/matrix-org/synapse/issues/13070)) -- Add a Subject Alternative Name to the certificate generated for Complement tests. ([\#13071](https://github.com/matrix-org/synapse/issues/13071)) -- Add more tests for room upgrades. ([\#13074](https://github.com/matrix-org/synapse/issues/13074)) -- Pin dependencies maintained by matrix.org to [semantic version](https://semver.org/) bounds. ([\#13082](https://github.com/matrix-org/synapse/issues/13082)) -- Correctly report prometheus DB stats for `get_earliest_token_for_stats`. ([\#13085](https://github.com/matrix-org/synapse/issues/13085)) -- Fix a long-standing bug where a finished logging context would be re-started when Synapse failed to persist an event from federation. ([\#13089](https://github.com/matrix-org/synapse/issues/13089)) -- Simplify the alias deletion logic as an application service. ([\#13093](https://github.com/matrix-org/synapse/issues/13093)) -- Add type annotations to `tests.test_server`. ([\#13124](https://github.com/matrix-org/synapse/issues/13124)) - - -Synapse 1.61.1 (2022-06-28) -=========================== - -This patch release fixes a security issue regarding URL previews, affecting all prior versions of Synapse. Server administrators are encouraged to update Synapse as soon as possible. We are not aware of these vulnerabilities being exploited in the wild. - -Server administrators who are unable to update Synapse may use the workarounds described in the linked GitHub Security Advisory below. - -## Security advisory - -The following issue is fixed in 1.61.1. 
- -* [GHSA-22p3-qrh9-cx32](https://github.com/matrix-org/synapse/security/advisories/GHSA-22p3-qrh9-cx32) / [CVE-2022-31052](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-31052) - - Synapse instances with the [`url_preview_enabled`](https://matrix-org.github.io/synapse/v1.61/usage/configuration/config_documentation.html#media-store) homeserver config option set to `true` are affected. URL previews of some web pages can lead to unbounded recursion, causing the request to either fail, or in some cases crash the running Synapse process. - - Requesting URL previews requires authentication. Nevertheless, it is possible to exploit this maliciously, either by malicious users on the homeserver, or by remote users sending URLs that a local user's client may automatically request a URL preview for. - - Homeservers with the `url_preview_enabled` configuration option set to `false` (the default) are unaffected. Instances with the `enable_media_repo` configuration option set to `false` are also unaffected, as this also disables URL preview functionality. - - Fixed by [fa1308061802ac7b7d20e954ba7372c5ac292333](https://github.com/matrix-org/synapse/commit/fa1308061802ac7b7d20e954ba7372c5ac292333). - -Synapse 1.61.0 (2022-06-14) -=========================== - -This release removes support for the non-standard feature known both as 'groups' and as 'communities', which have been superseded by *Spaces*. - -See [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1610) -for more details. - -Improved Documentation ----------------------- - -- Mention removed community/group worker endpoints in [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1610). Contributed by @olmari. ([\#13023](https://github.com/matrix-org/synapse/issues/13023)) - - -Synapse 1.61.0rc1 (2022-06-07) -============================== - -Features --------- - -- Add new `media_retention` options to the homeserver config for routinely cleaning up non-recently accessed media. ([\#12732](https://github.com/matrix-org/synapse/issues/12732), [\#12972](https://github.com/matrix-org/synapse/issues/12972), [\#12977](https://github.com/matrix-org/synapse/issues/12977)) -- Experimental support for [MSC3772](https://github.com/matrix-org/matrix-spec-proposals/pull/3772): Push rule for mutually related events. ([\#12740](https://github.com/matrix-org/synapse/issues/12740), [\#12859](https://github.com/matrix-org/synapse/issues/12859)) -- Update to the `check_event_for_spam` module callback: Deprecate the current callback signature, replace it with a new signature that is both less ambiguous (replacing booleans with explicit allow/block) and more powerful (ability to return explicit error codes). ([\#12808](https://github.com/matrix-org/synapse/issues/12808)) -- Add storage and module API methods to get monthly active users (and their corresponding appservices) within an optionally specified time range. ([\#12838](https://github.com/matrix-org/synapse/issues/12838), [\#12917](https://github.com/matrix-org/synapse/issues/12917)) -- Support the new error code `ORG.MATRIX.MSC3823.USER_ACCOUNT_SUSPENDED` from [MSC3823](https://github.com/matrix-org/matrix-spec-proposals/pull/3823). ([\#12845](https://github.com/matrix-org/synapse/issues/12845), [\#12923](https://github.com/matrix-org/synapse/issues/12923)) -- Add a configurable background job to delete stale devices. 
([\#12855](https://github.com/matrix-org/synapse/issues/12855)) -- Improve URL previews for pages with empty elements. ([\#12951](https://github.com/matrix-org/synapse/issues/12951)) -- Allow updating a user's password using the admin API without logging out their devices. Contributed by @jcgruenhage. ([\#12952](https://github.com/matrix-org/synapse/issues/12952)) - - -Bugfixes --------- - -- Always send an `access_token` in `/thirdparty/` requests to appservices, as required by the [Application Service API specification](https://spec.matrix.org/v1.1/application-service-api/#third-party-networks). ([\#12746](https://github.com/matrix-org/synapse/issues/12746)) -- Implement [MSC3816](https://github.com/matrix-org/matrix-spec-proposals/pull/3816): sending the root event in a thread should count as having 'participated' in it. ([\#12766](https://github.com/matrix-org/synapse/issues/12766)) -- Delete events from the `federation_inbound_events_staging` table when a room is purged through the admin API. ([\#12784](https://github.com/matrix-org/synapse/issues/12784)) -- Fix a bug where we did not correctly handle invalid device list updates over federation. Contributed by Carl Bordum Hansen. ([\#12829](https://github.com/matrix-org/synapse/issues/12829)) -- Fix a bug which allowed multiple async operations to access database locks concurrently. Contributed by @sumnerevans @ Beeper. ([\#12832](https://github.com/matrix-org/synapse/issues/12832)) -- Fix an issue introduced in Synapse 0.34 where the `/notifications` endpoint would only return notifications if a user registered at least one pusher. Contributed by Famedly. ([\#12840](https://github.com/matrix-org/synapse/issues/12840)) -- Fix a bug where servers using a Postgres database would fail to backfill from an insertion event when MSC2716 is enabled (`experimental_features.msc2716_enabled`). ([\#12843](https://github.com/matrix-org/synapse/issues/12843)) -- Fix [MSC3787](https://github.com/matrix-org/matrix-spec-proposals/pull/3787) rooms being omitted from room directory, room summary and space hierarchy responses. ([\#12858](https://github.com/matrix-org/synapse/issues/12858)) -- Fix a bug introduced in Synapse 1.54.0 which could sometimes cause exceptions when handling federated traffic. ([\#12877](https://github.com/matrix-org/synapse/issues/12877)) -- Fix a bug introduced in Synapse 1.59.0 which caused room deletion to fail with a foreign key violation error. ([\#12889](https://github.com/matrix-org/synapse/issues/12889)) -- Fix a long-standing bug which caused the `/messages` endpoint to return an incorrect `end` attribute when there were no more events. Contributed by @Vetchu. ([\#12903](https://github.com/matrix-org/synapse/issues/12903)) -- Fix a bug introduced in Synapse 1.58.0 where `/sync` would fail if the most recent event in a room was a redaction of an event that has since been purged. ([\#12905](https://github.com/matrix-org/synapse/issues/12905)) -- Fix a potential memory leak when generating thumbnails. ([\#12932](https://github.com/matrix-org/synapse/issues/12932)) -- Fix a long-standing bug where a URL preview would break if the image failed to download. ([\#12950](https://github.com/matrix-org/synapse/issues/12950)) - - -Improved Documentation ----------------------- - -- Fix typographical errors in documentation. ([\#12863](https://github.com/matrix-org/synapse/issues/12863)) -- Fix documentation incorrectly stating the `sendToDevice` endpoint can be directed at generic workers. Contributed by Nick @ Beeper. 
([\#12867](https://github.com/matrix-org/synapse/issues/12867)) - - -Deprecations and Removals -------------------------- - -- Remove support for the non-standard groups/communities feature from Synapse. ([\#12553](https://github.com/matrix-org/synapse/issues/12553), [\#12558](https://github.com/matrix-org/synapse/issues/12558), [\#12563](https://github.com/matrix-org/synapse/issues/12563), [\#12895](https://github.com/matrix-org/synapse/issues/12895), [\#12897](https://github.com/matrix-org/synapse/issues/12897), [\#12899](https://github.com/matrix-org/synapse/issues/12899), [\#12900](https://github.com/matrix-org/synapse/issues/12900), [\#12936](https://github.com/matrix-org/synapse/issues/12936), [\#12966](https://github.com/matrix-org/synapse/issues/12966)) -- Remove contributed `kick_users.py` script. This is broken under Python 3, and is not added to the environment when `pip install`ing Synapse. ([\#12908](https://github.com/matrix-org/synapse/issues/12908)) -- Remove `contrib/jitsimeetbridge`. This was an unused experiment that hasn't been meaningfully changed since 2014. ([\#12909](https://github.com/matrix-org/synapse/issues/12909)) -- Remove unused `contrib/experiements/cursesio.py` script, which fails to run under Python 3. ([\#12910](https://github.com/matrix-org/synapse/issues/12910)) -- Remove unused `contrib/experiements/test_messaging.py` script. This fails to run on Python 3. ([\#12911](https://github.com/matrix-org/synapse/issues/12911)) - - -Internal Changes ----------------- - -- Test Synapse against Complement with workers. ([\#12810](https://github.com/matrix-org/synapse/issues/12810), [\#12933](https://github.com/matrix-org/synapse/issues/12933)) -- Reduce the amount of state we pull from the DB. ([\#12811](https://github.com/matrix-org/synapse/issues/12811), [\#12964](https://github.com/matrix-org/synapse/issues/12964)) -- Try other homeservers when re-syncing state for rooms with partial state. ([\#12812](https://github.com/matrix-org/synapse/issues/12812)) -- Resume state re-syncing for rooms with partial state after a Synapse restart. ([\#12813](https://github.com/matrix-org/synapse/issues/12813)) -- Remove Mutual Rooms' ([MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666)) endpoint dependency on the User Directory. ([\#12836](https://github.com/matrix-org/synapse/issues/12836)) -- Experimental: expand `check_event_for_spam` with ability to return additional fields. This enables spam-checker implementations to experiment with mechanisms to give users more information about why they are blocked and whether any action is needed from them to be unblocked. ([\#12846](https://github.com/matrix-org/synapse/issues/12846)) -- Remove `dont_notify` from the `.m.rule.room.server_acl` rule. ([\#12849](https://github.com/matrix-org/synapse/issues/12849)) -- Remove the unstable `/hierarchy` endpoint from [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946). ([\#12851](https://github.com/matrix-org/synapse/issues/12851)) -- Pull out less state when handling gaps in room DAG. ([\#12852](https://github.com/matrix-org/synapse/issues/12852), [\#12904](https://github.com/matrix-org/synapse/issues/12904)) -- Clean-up the push rules datastore. ([\#12856](https://github.com/matrix-org/synapse/issues/12856)) -- Correct a type annotation in the URL preview source code. ([\#12860](https://github.com/matrix-org/synapse/issues/12860)) -- Update `pyjwt` dependency to [2.4.0](https://github.com/jpadilla/pyjwt/releases/tag/2.4.0). 
([\#12865](https://github.com/matrix-org/synapse/issues/12865)) -- Enable the `/account/whoami` endpoint on synapse worker processes. Contributed by Nick @ Beeper. ([\#12866](https://github.com/matrix-org/synapse/issues/12866)) -- Enable the `batch_send` endpoint on synapse worker processes. Contributed by Nick @ Beeper. ([\#12868](https://github.com/matrix-org/synapse/issues/12868)) -- Don't generate empty AS transactions when the AS is flagged as down. Contributed by Nick @ Beeper. ([\#12869](https://github.com/matrix-org/synapse/issues/12869)) -- Fix up the variable `state_store` naming. ([\#12871](https://github.com/matrix-org/synapse/issues/12871)) -- Faster room joins: when querying the current state of the room, wait for state to be populated. ([\#12872](https://github.com/matrix-org/synapse/issues/12872)) -- Avoid running queries which will never result in deletions. ([\#12879](https://github.com/matrix-org/synapse/issues/12879)) -- Use constants for EDU types. ([\#12884](https://github.com/matrix-org/synapse/issues/12884)) -- Reduce database load of `/sync` when presence is enabled. ([\#12885](https://github.com/matrix-org/synapse/issues/12885)) -- Refactor `have_seen_events` to reduce memory consumed when processing federation traffic. ([\#12886](https://github.com/matrix-org/synapse/issues/12886)) -- Refactor receipt linearization code. ([\#12888](https://github.com/matrix-org/synapse/issues/12888)) -- Add type annotations to `synapse.logging.opentracing`. ([\#12894](https://github.com/matrix-org/synapse/issues/12894)) -- Remove PyNaCl occurrences directly used in Synapse code. ([\#12902](https://github.com/matrix-org/synapse/issues/12902)) -- Bump types-jsonschema from 4.4.1 to 4.4.6. ([\#12912](https://github.com/matrix-org/synapse/issues/12912)) -- Rename storage classes. ([\#12913](https://github.com/matrix-org/synapse/issues/12913)) -- Preparation for database schema simplifications: stop reading from `event_edges.room_id`. ([\#12914](https://github.com/matrix-org/synapse/issues/12914)) -- Check if we are in a virtual environment before overriding the `PYTHONPATH` environment variable in the demo script. ([\#12916](https://github.com/matrix-org/synapse/issues/12916)) -- Improve the logging when signature checks on events fail. ([\#12925](https://github.com/matrix-org/synapse/issues/12925)) - - -Synapse 1.60.0 (2022-05-31) -=========================== - -This release of Synapse adds a unique index to the `state_group_edges` table, in -order to prevent accidentally introducing duplicate information (for example, -because a database backup was restored multiple times). If your Synapse database -already has duplicate rows in this table, this could fail with an error and -require manual remediation. - -Additionally, the signature of the `check_event_for_spam` module callback has changed. -The previous signature has been deprecated and remains working for now. Module authors -should update their modules to use the new signature where possible. - -See [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1600) -for more details. - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.60.0rc1 that would break some imports from `synapse.module_api`. 
([\#12918](https://github.com/matrix-org/synapse/issues/12918)) - - -Synapse 1.60.0rc2 (2022-05-27) -=============================== - -Features --------- - -- Add an option allowing users to use their password to reauthenticate for privileged actions even though password login is disabled. ([\#12883](https://github.com/matrix-org/synapse/issues/12883)) - - -Bugfixes --------- - -- Explicitly close `ijson` coroutines once we are done with them, instead of leaving the garbage collector to close them. ([\#12875](https://github.com/matrix-org/synapse/issues/12875)) - - -Internal Changes ----------------- - -- Improve URL previews by not including the content of media tags in the generated description. ([\#12887](https://github.com/matrix-org/synapse/issues/12887)) - - -Synapse 1.60.0rc1 (2022-05-24) -=============================== - -Features --------- - -- Measure the time taken in spam-checking callbacks and expose those measurements as metrics. ([\#12513](https://github.com/matrix-org/synapse/issues/12513)) -- Add a `default_power_level_content_override` config option to set default room power levels per room preset. ([\#12618](https://github.com/matrix-org/synapse/issues/12618)) -- Add support for [MSC3787: Allowing knocks to restricted rooms](https://github.com/matrix-org/matrix-spec-proposals/pull/3787). ([\#12623](https://github.com/matrix-org/synapse/issues/12623)) -- Send `USER_IP` commands on a different Redis channel, in order to reduce traffic to workers that do not process these commands. ([\#12672](https://github.com/matrix-org/synapse/issues/12672), [\#12809](https://github.com/matrix-org/synapse/issues/12809)) -- Synapse will now reload [cache config](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#caching) when it receives a [SIGHUP](https://en.wikipedia.org/wiki/SIGHUP) signal. ([\#12673](https://github.com/matrix-org/synapse/issues/12673)) -- Add a config option to allow for auto-tuning of caches. ([\#12701](https://github.com/matrix-org/synapse/issues/12701)) -- Update [MSC2716](https://github.com/matrix-org/matrix-spec-proposals/pull/2716) implementation to process marker events from the current state, to avoid markers being lost in timeline gaps for federated servers, which would cause the imported history to be undiscovered. ([\#12718](https://github.com/matrix-org/synapse/issues/12718)) -- Add a `drop_federated_event` callback to `SpamChecker` to disregard inbound federated events before they take up much processing power, in an emergency. ([\#12744](https://github.com/matrix-org/synapse/issues/12744)) -- Implement [MSC3818: Copy room type on upgrade](https://github.com/matrix-org/matrix-spec-proposals/pull/3818). ([\#12786](https://github.com/matrix-org/synapse/issues/12786), [\#12792](https://github.com/matrix-org/synapse/issues/12792)) -- Update to the `check_event_for_spam` module callback. Deprecate the current callback signature, replace it with a new signature that is both less ambiguous (replacing booleans with explicit allow/block) and more powerful (ability to return explicit error codes). ([\#12808](https://github.com/matrix-org/synapse/issues/12808)) - - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.7.0 that would prevent events from being sent to clients if there's a retention policy in the room when support for retention policies is disabled.
([\#12611](https://github.com/matrix-org/synapse/issues/12611)) -- Fix a bug introduced in Synapse 1.57.0 where `/messages` would throw a 500 error when querying for a non-existent room. ([\#12683](https://github.com/matrix-org/synapse/issues/12683)) -- Add a unique index to `state_group_edges` to prevent duplicates being accidentally introduced and the consequential impact to performance. ([\#12687](https://github.com/matrix-org/synapse/issues/12687)) -- Fix a long-standing bug where an empty room would be created when a user with an insufficient power level tried to upgrade a room. ([\#12696](https://github.com/matrix-org/synapse/issues/12696)) -- Fix a bug introduced in Synapse 1.30.0 where empty rooms could be automatically created if a monthly active users limit is set. ([\#12713](https://github.com/matrix-org/synapse/issues/12713)) -- Fix push to dismiss notifications when read on another client. Contributed by @SpiritCroc @ Beeper. ([\#12721](https://github.com/matrix-org/synapse/issues/12721)) -- Fix poor database performance when reading the cache invalidation stream for large servers with lots of workers. ([\#12747](https://github.com/matrix-org/synapse/issues/12747)) -- Fix a long-standing bug where the user directory background process would fail to make forward progress if a user included a null codepoint in their display name or avatar. ([\#12762](https://github.com/matrix-org/synapse/issues/12762)) -- Delete events from the `federation_inbound_events_staging` table when a room is purged through the admin API. ([\#12770](https://github.com/matrix-org/synapse/issues/12770)) -- Give a meaningful error message when a client tries to create a room with an invalid alias localpart. ([\#12779](https://github.com/matrix-org/synapse/issues/12779)) -- Fix a bug introduced in 1.43.0 where a file (`providers.json`) was never closed. Contributed by @arkamar. ([\#12794](https://github.com/matrix-org/synapse/issues/12794)) -- Fix a long-standing bug where finished log contexts would be re-started when failing to contact remote homeservers. ([\#12803](https://github.com/matrix-org/synapse/issues/12803)) -- Fix a bug, introduced in Synapse 1.21.0, that led to media thumbnails being unusable before the index has been added in the background. ([\#12823](https://github.com/matrix-org/synapse/issues/12823)) - - -Updates to the Docker image ---------------------------- - -- Fix the docker file after a dependency update. ([\#12853](https://github.com/matrix-org/synapse/issues/12853)) - - -Improved Documentation ----------------------- - -- Fix a typo in the Media Admin API documentation. ([\#12715](https://github.com/matrix-org/synapse/issues/12715)) -- Update the OpenID Connect example for Keycloak to be compatible with newer versions of Keycloak. Contributed by @nhh. ([\#12727](https://github.com/matrix-org/synapse/issues/12727)) -- Fix typo in server listener documentation. ([\#12742](https://github.com/matrix-org/synapse/issues/12742)) -- Link to the configuration manual from the welcome page of the documentation. ([\#12748](https://github.com/matrix-org/synapse/issues/12748)) -- Fix typo in `run_background_tasks_on` option name in configuration manual documentation. ([\#12749](https://github.com/matrix-org/synapse/issues/12749)) -- Add information regarding the `rc_invites` ratelimiting option to the configuration docs. ([\#12759](https://github.com/matrix-org/synapse/issues/12759)) -- Add documentation for cancellation of request processing. 
([\#12761](https://github.com/matrix-org/synapse/issues/12761)) -- Recommend using docker to run tests against postgres. ([\#12765](https://github.com/matrix-org/synapse/issues/12765)) -- Add the missing user directory endpoint to the generic worker documentation. Contributed by @olmari. ([\#12773](https://github.com/matrix-org/synapse/issues/12773)) -- Add additional info to documentation of config option `cache_autotuning`. ([\#12776](https://github.com/matrix-org/synapse/issues/12776)) -- Update configuration manual documentation to document size-related suffixes. ([\#12777](https://github.com/matrix-org/synapse/issues/12777)) -- Fix invalid YAML syntax in the example documentation for the `url_preview_accept_language` config option. ([\#12785](https://github.com/matrix-org/synapse/issues/12785)) - - -Deprecations and Removals -------------------------- - -- Require a body in POST requests to `/rooms/{roomId}/receipt/{receiptType}/{eventId}`, as required by the [Matrix specification](https://spec.matrix.org/v1.2/client-server-api/#post_matrixclientv3roomsroomidreceiptreceipttypeeventid). This breaks compatibility with Element Android 1.2.0 and earlier: users of those clients will be unable to send read receipts. ([\#12709](https://github.com/matrix-org/synapse/issues/12709)) - - -Internal Changes ----------------- - -- Improve event caching mechanism to avoid having multiple copies of an event in memory at a time. ([\#10533](https://github.com/matrix-org/synapse/issues/10533)) -- Preparation for faster-room-join work: return subsets of room state which we already have, immediately. ([\#12498](https://github.com/matrix-org/synapse/issues/12498)) -- Add `@cancellable` decorator, for use on endpoint methods that can be cancelled when clients disconnect. ([\#12586](https://github.com/matrix-org/synapse/issues/12586), [\#12588](https://github.com/matrix-org/synapse/issues/12588), [\#12630](https://github.com/matrix-org/synapse/issues/12630), [\#12694](https://github.com/matrix-org/synapse/issues/12694), [\#12698](https://github.com/matrix-org/synapse/issues/12698), [\#12699](https://github.com/matrix-org/synapse/issues/12699), [\#12700](https://github.com/matrix-org/synapse/issues/12700), [\#12705](https://github.com/matrix-org/synapse/issues/12705)) -- Enable cancellation of `GET /rooms/$room_id/members`, `GET /rooms/$room_id/state` and `GET /rooms/$room_id/state/$event_type/*` requests. ([\#12708](https://github.com/matrix-org/synapse/issues/12708)) -- Improve documentation of the `synapse.push` module. ([\#12676](https://github.com/matrix-org/synapse/issues/12676)) -- Refactor functions onto `PushRuleEvaluatorForEvent`. ([\#12677](https://github.com/matrix-org/synapse/issues/12677)) -- Preparation for database schema simplifications: stop writing to `event_reference_hashes`. ([\#12679](https://github.com/matrix-org/synapse/issues/12679)) -- Remove code which updates unused database column `application_services_state.last_txn`. ([\#12680](https://github.com/matrix-org/synapse/issues/12680)) -- Refactor `EventContext` class. ([\#12689](https://github.com/matrix-org/synapse/issues/12689)) -- Remove an unneeded class in the push code. ([\#12691](https://github.com/matrix-org/synapse/issues/12691)) -- Consolidate parsing of relation information from events. ([\#12693](https://github.com/matrix-org/synapse/issues/12693)) -- Convert namespace class `Codes` into a string enum. ([\#12703](https://github.com/matrix-org/synapse/issues/12703)) -- Optimize private read receipt filtering.
([\#12711](https://github.com/matrix-org/synapse/issues/12711)) -- Drop the logging level of status messages for the URL preview cache expiry job from INFO to DEBUG. ([\#12720](https://github.com/matrix-org/synapse/issues/12720)) -- Downgrade some OIDC errors to warnings in the logs, to reduce the noise of Sentry reports. ([\#12723](https://github.com/matrix-org/synapse/issues/12723)) -- Update configs used by Complement to allow more invites/3PID validations during tests. ([\#12731](https://github.com/matrix-org/synapse/issues/12731)) -- Tweak the mypy plugin so that `@cached` can accept `on_invalidate=None`. ([\#12769](https://github.com/matrix-org/synapse/issues/12769)) -- Move methods that call `add_push_rule` to the `PushRuleStore` class. ([\#12772](https://github.com/matrix-org/synapse/issues/12772)) -- Make handling of federation Authorization header (more) compliant with RFC7230. ([\#12774](https://github.com/matrix-org/synapse/issues/12774)) -- Refactor `resolve_state_groups_for_events` to not pull out full state when no state resolution happens. ([\#12775](https://github.com/matrix-org/synapse/issues/12775)) -- Do not keep going if there are 5 back-to-back background update failures. ([\#12781](https://github.com/matrix-org/synapse/issues/12781)) -- Fix federation when using the demo scripts. ([\#12783](https://github.com/matrix-org/synapse/issues/12783)) -- The `hash_password` script now fails when it is called without specifying a config file. Contributed by @jae1911. ([\#12789](https://github.com/matrix-org/synapse/issues/12789)) -- Improve and fix type hints. ([\#12567](https://github.com/matrix-org/synapse/issues/12567), [\#12477](https://github.com/matrix-org/synapse/issues/12477), [\#12717](https://github.com/matrix-org/synapse/issues/12717), [\#12753](https://github.com/matrix-org/synapse/issues/12753), [\#12695](https://github.com/matrix-org/synapse/issues/12695), [\#12734](https://github.com/matrix-org/synapse/issues/12734), [\#12716](https://github.com/matrix-org/synapse/issues/12716), [\#12726](https://github.com/matrix-org/synapse/issues/12726), [\#12790](https://github.com/matrix-org/synapse/issues/12790), [\#12833](https://github.com/matrix-org/synapse/issues/12833)) -- Update EventContext `get_current_event_ids` and `get_prev_event_ids` to accept state filters and update calls where possible. ([\#12791](https://github.com/matrix-org/synapse/issues/12791)) -- Remove Caddy from the Synapse workers image used in Complement. ([\#12818](https://github.com/matrix-org/synapse/issues/12818)) -- Add Complement's shared registration secret to the Complement worker image. This fixes tests that depend on it. ([\#12819](https://github.com/matrix-org/synapse/issues/12819)) -- Support registering Application Services when running with workers under Complement. ([\#12826](https://github.com/matrix-org/synapse/issues/12826)) -- Disable 'faster room join' Complement tests when testing against Synapse with workers. ([\#12842](https://github.com/matrix-org/synapse/issues/12842)) - - -Synapse 1.59.1 (2022-05-18) -============================ - -This release fixes a long-standing issue which could prevent Synapse's user directory from updating properly. - -Bugfixes --------- - -- Fix a long-standing bug where the user directory background process would fail to make forward progress if a user included a null codepoint in their display name or avatar. Contributed by Nick @ Beeper.
([\#12762](https://github.com/matrix-org/synapse/issues/12762)) - - -Synapse 1.59.0 (2022-05-17) -=========================== - -Synapse 1.59 makes several changes that server administrators should be aware of: - -- Device name lookup over federation is now disabled by default. ([\#12616](https://github.com/matrix-org/synapse/issues/12616)) -- The `synapse.app.appservice` and `synapse.app.user_dir` worker application types are now deprecated. ([\#12452](https://github.com/matrix-org/synapse/issues/12452), [\#12654](https://github.com/matrix-org/synapse/issues/12654)) - -See [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1590) for more details. - -Additionally, this release removes the non-standard `m.login.jwt` login type from Synapse. It can be replaced with `org.matrix.login.jwt` for identical behaviour. This is only used if `jwt_config.enabled` is set to `true` in the configuration. ([\#12597](https://github.com/matrix-org/synapse/issues/12597)) - - -Bugfixes --------- - -- Fix DB performance regression introduced in Synapse 1.59.0rc2. ([\#12745](https://github.com/matrix-org/synapse/issues/12745)) - - -Synapse 1.59.0rc2 (2022-05-16) -============================== - -Note: this release candidate includes a performance regression which can cause database disruption. Other release candidates in the v1.59.0 series are not affected, and a fix will be included in the v1.59.0 final release. - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.58.0 where `/sync` would fail if the most recent event in a room was rejected. ([\#12729](https://github.com/matrix-org/synapse/issues/12729)) - - -Synapse 1.59.0rc1 (2022-05-10) -============================== - -Features --------- - -- Support [MSC3266](https://github.com/matrix-org/matrix-doc/pull/3266) room summaries over federation. ([\#11507](https://github.com/matrix-org/synapse/issues/11507)) -- Implement [changes](https://github.com/matrix-org/matrix-spec-proposals/pull/2285/commits/4a77139249c2e830aec3c7d6bd5501a514d1cc27) to [MSC2285 (hidden read receipts)](https://github.com/matrix-org/matrix-spec-proposals/pull/2285). Contributed by @SimonBrandner. ([\#12168](https://github.com/matrix-org/synapse/issues/12168), [\#12635](https://github.com/matrix-org/synapse/issues/12635), [\#12636](https://github.com/matrix-org/synapse/issues/12636), [\#12670](https://github.com/matrix-org/synapse/issues/12670)) -- Extend the [module API](https://github.com/matrix-org/synapse/blob/release-v1.59/synapse/module_api/__init__.py) to allow modules to change actions for existing push rules of local users. ([\#12406](https://github.com/matrix-org/synapse/issues/12406)) -- Add the `notify_appservices_from_worker` configuration option (superseding `notify_appservices`) to allow a generic worker to be designated as the worker to send traffic to Application Services. ([\#12452](https://github.com/matrix-org/synapse/issues/12452)) -- Add the `update_user_directory_from_worker` configuration option (superseding `update_user_directory`) to allow a generic worker to be designated as the worker to update the user directory. ([\#12654](https://github.com/matrix-org/synapse/issues/12654)) -- Add new `enable_registration_token_3pid_bypass` configuration option to allow registrations via token as an alternative to verifying a 3pid. 
([\#12526](https://github.com/matrix-org/synapse/issues/12526)) -- Implement [MSC3786](https://github.com/matrix-org/matrix-spec-proposals/pull/3786): Add a default push rule to ignore `m.room.server_acl` events. ([\#12601](https://github.com/matrix-org/synapse/issues/12601)) -- Add new `mau_appservice_trial_days` configuration option to specify a different trial period for users registered via an appservice. ([\#12619](https://github.com/matrix-org/synapse/issues/12619)) - - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.48.0 where the latest thread reply provided failed to include the proper bundled aggregations. ([\#12273](https://github.com/matrix-org/synapse/issues/12273)) -- Fix a bug introduced in Synapse 1.22.0 where attempting to send a large amount of read receipts to an application service all at once would result in duplicate content and abnormally high memory usage. Contributed by Brad & Nick @ Beeper. ([\#12544](https://github.com/matrix-org/synapse/issues/12544)) -- Fix a bug introduced in Synapse 1.57.0 which could cause `Failed to calculate hosts in room` errors to be logged for outbound federation. ([\#12570](https://github.com/matrix-org/synapse/issues/12570)) -- Fix a long-standing bug where status codes would almost always get logged as `200!`, irrespective of the actual status code, when clients disconnect before a request has finished processing. ([\#12580](https://github.com/matrix-org/synapse/issues/12580)) -- Fix race when persisting an event and deleting a room that could lead to outbound federation breaking. ([\#12594](https://github.com/matrix-org/synapse/issues/12594)) -- Fix a bug introduced in Synapse 1.53.0 where bundled aggregations for annotations/edits were incorrectly calculated. ([\#12633](https://github.com/matrix-org/synapse/issues/12633)) -- Fix a long-standing bug where rooms containing power levels with string values could not be upgraded. ([\#12657](https://github.com/matrix-org/synapse/issues/12657)) -- Prevent memory leak from reoccurring when presence is disabled. ([\#12656](https://github.com/matrix-org/synapse/issues/12656)) - - -Updates to the Docker image ---------------------------- - -- Explicitly opt-in to using [BuildKit-specific features](https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md) in the Dockerfile. This fixes issues with building images in some GitLab CI environments. ([\#12541](https://github.com/matrix-org/synapse/issues/12541)) -- Update the "Build docker images" GitHub Actions workflow to use `docker/metadata-action` to generate docker image tags, instead of a custom shell script. Contributed by @henryclw. ([\#12573](https://github.com/matrix-org/synapse/issues/12573)) - - -Improved Documentation ----------------------- - -- Update SQL statements and replace use of old table `user_stats_historical` in docs for Synapse Admins. ([\#12536](https://github.com/matrix-org/synapse/issues/12536)) -- Add missing linebreak to `pipx` install instructions. ([\#12579](https://github.com/matrix-org/synapse/issues/12579)) -- Add information about the TCP replication module to docs. ([\#12621](https://github.com/matrix-org/synapse/issues/12621)) -- Fixes to the formatting of `README.rst`. ([\#12627](https://github.com/matrix-org/synapse/issues/12627)) -- Fix docs on how to run specific Complement tests using the `complement.sh` test runner. 
([\#12664](https://github.com/matrix-org/synapse/issues/12664)) - - -Deprecations and Removals -------------------------- - -- Remove unstable identifiers from [MSC3069](https://github.com/matrix-org/matrix-doc/pull/3069). ([\#12596](https://github.com/matrix-org/synapse/issues/12596)) -- Remove the unspecified `m.login.jwt` login type and the unstable `uk.half-shot.msc2778.login.application_service` from - [MSC2778](https://github.com/matrix-org/matrix-doc/pull/2778). ([\#12597](https://github.com/matrix-org/synapse/issues/12597)) -- Synapse now requires at least Python 3.7.1 (up from 3.7.0), for compatibility with the latest Twisted trunk. ([\#12613](https://github.com/matrix-org/synapse/issues/12613)) - - -Internal Changes ----------------- - -- Use supervisord to supervise Postgres and Caddy in the Complement image to reduce restart time. ([\#12480](https://github.com/matrix-org/synapse/issues/12480)) -- Immediately retry any requests that have backed off when a server comes back online. ([\#12500](https://github.com/matrix-org/synapse/issues/12500)) -- Use `make_awaitable` instead of `defer.succeed` for return values of mocks in tests. ([\#12505](https://github.com/matrix-org/synapse/issues/12505)) -- Consistently check if an object is a `frozendict`. ([\#12564](https://github.com/matrix-org/synapse/issues/12564)) -- Protect module callbacks with read semantics against cancellation. ([\#12568](https://github.com/matrix-org/synapse/issues/12568)) -- Improve comments and error messages around access tokens. ([\#12577](https://github.com/matrix-org/synapse/issues/12577)) -- Improve docstrings for the receipts store. ([\#12581](https://github.com/matrix-org/synapse/issues/12581)) -- Use constants for read-receipts in tests. ([\#12582](https://github.com/matrix-org/synapse/issues/12582)) -- Log status code of cancelled requests as 499 and avoid logging stack traces for them. ([\#12587](https://github.com/matrix-org/synapse/issues/12587), [\#12663](https://github.com/matrix-org/synapse/issues/12663)) -- Remove special-case for `twisted` logger from default log config. ([\#12589](https://github.com/matrix-org/synapse/issues/12589)) -- Use `getClientAddress` instead of the deprecated `getClientIP`. ([\#12599](https://github.com/matrix-org/synapse/issues/12599)) -- Add link to documentation in Grafana Dashboard. ([\#12602](https://github.com/matrix-org/synapse/issues/12602)) -- Reduce log spam when running multiple event persisters. ([\#12610](https://github.com/matrix-org/synapse/issues/12610)) -- Add extra debug logging to federation sender. ([\#12614](https://github.com/matrix-org/synapse/issues/12614)) -- Prevent remote homeservers from requesting local user device names by default. ([\#12616](https://github.com/matrix-org/synapse/issues/12616)) -- Add a consistency check on events which we read from the database. ([\#12620](https://github.com/matrix-org/synapse/issues/12620)) -- Remove use of the `constantly` library and switch to enums for `EventRedactBehaviour`. Contributed by @andrewdoh. ([\#12624](https://github.com/matrix-org/synapse/issues/12624)) -- Remove unused code related to receipts. ([\#12632](https://github.com/matrix-org/synapse/issues/12632)) -- Minor improvements to the scripts for running Synapse in worker mode under Complement. ([\#12637](https://github.com/matrix-org/synapse/issues/12637)) -- Move `pympler` back in to the `all` extras. ([\#12652](https://github.com/matrix-org/synapse/issues/12652)) -- Fix spelling of `M_UNRECOGNIZED` in comments. 
([\#12665](https://github.com/matrix-org/synapse/issues/12665)) -- Release script: confirm the commit to be tagged before tagging. ([\#12556](https://github.com/matrix-org/synapse/issues/12556)) -- Fix a typo in the announcement text generated by the Synapse release development script. ([\#12612](https://github.com/matrix-org/synapse/issues/12612)) - -### Typechecking - -- Fix scripts-dev to pass typechecking. ([\#12356](https://github.com/matrix-org/synapse/issues/12356)) -- Add some type hints to datastore. ([\#12485](https://github.com/matrix-org/synapse/issues/12485)) -- Remove unused `# type: ignore`s. ([\#12531](https://github.com/matrix-org/synapse/issues/12531)) -- Allow unused `# type: ignore` comments in bleeding edge CI jobs. ([\#12576](https://github.com/matrix-org/synapse/issues/12576)) -- Remove redundant lines of config from `mypy.ini`. ([\#12608](https://github.com/matrix-org/synapse/issues/12608)) -- Update to mypy 0.950. ([\#12650](https://github.com/matrix-org/synapse/issues/12650)) -- Use `Concatenate` to better annotate `_do_execute`. ([\#12666](https://github.com/matrix-org/synapse/issues/12666)) -- Use `ParamSpec` to refine type hints. ([\#12667](https://github.com/matrix-org/synapse/issues/12667)) -- Fix mypy against latest pillow stubs. ([\#12671](https://github.com/matrix-org/synapse/issues/12671)) - -Synapse 1.58.1 (2022-05-05) -=========================== - -This patch release includes a fix to the Debian packages, installing the -`systemd` and `cache_memory` extra package groups, which were incorrectly -omitted in v1.58.0. This primarily prevented Synapse from starting -when the `systemd.journal.JournalHandler` log handler was configured. -See [#12631](https://github.com/matrix-org/synapse/issues/12631) for further information. - -Otherwise, no significant changes since 1.58.0. - - -Synapse 1.58.0 (2022-05-03) -=========================== - -As of this release, the groups/communities feature in Synapse is now disabled by default. See [\#11584](https://github.com/matrix-org/synapse/issues/11584) for details. As mentioned in [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1580), this feature will be removed in Synapse 1.61. - -No significant changes since 1.58.0rc2. - - -Synapse 1.58.0rc2 (2022-04-26) -============================== - -This release candidate fixes bugs related to Synapse 1.58.0rc1's logic for handling device list updates. - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.58.0rc1 where the main process could consume excessive amounts of CPU and memory while handling sentry logging failures. ([\#12554](https://github.com/matrix-org/synapse/issues/12554)) -- Fix a bug introduced in Synapse 1.58.0rc1 where opentracing contexts were not correctly sent to whitelisted remote servers with device lists updates. ([\#12555](https://github.com/matrix-org/synapse/issues/12555)) - - -Internal Changes ----------------- - -- Reduce unnecessary work when handling remote device list updates. ([\#12557](https://github.com/matrix-org/synapse/issues/12557)) - - -Synapse 1.58.0rc1 (2022-04-26) -============================== - -Features --------- - -- Implement [MSC3383](https://github.com/matrix-org/matrix-spec-proposals/pull/3383) for including the destination in server-to-server authentication headers. Contributed by @Bubu and @jcgruenhage for Famedly. 
([\#11398](https://github.com/matrix-org/synapse/issues/11398)) -- Docker images and Debian packages from matrix.org now contain a locked set of Python dependencies, greatly improving build reproducibility. ([Board](https://github.com/orgs/matrix-org/projects/54), [\#11537](https://github.com/matrix-org/synapse/issues/11537)) -- Enable processing of device list updates asynchronously. ([\#12365](https://github.com/matrix-org/synapse/issues/12365), [\#12465](https://github.com/matrix-org/synapse/issues/12465)) -- Implement [MSC2815](https://github.com/matrix-org/matrix-spec-proposals/pull/2815) to allow room moderators to view redacted event content. Contributed by @tulir @ Beeper. ([\#12427](https://github.com/matrix-org/synapse/issues/12427)) -- Build Debian packages for Ubuntu 22.04 "Jammy Jellyfish". ([\#12543](https://github.com/matrix-org/synapse/issues/12543)) - - -Bugfixes --------- - -- Prevent a sync request from removing a user's busy presence status. ([\#12213](https://github.com/matrix-org/synapse/issues/12213)) -- Fix bug with incremental sync missing events when rejoining/backfilling. Contributed by Nick @ Beeper. ([\#12319](https://github.com/matrix-org/synapse/issues/12319)) -- Fix a long-standing bug which incorrectly caused `GET /_matrix/client/v3/rooms/{roomId}/event/{eventId}` to return edited events rather than the original. ([\#12476](https://github.com/matrix-org/synapse/issues/12476)) -- Fix a bug introduced in Synapse 1.27.0 where the admin API for [deleting forward extremities](https://github.com/matrix-org/synapse/blob/erikj/fix_delete_event_response_count/docs/admin_api/rooms.md#deleting-forward-extremities) would always return a count of 1, no matter how many extremities were deleted. ([\#12496](https://github.com/matrix-org/synapse/issues/12496)) -- Fix a long-standing bug where the image thumbnails embedded into email notifications were broken. ([\#12510](https://github.com/matrix-org/synapse/issues/12510)) -- Fix a bug in the implementation of [MSC3202](https://github.com/matrix-org/matrix-spec-proposals/pull/3202) where Synapse would use the field name `device_unused_fallback_keys`, rather than `device_unused_fallback_key_types`. ([\#12520](https://github.com/matrix-org/synapse/issues/12520)) -- Fix a bug introduced in Synapse 0.99.3 which could cause Synapse to consume large amounts of RAM when back-paginating in a large room. ([\#12522](https://github.com/matrix-org/synapse/issues/12522)) - - -Improved Documentation ----------------------- - -- Fix rendering of the documentation site when using the 'print' feature. ([\#12340](https://github.com/matrix-org/synapse/issues/12340)) -- Add a manual documenting config file options. ([\#12368](https://github.com/matrix-org/synapse/issues/12368), [\#12527](https://github.com/matrix-org/synapse/issues/12527)) -- Update documentation to reflect that both the `run_background_tasks_on` option and the options for moving stream writers off of the main process are no longer experimental. ([\#12451](https://github.com/matrix-org/synapse/issues/12451)) -- Update worker documentation and replace old `federation_reader` with `generic_worker`. ([\#12457](https://github.com/matrix-org/synapse/issues/12457)) -- Strongly recommend [Poetry](https://python-poetry.org/) for development. ([\#12475](https://github.com/matrix-org/synapse/issues/12475)) -- Add some example configurations for workers and update architectural diagram. ([\#12492](https://github.com/matrix-org/synapse/issues/12492)) -- Fix a broken link in `README.rst`. 
([\#12495](https://github.com/matrix-org/synapse/issues/12495)) -- Add HAProxy delegation example with CORS headers to docs. ([\#12501](https://github.com/matrix-org/synapse/issues/12501)) -- Remove extraneous comma in User Admin API's device deletion section so that the example JSON is actually valid and works. Contributed by @olmari. ([\#12533](https://github.com/matrix-org/synapse/issues/12533)) - - -Deprecations and Removals -------------------------- - -- The groups/communities feature in Synapse is now disabled by default. ([\#12344](https://github.com/matrix-org/synapse/issues/12344)) -- Remove unstable identifiers from [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440). ([\#12382](https://github.com/matrix-org/synapse/issues/12382)) - - -Internal Changes ----------------- - -- Preparation for faster-room-join work: start a background process to resynchronise the room state after a room join. ([\#12394](https://github.com/matrix-org/synapse/issues/12394)) -- Preparation for faster-room-join work: Implement a tracking mechanism to allow functions to wait for full room state to arrive. ([\#12399](https://github.com/matrix-org/synapse/issues/12399)) -- Remove an unstable identifier from [MSC3083](https://github.com/matrix-org/matrix-doc/pull/3083). ([\#12395](https://github.com/matrix-org/synapse/issues/12395)) -- Run CI in the locked [Poetry](https://python-poetry.org/) environment, and remove corresponding `tox` jobs. ([\#12425](https://github.com/matrix-org/synapse/issues/12425), [\#12434](https://github.com/matrix-org/synapse/issues/12434), [\#12438](https://github.com/matrix-org/synapse/issues/12438), [\#12441](https://github.com/matrix-org/synapse/issues/12441), [\#12449](https://github.com/matrix-org/synapse/issues/12449), [\#12478](https://github.com/matrix-org/synapse/issues/12478), [\#12514](https://github.com/matrix-org/synapse/issues/12514), [\#12472](https://github.com/matrix-org/synapse/issues/12472)) -- Change Mutual Rooms' `unstable_features` flag to `uk.half-shot.msc2666.mutual_rooms` which matches the current iteration of [MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666). ([\#12445](https://github.com/matrix-org/synapse/issues/12445)) -- Fix typo in the release script help string. ([\#12450](https://github.com/matrix-org/synapse/issues/12450)) -- Fix a minor typo in the Debian changelogs generated by the release script. ([\#12497](https://github.com/matrix-org/synapse/issues/12497)) -- Reintroduce the list of targets to the linter script, to avoid linting unwanted local-only directories during development. ([\#12455](https://github.com/matrix-org/synapse/issues/12455)) -- Limit length of `device_id` to less than 512 characters. ([\#12454](https://github.com/matrix-org/synapse/issues/12454)) -- Dockerfile-workers: reduce the amount we install in the image. ([\#12464](https://github.com/matrix-org/synapse/issues/12464)) -- Dockerfile-workers: give the master its own log config. ([\#12466](https://github.com/matrix-org/synapse/issues/12466)) -- complement-synapse-workers: factor out separate entry point script. ([\#12467](https://github.com/matrix-org/synapse/issues/12467)) -- Back out experimental implementation of [MSC2314](https://github.com/matrix-org/matrix-spec-proposals/pull/2314). ([\#12474](https://github.com/matrix-org/synapse/issues/12474)) -- Fix grammatical error in federation error response when the room version of a room is unknown. 
([\#12483](https://github.com/matrix-org/synapse/issues/12483)) -- Remove unnecessary configuration overrides in tests. ([\#12511](https://github.com/matrix-org/synapse/issues/12511)) -- Refactor the relations code for clarity. ([\#12519](https://github.com/matrix-org/synapse/issues/12519)) -- Add type hints so `docker` and `stubs` directories pass `mypy --disallow-untyped-defs`. ([\#12528](https://github.com/matrix-org/synapse/issues/12528)) -- Update `delay_cancellation` to accept any awaitable, rather than just `Deferred`s. ([\#12468](https://github.com/matrix-org/synapse/issues/12468)) -- Handle cancellation in `EventsWorkerStore._get_events_from_cache_or_db`. ([\#12529](https://github.com/matrix-org/synapse/issues/12529)) - - -Synapse 1.57.1 (2022-04-20) -=========================== - -This is a patch release that only affects the Docker image. It is only of interest to administrators using [the LDAP module][LDAPModule] to authenticate their users. -If you have already upgraded to Synapse 1.57.0 without problem, then you have no need to upgrade to this patch release. - -[LDAPModule]: https://github.com/matrix-org/matrix-synapse-ldap3 - - -Updates to the Docker image ---------------------------- - -- Include version 0.2.0 of the Synapse LDAP Auth Provider module in the Docker image. This matches the version that was present in the Docker image for Synapse 1.56.0. ([\#12512](https://github.com/matrix-org/synapse/issues/12512)) - - -Synapse 1.57.0 (2022-04-19) -=========================== - -This version includes a [change](https://github.com/matrix-org/synapse/pull/12209) to the way transaction IDs are managed for application services. If your deployment uses a dedicated worker for application service traffic, **it must be stopped** when the database is upgraded (which normally happens when the main process is upgraded), to ensure the change is made safely without any risk of reusing transaction IDs. - -See the [upgrade notes](https://github.com/matrix-org/synapse/blob/v1.57.0rc1/docs/upgrade.md#upgrading-to-v1570) for more details. - -No significant changes since 1.57.0rc1. - - -Synapse 1.57.0rc1 (2022-04-12) -============================== - -Features --------- - -- Send device list changes to application services as specified by [MSC3202](https://github.com/matrix-org/matrix-spec-proposals/pull/3202), using unstable prefixes. The `msc3202_transaction_extensions` experimental homeserver config option must be enabled and `org.matrix.msc3202: true` must be present in the application service registration file for device list changes to be sent. The "left" field is currently always empty. ([\#11881](https://github.com/matrix-org/synapse/issues/11881)) -- Optimise fetching large quantities of missing room state over federation. ([\#12040](https://github.com/matrix-org/synapse/issues/12040)) -- Offload the `update_client_ip` background job from the main process to the background worker, when using Redis-based replication. ([\#12251](https://github.com/matrix-org/synapse/issues/12251)) -- Move `update_client_ip` background job from the main process to the background worker. ([\#12252](https://github.com/matrix-org/synapse/issues/12252)) -- Add a module callback to react to new 3PID (email address, phone number) associations. ([\#12302](https://github.com/matrix-org/synapse/issues/12302)) -- Add a configuration option to remove a specific set of rooms from sync responses. ([\#12310](https://github.com/matrix-org/synapse/issues/12310)) -- Add a module callback to react to account data changes. 
([\#12327](https://github.com/matrix-org/synapse/issues/12327)) -- Allow setting user admin status using the module API. Contributed by Famedly. ([\#12341](https://github.com/matrix-org/synapse/issues/12341)) -- Reduce overhead of restarting synchrotrons. ([\#12367](https://github.com/matrix-org/synapse/issues/12367), [\#12372](https://github.com/matrix-org/synapse/issues/12372)) -- Update `/messages` to use historic pagination tokens if no `from` query parameter is given. ([\#12370](https://github.com/matrix-org/synapse/issues/12370)) -- Add a module API for reading and writing global account data. ([\#12391](https://github.com/matrix-org/synapse/issues/12391)) -- Support the stable `v1` endpoint for `/relations`, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#12403](https://github.com/matrix-org/synapse/issues/12403)) -- Include bundled aggregations in search results - ([MSC3666](https://github.com/matrix-org/matrix-spec-proposals/pull/3666)). ([\#12436](https://github.com/matrix-org/synapse/issues/12436)) - - -Bugfixes --------- - -- Fix a long-standing bug where updates to the server notices user profile (display name/avatar URL) in the configuration would not be applied to pre-existing rooms. Contributed by Jorge Florian. ([\#12115](https://github.com/matrix-org/synapse/issues/12115)) -- Fix a long-standing bug where events from ignored users were still considered for bundled aggregations. ([\#12235](https://github.com/matrix-org/synapse/issues/12235), [\#12338](https://github.com/matrix-org/synapse/issues/12338)) -- Fix non-member state events not resolving for historical events when used in [MSC2716](https://github.com/matrix-org/matrix-spec-proposals/pull/2716) `/batch_send` `state_events_at_start`. ([\#12329](https://github.com/matrix-org/synapse/issues/12329)) -- Fix a long-standing bug affecting URL previews that would generate a 500 response instead of a 403 if the previewed URL includes a port that isn't allowed by the relevant blacklist. ([\#12333](https://github.com/matrix-org/synapse/issues/12333)) -- Default to `private` room visibility rather than `public` when a client does not specify one, according to spec. ([\#12350](https://github.com/matrix-org/synapse/issues/12350)) -- Fix a spec compliance issue where requests to the `/publicRooms` federation API would specify `limit` as a string. ([\#12364](https://github.com/matrix-org/synapse/issues/12364), [\#12410](https://github.com/matrix-org/synapse/issues/12410)) -- Fix a bug introduced in Synapse 1.49.0 which caused the `synapse_event_persisted_position` metric to have invalid values. ([\#12390](https://github.com/matrix-org/synapse/issues/12390)) - - -Updates to the Docker image ---------------------------- - -- Bundle locked versions of dependencies into the Docker image. ([\#12385](https://github.com/matrix-org/synapse/issues/12385), [\#12439](https://github.com/matrix-org/synapse/issues/12439)) -- Fix up healthcheck generation for workers docker image. ([\#12405](https://github.com/matrix-org/synapse/issues/12405)) - - -Improved Documentation ----------------------- - -- Clarify documentation for running SyTest against Synapse, including use of Postgres and worker mode. ([\#12271](https://github.com/matrix-org/synapse/issues/12271)) -- Document the behaviour of `LoggingTransaction.call_after` and `LoggingTransaction.call_on_exception` methods when transactions are retried. 
([\#12315](https://github.com/matrix-org/synapse/issues/12315)) -- Update dead links in `check-newsfragment.sh` to point to the correct documentation URL. ([\#12331](https://github.com/matrix-org/synapse/issues/12331)) -- Upgrade the version of `mdbook` in CI to 0.4.17. ([\#12339](https://github.com/matrix-org/synapse/issues/12339)) -- Updates to the Room DAG concepts development document to clarify that we mark events as outliers because we don't have any state for them. ([\#12345](https://github.com/matrix-org/synapse/issues/12345)) -- Update the link to Redis pub/sub documentation in the workers documentation. ([\#12369](https://github.com/matrix-org/synapse/issues/12369)) -- Remove documentation for converting a legacy structured logging configuration to the new format. ([\#12392](https://github.com/matrix-org/synapse/issues/12392)) - - -Deprecations and Removals -------------------------- - -- Remove the unused and unstable `/aggregations` endpoint which was removed from [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#12293](https://github.com/matrix-org/synapse/issues/12293)) - - -Internal Changes ----------------- - -- Remove lingering unstable references to MSC2403 (knocking). ([\#12165](https://github.com/matrix-org/synapse/issues/12165)) -- Avoid trying to calculate the state at outlier events. ([\#12191](https://github.com/matrix-org/synapse/issues/12191), [\#12316](https://github.com/matrix-org/synapse/issues/12316), [\#12330](https://github.com/matrix-org/synapse/issues/12330), [\#12332](https://github.com/matrix-org/synapse/issues/12332), [\#12409](https://github.com/matrix-org/synapse/issues/12409)) -- Omit sending "offline" presence updates to application services after they are initially configured. ([\#12193](https://github.com/matrix-org/synapse/issues/12193)) -- Switch to using a sequence to generate AS transaction IDs. Contributed by Nick @ Beeper. If running synapse with a dedicated appservice worker, this MUST be stopped before upgrading the main process and database. ([\#12209](https://github.com/matrix-org/synapse/issues/12209)) -- Add missing type hints for storage. ([\#12267](https://github.com/matrix-org/synapse/issues/12267)) -- Add missing type definitions for scripts in docker folder. Contributed by Jorge Florian. ([\#12280](https://github.com/matrix-org/synapse/issues/12280)) -- Move [MSC2654](https://github.com/matrix-org/matrix-doc/pull/2654) support behind an experimental configuration flag. ([\#12295](https://github.com/matrix-org/synapse/issues/12295)) -- Update docstrings to explain how to decipher live and historic pagination tokens. ([\#12317](https://github.com/matrix-org/synapse/issues/12317)) -- Add ground work for speeding up device list updates for users in large numbers of rooms. ([\#12321](https://github.com/matrix-org/synapse/issues/12321)) -- Fix typechecker problems exposed by signedjson 1.1.2. ([\#12326](https://github.com/matrix-org/synapse/issues/12326)) -- Remove the `tox` packaging job: it will be redundant once #11537 lands. ([\#12334](https://github.com/matrix-org/synapse/issues/12334)) -- Ignore `.envrc` for `direnv` users. ([\#12335](https://github.com/matrix-org/synapse/issues/12335)) -- Remove the (broadly unused, dev-only) dockerfile for pg tests. ([\#12336](https://github.com/matrix-org/synapse/issues/12336)) -- Remove redundant `get_success` calls in test code. ([\#12346](https://github.com/matrix-org/synapse/issues/12346)) -- Add type annotations for `tests/unittest.py`. 
([\#12347](https://github.com/matrix-org/synapse/issues/12347)) -- Move single-use methods out of `TestCase`. ([\#12348](https://github.com/matrix-org/synapse/issues/12348)) -- Remove broken and unused development scripts. ([\#12349](https://github.com/matrix-org/synapse/issues/12349), [\#12351](https://github.com/matrix-org/synapse/issues/12351), [\#12355](https://github.com/matrix-org/synapse/issues/12355)) -- Convert `Linearizer` tests from `inlineCallbacks` to async. ([\#12353](https://github.com/matrix-org/synapse/issues/12353)) -- Update docstrings for `ReadWriteLock` tests. ([\#12354](https://github.com/matrix-org/synapse/issues/12354)) -- Refactor `Linearizer`, convert methods to async and use an async context manager. ([\#12357](https://github.com/matrix-org/synapse/issues/12357)) -- Fix a long-standing bug where `Linearizer`s could get stuck if a cancellation were to happen at the wrong time. ([\#12358](https://github.com/matrix-org/synapse/issues/12358)) -- Make `StreamToken.from_string` and `RoomStreamToken.parse` propagate cancellations instead of replacing them with `SynapseError`s. ([\#12366](https://github.com/matrix-org/synapse/issues/12366)) -- Add type hints to tests files. ([\#12371](https://github.com/matrix-org/synapse/issues/12371)) -- Allow specifying the Postgres database's port when running unit tests with Postgres. ([\#12376](https://github.com/matrix-org/synapse/issues/12376)) -- Remove temporary pin of signedjson<=1.1.1 that was added in Synapse 1.56.0. ([\#12379](https://github.com/matrix-org/synapse/issues/12379)) -- Add opentracing spans to calls to external cache. ([\#12380](https://github.com/matrix-org/synapse/issues/12380)) -- Lay groundwork for using `poetry` to manage Synapse's dependencies. ([\#12381](https://github.com/matrix-org/synapse/issues/12381), [\#12407](https://github.com/matrix-org/synapse/issues/12407), [\#12412](https://github.com/matrix-org/synapse/issues/12412), [\#12418](https://github.com/matrix-org/synapse/issues/12418)) -- Make missing `importlib_metadata` dependency explicit. ([\#12384](https://github.com/matrix-org/synapse/issues/12384), [\#12400](https://github.com/matrix-org/synapse/issues/12400)) -- Update type annotations for compatiblity with prometheus_client 0.14. ([\#12389](https://github.com/matrix-org/synapse/issues/12389)) -- Remove support for the unstable identifiers specified in [MSC3288](https://github.com/matrix-org/matrix-doc/pull/3288). ([\#12398](https://github.com/matrix-org/synapse/issues/12398)) -- Add missing type hints to configuration classes. ([\#12402](https://github.com/matrix-org/synapse/issues/12402)) -- Add files used to build the Docker image used for complement testing into the Synapse repository. ([\#12404](https://github.com/matrix-org/synapse/issues/12404)) -- Do not include groups in the sync response when disabled. ([\#12408](https://github.com/matrix-org/synapse/issues/12408)) -- Improve type hints related to HTTP query parameters. ([\#12415](https://github.com/matrix-org/synapse/issues/12415)) -- Stop maintaining a list of lint targets. ([\#12420](https://github.com/matrix-org/synapse/issues/12420)) -- Make `synapse._scripts` pass type checks. ([\#12421](https://github.com/matrix-org/synapse/issues/12421), [\#12422](https://github.com/matrix-org/synapse/issues/12422)) -- Add some type hints to datastore. ([\#12423](https://github.com/matrix-org/synapse/issues/12423)) -- Enable certificate checking during complement tests. 
([\#12435](https://github.com/matrix-org/synapse/issues/12435)) -- Explicitly specify the `tls` extra for Twisted dependency. ([\#12444](https://github.com/matrix-org/synapse/issues/12444)) - - -Synapse 1.56.0 (2022-04-05) -=========================== - -Synapse will now refuse to start up if open registration is enabled, in order to help mitigate -abuse across the federation. If you would like -to provide registration to anyone, consider adding [email](https://github.com/matrix-org/synapse/blob/8a519f8abc6de772167c2cca101d22ee2052fafc/docs/sample_config.yaml#L1285), -[recaptcha](https://matrix-org.github.io/synapse/v1.56/CAPTCHA_SETUP.html) -or [token-based](https://matrix-org.github.io/synapse/v1.56/usage/administration/admin_api/registration_tokens.html) verification -in order to prevent automated registration from bad actors. -This check can be disabled by setting the `enable_registration_without_verification` option in your -homeserver configuration file to `true`. More details are available in the -[upgrade notes](https://matrix-org.github.io/synapse/v1.56/upgrade.html#open-registration-without-verification-is-now-disabled-by-default). - -Synapse will additionally now refuse to start when using PostgreSQL with non-`C` values for `COLLATE` and `CTYPE`, unless -the config flag `allow_unsafe_locale`, found in the database section of the configuration file, is set to `true`. See the -[upgrade notes](https://matrix-org.github.io/synapse/v1.56/upgrade#change-in-behaviour-for-postgresql-databases-with-unsafe-locale) -for details. - -Internal Changes ---------------- - -- Bump the version of `black` for compatibility with the latest `click` release. ([\#12320](https://github.com/matrix-org/synapse/issues/12320)) - - -Synapse 1.56.0rc1 (2022-03-29) -============================== - -Features -------- - -- Allow modules to store already existing 3PID associations. ([\#12195](https://github.com/matrix-org/synapse/issues/12195)) -- Allow registering server administrators using the module API. Contributed by Famedly. ([\#12250](https://github.com/matrix-org/synapse/issues/12250)) - - -Bugfixes -------- - -- Fix a long-standing bug which caused the `/_matrix/federation/v1/state` and `/_matrix/federation/v1/state_ids` endpoints to return incorrect or invalid data when called for an event which we have stored as an "outlier". ([\#12087](https://github.com/matrix-org/synapse/issues/12087)) -- Fix a long-standing bug where events from ignored users would still be considered for relations. ([\#12227](https://github.com/matrix-org/synapse/issues/12227), [\#12232](https://github.com/matrix-org/synapse/issues/12232), [\#12285](https://github.com/matrix-org/synapse/issues/12285)) -- Fix a bug introduced in Synapse 1.53.0 where an unnecessary query could be performed when fetching bundled aggregations for threads. ([\#12228](https://github.com/matrix-org/synapse/issues/12228)) -- Fix a bug introduced in Synapse 1.52.0 where admins could not deactivate and GDPR-erase a user if Synapse was configured with limits on avatars. ([\#12261](https://github.com/matrix-org/synapse/issues/12261)) - - -Improved Documentation ---------------------- - -- Fix the link to the module documentation in the legacy spam checker warning message. ([\#12231](https://github.com/matrix-org/synapse/issues/12231)) -- Remove incorrect prefixes in the worker documentation for some endpoints. ([\#12243](https://github.com/matrix-org/synapse/issues/12243)) -- Correct `check_username_for_spam` annotations and docs. 
([\#12246](https://github.com/matrix-org/synapse/issues/12246)) -- Correct Authentik OpenID typo, and add notes on troubleshooting. Contributed by @IronTooch. ([\#12275](https://github.com/matrix-org/synapse/issues/12275)) -- HAProxy reverse proxy guide update to stop sending IPv4-mapped address to homeserver. Contributed by @villepeh. ([\#12279](https://github.com/matrix-org/synapse/issues/12279)) - - -Internal Changes ----------------- - -- Rename `shared_rooms` to `mutual_rooms` ([MSC2666](https://github.com/matrix-org/matrix-doc/pull/2666)), as per proposal changes. ([\#12036](https://github.com/matrix-org/synapse/issues/12036)) -- Remove check on `update_user_directory` for shared rooms handler ([MSC2666](https://github.com/matrix-org/matrix-doc/pull/2666)), and update/expand documentation. ([\#12038](https://github.com/matrix-org/synapse/issues/12038)) -- Refactor `create_new_client_event` to use a new parameter, `state_event_ids`, which accurately describes the usage with [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) instead of abusing `auth_event_ids`. ([\#12083](https://github.com/matrix-org/synapse/issues/12083), [\#12304](https://github.com/matrix-org/synapse/issues/12304)) -- Refuse to start if registration is enabled without email, captcha, or token-based verification unless the new config flag `enable_registration_without_verification` is set to `true`. ([\#12091](https://github.com/matrix-org/synapse/issues/12091), [\#12322](https://github.com/matrix-org/synapse/issues/12322)) -- Add tests for database transaction callbacks. ([\#12198](https://github.com/matrix-org/synapse/issues/12198)) -- Handle cancellation in `DatabasePool.runInteraction`. ([\#12199](https://github.com/matrix-org/synapse/issues/12199)) -- Add missing type hints for cache storage. ([\#12216](https://github.com/matrix-org/synapse/issues/12216)) -- Add missing type hints for storage. ([\#12248](https://github.com/matrix-org/synapse/issues/12248), [\#12255](https://github.com/matrix-org/synapse/issues/12255)) -- Add type hints to tests files. ([\#12224](https://github.com/matrix-org/synapse/issues/12224), [\#12240](https://github.com/matrix-org/synapse/issues/12240), [\#12256](https://github.com/matrix-org/synapse/issues/12256)) -- Use type stubs for `psycopg2`. ([\#12269](https://github.com/matrix-org/synapse/issues/12269)) -- Improve type annotations for `execute_values`. ([\#12311](https://github.com/matrix-org/synapse/issues/12311)) -- Clean-up logic around rebasing URLs for URL image previews. ([\#12219](https://github.com/matrix-org/synapse/issues/12219)) -- Use the `ignored_users` table in additional places instead of re-parsing the account data. ([\#12225](https://github.com/matrix-org/synapse/issues/12225)) -- Refactor the relations endpoints to add a `RelationsHandler`. ([\#12237](https://github.com/matrix-org/synapse/issues/12237)) -- Generate announcement links in the release script. ([\#12242](https://github.com/matrix-org/synapse/issues/12242)) -- Improve error message when dependencies check finds a broken installation. ([\#12244](https://github.com/matrix-org/synapse/issues/12244)) -- Compress metrics HTTP resource when enabled. Contributed by Nick @ Beeper. ([\#12258](https://github.com/matrix-org/synapse/issues/12258)) -- Refuse to start if the PostgreSQL database has a non-`C` locale, unless the config flag `allow_unsafe_db_locale` is set to true. 
([\#12262](https://github.com/matrix-org/synapse/issues/12262), [\#12288](https://github.com/matrix-org/synapse/issues/12288)) -- Optionally include account validity expiration information to experimental [MSC3720](https://github.com/matrix-org/matrix-doc/pull/3720) account status responses. ([\#12266](https://github.com/matrix-org/synapse/issues/12266)) -- Add a new cache `_get_membership_from_event_id` to speed up push rule calculations in large rooms. ([\#12272](https://github.com/matrix-org/synapse/issues/12272)) -- Re-enable Complement concurrency in CI. ([\#12283](https://github.com/matrix-org/synapse/issues/12283)) -- Remove unused test utilities. ([\#12291](https://github.com/matrix-org/synapse/issues/12291)) -- Enhance logging for inbound federation events. ([\#12301](https://github.com/matrix-org/synapse/issues/12301)) -- Fix compatibility with the recently-released Jinja 3.1. ([\#12313](https://github.com/matrix-org/synapse/issues/12313)) -- Avoid trying to calculate the state at outlier events. ([\#12314](https://github.com/matrix-org/synapse/issues/12314)) - - -Synapse 1.55.2 (2022-03-24) -=========================== - -This patch version reverts the earlier fixes from Synapse 1.55.1, which could cause problems in certain deployments, and instead adds a cap to the version of Jinja to be installed. Again, this is to fix an incompatibility with version 3.1.0 of the [Jinja](https://pypi.org/project/Jinja2/) library, and again, deployments of Synapse using the `matrixdotorg/synapse` Docker image or Debian packages from packages.matrix.org are not affected. - -Internal Changes ----------------- - -- Pin Jinja to <3.1.0, as Synapse fails to start with Jinja 3.1.0. ([\#12297](https://github.com/matrix-org/synapse/issues/12297)) -- Revert changes from 1.55.1 as they caused problems with older versions of Jinja ([\#12296](https://github.com/matrix-org/synapse/issues/12296)) - - -Synapse 1.55.1 (2022-03-24) -=========================== - -This is a patch release that fixes an incompatibility with version 3.1.0 of the [Jinja](https://pypi.org/project/Jinja2/) library, released on March 24th, 2022. Deployments of Synapse using the `matrixdotorg/synapse` Docker image or Debian packages from packages.matrix.org are not affected. - -Internal Changes ----------------- - -- Remove uses of the long-deprecated `jinja2.Markup` which would prevent Synapse from starting with Jinja 3.1.0 or above installed. ([\#12289](https://github.com/matrix-org/synapse/issues/12289)) - - -Synapse 1.55.0 (2022-03-22) -=========================== - -This release removes a workaround introduced in Synapse 1.50.0 for Mjolnir compatibility. **This breaks compatibility with Mjolnir 1.3.1 and earlier. ([\#11700](https://github.com/matrix-org/synapse/issues/11700))**; Mjolnir users should upgrade Mjolnir before upgrading Synapse to this version. - -This release also moves the location of the `synctl` script; see the [upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#synctl-script-has-been-moved) for more details. - - -Internal Changes ----------------- - -- Tweak copy for default Single Sign-On account details template to better adhere to mobile app store guidelines. ([\#12265](https://github.com/matrix-org/synapse/issues/12265), [\#12260](https://github.com/matrix-org/synapse/issues/12260)) - - -Synapse 1.55.0rc1 (2022-03-15) -============================== - -Features --------- - -- Add third-party rules callbacks `check_can_shutdown_room` and `check_can_deactivate_user`. 
([\#12028](https://github.com/matrix-org/synapse/issues/12028)) -- Improve performance of logging in for large accounts. ([\#12132](https://github.com/matrix-org/synapse/issues/12132)) -- Add experimental env var `SYNAPSE_ASYNC_IO_REACTOR` that causes Synapse to use the asyncio reactor for Twisted. ([\#12135](https://github.com/matrix-org/synapse/issues/12135)) -- Support the stable identifiers from [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440): threads. ([\#12151](https://github.com/matrix-org/synapse/issues/12151)) -- Add a new Jinja2 template filter to extract the local part of an email address. ([\#12212](https://github.com/matrix-org/synapse/issues/12212)) - - -Bugfixes -------- - -- Use the proper serialization format for bundled thread aggregations. The bug has existed since Synapse 1.48.0. ([\#12090](https://github.com/matrix-org/synapse/issues/12090)) -- Fix a long-standing bug when redacting events with relations. ([\#12113](https://github.com/matrix-org/synapse/issues/12113), [\#12121](https://github.com/matrix-org/synapse/issues/12121), [\#12130](https://github.com/matrix-org/synapse/issues/12130), [\#12189](https://github.com/matrix-org/synapse/issues/12189)) -- Fix a bug introduced in Synapse 1.7.2 whereby background updates are never run with the default background batch size. ([\#12157](https://github.com/matrix-org/synapse/issues/12157)) -- Fix a bug where non-standard information was returned from the `/hierarchy` API. Introduced in Synapse 1.41.0. ([\#12175](https://github.com/matrix-org/synapse/issues/12175)) -- Fix a bug introduced in Synapse 1.54.0 that broke background updates on sqlite homeservers while search was disabled. ([\#12215](https://github.com/matrix-org/synapse/issues/12215)) -- Fix a long-standing bug where a `filter` argument with `event_fields` which did not include the `unsigned` field could result in a 500 error on `/sync`. ([\#12234](https://github.com/matrix-org/synapse/issues/12234)) - - -Improved Documentation ---------------------- - -- Fix complexity checking config example in [Resource Constrained Devices](https://matrix-org.github.io/synapse/v1.54/other/running_synapse_on_single_board_computers.html) docs page. ([\#11998](https://github.com/matrix-org/synapse/issues/11998)) -- Improve documentation for demo scripts. ([\#12143](https://github.com/matrix-org/synapse/issues/12143)) -- Updates to the Room DAG concepts development document. ([\#12179](https://github.com/matrix-org/synapse/issues/12179)) -- Document that the `typing`, `to_device`, `account_data`, `receipts`, and `presence` stream writer can only be used on a single worker. ([\#12196](https://github.com/matrix-org/synapse/issues/12196)) -- Document that contributors can sign off privately by email. ([\#12204](https://github.com/matrix-org/synapse/issues/12204)) - - -Deprecations and Removals ------------------------- - -- **Remove workaround introduced in Synapse 1.50.0 for Mjolnir compatibility. Breaks compatibility with Mjolnir 1.3.1 and earlier. ([\#11700](https://github.com/matrix-org/synapse/issues/11700))** -- **`synctl` has been moved into `synapse._scripts` and is exposed as an entry point; see [upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#synctl-script-has-been-moved). ([\#12140](https://github.com/matrix-org/synapse/issues/12140))** -- Remove backwards compatibility with pagination tokens from the `/relations` and `/aggregations` endpoints generated from Synapse < v1.52.0. 
([\#12138](https://github.com/matrix-org/synapse/issues/12138)) -- The groups/communities feature in Synapse has been deprecated. ([\#12200](https://github.com/matrix-org/synapse/issues/12200)) - - -Internal Changes ----------------- - -- Simplify the `ApplicationService` class' set of public methods related to interest checking. ([\#11915](https://github.com/matrix-org/synapse/issues/11915)) -- Add config settings for background update parameters. ([\#11980](https://github.com/matrix-org/synapse/issues/11980)) -- Correct type hints for txredis. ([\#12042](https://github.com/matrix-org/synapse/issues/12042)) -- Limit the size of `aggregation_key` on annotations. ([\#12101](https://github.com/matrix-org/synapse/issues/12101)) -- Add type hints to tests files. ([\#12108](https://github.com/matrix-org/synapse/issues/12108), [\#12146](https://github.com/matrix-org/synapse/issues/12146), [\#12207](https://github.com/matrix-org/synapse/issues/12207), [\#12208](https://github.com/matrix-org/synapse/issues/12208)) -- Move scripts to Synapse package and expose as setuptools entry points. ([\#12118](https://github.com/matrix-org/synapse/issues/12118)) -- Add support for cancellation to `ReadWriteLock`. ([\#12120](https://github.com/matrix-org/synapse/issues/12120)) -- Fix data validation to compare to lists, not sequences. ([\#12128](https://github.com/matrix-org/synapse/issues/12128)) -- Fix CI not attaching source distributions and wheels to the GitHub releases. ([\#12131](https://github.com/matrix-org/synapse/issues/12131)) -- Remove unused mocks from `test_typing`. ([\#12136](https://github.com/matrix-org/synapse/issues/12136)) -- Give `scripts-dev` scripts suffixes for neater CI config. ([\#12137](https://github.com/matrix-org/synapse/issues/12137)) -- Move the snapcraft configuration file to `contrib`. ([\#12142](https://github.com/matrix-org/synapse/issues/12142)) -- Enable [MSC3030](https://github.com/matrix-org/matrix-doc/pull/3030) Complement tests in CI. ([\#12144](https://github.com/matrix-org/synapse/issues/12144)) -- Enable [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) Complement tests in CI. ([\#12145](https://github.com/matrix-org/synapse/issues/12145)) -- Add test for `ObservableDeferred`'s cancellation behaviour. ([\#12149](https://github.com/matrix-org/synapse/issues/12149)) -- Use `ParamSpec` in type hints for `synapse.logging.context`. ([\#12150](https://github.com/matrix-org/synapse/issues/12150)) -- Prune unused jobs from `tox` config. ([\#12152](https://github.com/matrix-org/synapse/issues/12152)) -- Move CI checks out of tox, to facilitate a move to using poetry. ([\#12153](https://github.com/matrix-org/synapse/issues/12153)) -- Avoid generating state groups for local out-of-band leaves. ([\#12154](https://github.com/matrix-org/synapse/issues/12154)) -- Avoid trying to calculate the state at outlier events. ([\#12155](https://github.com/matrix-org/synapse/issues/12155), [\#12173](https://github.com/matrix-org/synapse/issues/12173), [\#12202](https://github.com/matrix-org/synapse/issues/12202)) -- Fix some type annotations. ([\#12156](https://github.com/matrix-org/synapse/issues/12156)) -- Add type hints for `ObservableDeferred` attributes. ([\#12159](https://github.com/matrix-org/synapse/issues/12159)) -- Use a prebuilt Action for the `tests-done` CI job. ([\#12161](https://github.com/matrix-org/synapse/issues/12161)) -- Reduce number of DB queries made during processing of `/sync`. 
([\#12163](https://github.com/matrix-org/synapse/issues/12163)) -- Add `delay_cancellation` utility function, which behaves like `stop_cancellation` but waits until the original `Deferred` resolves before raising a `CancelledError`. ([\#12180](https://github.com/matrix-org/synapse/issues/12180)) -- Retry HTTP replication failures, this should prevent 502's when restarting stateful workers (main, event persisters, stream writers). Contributed by Nick @ Beeper. ([\#12182](https://github.com/matrix-org/synapse/issues/12182)) -- Add cancellation support to `@cached` and `@cachedList` decorators. ([\#12183](https://github.com/matrix-org/synapse/issues/12183)) -- Remove unused variables. ([\#12187](https://github.com/matrix-org/synapse/issues/12187)) -- Add combined test for HTTP pusher and push rule. Contributed by Nick @ Beeper. ([\#12188](https://github.com/matrix-org/synapse/issues/12188)) -- Rename `HomeServer.get_tcp_replication` to `get_replication_command_handler`. ([\#12192](https://github.com/matrix-org/synapse/issues/12192)) -- Remove some dead code. ([\#12197](https://github.com/matrix-org/synapse/issues/12197)) -- Fix a misleading comment in the function `check_event_for_spam`. ([\#12203](https://github.com/matrix-org/synapse/issues/12203)) -- Remove unnecessary `pass` statements. ([\#12206](https://github.com/matrix-org/synapse/issues/12206)) -- Update the SSO username picker template to comply with SIWA guidelines. ([\#12210](https://github.com/matrix-org/synapse/issues/12210)) -- Improve code documentation for the typing stream over replication. ([\#12211](https://github.com/matrix-org/synapse/issues/12211)) - - -Synapse 1.54.0 (2022-03-08) -=========================== - -Please note that this will be the last release of Synapse that is compatible with Mjolnir 1.3.1 and earlier. -Administrators of servers which have the Mjolnir module installed are advised to upgrade Mjolnir to version 1.3.2 or later. - - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.54.0rc1 preventing the new module callbacks introduced in this release from being registered by modules. ([\#12141](https://github.com/matrix-org/synapse/issues/12141)) -- Fix a bug introduced in Synapse 1.54.0rc1 where runtime dependency version checks would mistakenly check development dependencies if they were present and would not accept pre-release versions of dependencies. ([\#12129](https://github.com/matrix-org/synapse/issues/12129), [\#12177](https://github.com/matrix-org/synapse/issues/12177)) - - -Internal Changes ----------------- - -- Update release script to insert the previous version when writing "No significant changes" line in the changelog. ([\#12127](https://github.com/matrix-org/synapse/issues/12127)) -- Relax the version guard for "packaging" added in [\#12088](https://github.com/matrix-org/synapse/issues/12088). ([\#12166](https://github.com/matrix-org/synapse/issues/12166)) - - -Synapse 1.54.0rc1 (2022-03-02) -============================== - - -Features --------- - -- Add support for [MSC3202](https://github.com/matrix-org/matrix-doc/pull/3202): sending one-time key counts and fallback key usage states to Application Services. ([\#11617](https://github.com/matrix-org/synapse/issues/11617)) -- Improve the generated URL previews for some web pages. Contributed by @AndrewRyanChama. ([\#11985](https://github.com/matrix-org/synapse/issues/11985)) -- Track cache invalidations in Prometheus metrics, as already happens for cache eviction based on size or time. 
([\#12000](https://github.com/matrix-org/synapse/issues/12000)) -- Implement experimental support for [MSC3720](https://github.com/matrix-org/matrix-doc/pull/3720) (account status endpoints). ([\#12001](https://github.com/matrix-org/synapse/issues/12001), [\#12067](https://github.com/matrix-org/synapse/issues/12067)) -- Enable modules to set a custom display name when registering a user. ([\#12009](https://github.com/matrix-org/synapse/issues/12009)) -- Advertise Matrix 1.1 and 1.2 support on `/_matrix/client/versions`. ([\#12020](https://github.com/matrix-org/synapse/issues/12020), ([\#12022](https://github.com/matrix-org/synapse/issues/12022)) -- Support only the stable identifier for [MSC3069](https://github.com/matrix-org/matrix-doc/pull/3069)'s `is_guest` on `/_matrix/client/v3/account/whoami`. ([\#12021](https://github.com/matrix-org/synapse/issues/12021)) -- Use room version 9 as the default room version (per [MSC3589](https://github.com/matrix-org/matrix-doc/pull/3589)). ([\#12058](https://github.com/matrix-org/synapse/issues/12058)) -- Add module callbacks to react to user deactivation status changes (i.e. deactivations and reactivations) and profile updates. ([\#12062](https://github.com/matrix-org/synapse/issues/12062)) - - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.48.0 where an edit of the latest event in a thread would not be properly applied to the thread summary. ([\#11992](https://github.com/matrix-org/synapse/issues/11992)) -- Fix long-standing bug where the `get_rooms_for_user` cache was not correctly invalidated for remote users when the server left a room. ([\#11999](https://github.com/matrix-org/synapse/issues/11999)) -- Fix a 500 error with Postgres when looking backwards with the [MSC3030](https://github.com/matrix-org/matrix-doc/pull/3030) `/timestamp_to_event?dir=b` endpoint. ([\#12024](https://github.com/matrix-org/synapse/issues/12024)) -- Properly fix a long-standing bug where wrong data could be inserted into the `event_search` table when using SQLite. This could block running `synapse_port_db` with an `argument of type 'int' is not iterable` error. This bug was partially fixed by a change in Synapse 1.44.0. ([\#12037](https://github.com/matrix-org/synapse/issues/12037)) -- Fix slow performance of `/logout` in some cases where refresh tokens are in use. The slowness existed since the initial implementation of refresh tokens in version 1.38.0. ([\#12056](https://github.com/matrix-org/synapse/issues/12056)) -- Fix a long-standing bug where Synapse would make additional failing requests over federation for missing data. ([\#12077](https://github.com/matrix-org/synapse/issues/12077)) -- Fix occasional `Unhandled error in Deferred` error message. ([\#12089](https://github.com/matrix-org/synapse/issues/12089)) -- Fix a bug introduced in Synapse 1.51.0 where incoming federation transactions containing at least one EDU would be dropped if debug logging was enabled for `synapse.8631_debug`. ([\#12098](https://github.com/matrix-org/synapse/issues/12098)) -- Fix a long-standing bug which could cause push notifications to malfunction if `use_frozen_dicts` was set in the configuration. ([\#12100](https://github.com/matrix-org/synapse/issues/12100)) -- Fix an extremely rare, long-standing bug in `ReadWriteLock` that would cause an error when a newly unblocked writer completes instantly. 
([\#12105](https://github.com/matrix-org/synapse/issues/12105)) -- Make a `POST` to `/rooms//receipt/m.read/` only trigger a push notification if the count of unread messages is different to the one in the last successfully sent push. This reduces server load and load on the receiving device. ([\#11835](https://github.com/matrix-org/synapse/issues/11835)) - - -Updates to the Docker image --------------------------- - -- The Docker image no longer automatically creates a temporary volume at `/data`. This is not expected to affect normal usage. ([\#11997](https://github.com/matrix-org/synapse/issues/11997)) -- Use Python 3.9 in Docker images by default. ([\#12112](https://github.com/matrix-org/synapse/issues/12112)) - - -Improved Documentation ---------------------- - -- Document support for the `to_device`, `account_data`, `receipts`, and `presence` stream writers for workers. ([\#11599](https://github.com/matrix-org/synapse/issues/11599)) -- Explain the meaning of spam checker callbacks' return values. ([\#12003](https://github.com/matrix-org/synapse/issues/12003)) -- Clarify information about external Identity Provider IDs. ([\#12004](https://github.com/matrix-org/synapse/issues/12004)) - - -Deprecations and Removals ------------------------- - -- Deprecate using `synctl` with the config option `synctl_cache_factor` and print a warning if a user still uses this option. ([\#11865](https://github.com/matrix-org/synapse/issues/11865)) -- Remove support for the legacy structured logging configuration (please see the [upgrade notes](https://matrix-org.github.io/synapse/develop/upgrade#legacy-structured-logging-configuration-removal) if you are using `structured: true` in the Synapse configuration). ([\#12008](https://github.com/matrix-org/synapse/issues/12008)) -- Drop support for [MSC3283](https://github.com/matrix-org/matrix-doc/pull/3283) unstable flags now that the stable flags are supported. ([\#12018](https://github.com/matrix-org/synapse/issues/12018)) -- Remove the unstable `/spaces` endpoint from [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946). ([\#12073](https://github.com/matrix-org/synapse/issues/12073)) - - -Internal Changes ---------------- - -- Make the `get_room_version` method use `get_room_version_id` to benefit from caching. ([\#11808](https://github.com/matrix-org/synapse/issues/11808)) -- Remove unnecessary condition on knock -> leave auth rule check. ([\#11900](https://github.com/matrix-org/synapse/issues/11900)) -- Add tests for device list changes between local users. ([\#11972](https://github.com/matrix-org/synapse/issues/11972)) -- Optimise calculating `device_list` changes in `/sync`. ([\#11974](https://github.com/matrix-org/synapse/issues/11974)) -- Add missing type hints to storage classes. ([\#11984](https://github.com/matrix-org/synapse/issues/11984)) -- Refactor the search code for improved readability. ([\#11991](https://github.com/matrix-org/synapse/issues/11991)) -- Move common deduplication code down into `_auth_and_persist_outliers`. ([\#11994](https://github.com/matrix-org/synapse/issues/11994)) -- Limit concurrent joins from application services. ([\#11996](https://github.com/matrix-org/synapse/issues/11996)) -- Preparation for faster-room-join work: when parsing the `send_join` response, get the `m.room.create` event from `state`, not `auth_chain`. 
([\#12005](https://github.com/matrix-org/synapse/issues/12005), [\#12039](https://github.com/matrix-org/synapse/issues/12039)) -- Preparation for faster-room-join work: parse MSC3706 fields in send_join response. ([\#12011](https://github.com/matrix-org/synapse/issues/12011)) -- Preparation for faster-room-join work: persist information on which events and rooms have partial state to the database. ([\#12012](https://github.com/matrix-org/synapse/issues/12012)) -- Preparation for faster-room-join work: Support for calling `/federation/v1/state` on a remote server. ([\#12013](https://github.com/matrix-org/synapse/issues/12013)) -- Configure `tox` to use `venv` rather than `virtualenv`. ([\#12015](https://github.com/matrix-org/synapse/issues/12015)) -- Fix bug in `StateFilter.return_expanded()` and add some tests. ([\#12016](https://github.com/matrix-org/synapse/issues/12016)) -- Use Matrix v1.1 endpoints (`/_matrix/client/v3/auth/...`) in fallback auth HTML forms. ([\#12019](https://github.com/matrix-org/synapse/issues/12019)) -- Update the `olddeps` CI job to use an old version of `markupsafe`. ([\#12025](https://github.com/matrix-org/synapse/issues/12025)) -- Upgrade Mypy to version 0.931. ([\#12030](https://github.com/matrix-org/synapse/issues/12030)) -- Remove legacy `HomeServer.get_datastore()`. ([\#12031](https://github.com/matrix-org/synapse/issues/12031), [\#12070](https://github.com/matrix-org/synapse/issues/12070)) -- Minor typing fixes. ([\#12034](https://github.com/matrix-org/synapse/issues/12034), [\#12069](https://github.com/matrix-org/synapse/issues/12069)) -- After joining a room, create a dedicated logcontext to process the queued events. ([\#12041](https://github.com/matrix-org/synapse/issues/12041)) -- Tidy up GitHub Actions config which builds distributions for PyPI. ([\#12051](https://github.com/matrix-org/synapse/issues/12051)) -- Move configuration out of `setup.cfg`. ([\#12052](https://github.com/matrix-org/synapse/issues/12052), [\#12059](https://github.com/matrix-org/synapse/issues/12059)) -- Fix error message when a worker process fails to talk to another worker process. ([\#12060](https://github.com/matrix-org/synapse/issues/12060)) -- Fix using the `complement.sh` script without specifying a directory or a branch. Contributed by Nico on behalf of Famedly. ([\#12063](https://github.com/matrix-org/synapse/issues/12063)) -- Add type hints to `tests/rest/client`. ([\#12066](https://github.com/matrix-org/synapse/issues/12066), [\#12072](https://github.com/matrix-org/synapse/issues/12072), [\#12084](https://github.com/matrix-org/synapse/issues/12084), [\#12094](https://github.com/matrix-org/synapse/issues/12094)) -- Add some logging to `/sync` to try and track down #11916. ([\#12068](https://github.com/matrix-org/synapse/issues/12068)) -- Inspect application dependencies using `importlib.metadata` or its backport. ([\#12088](https://github.com/matrix-org/synapse/issues/12088)) -- Use `assertEqual` instead of the deprecated `assertEquals` in test code. ([\#12092](https://github.com/matrix-org/synapse/issues/12092)) -- Move experimental support for [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440) to `/versions`. ([\#12099](https://github.com/matrix-org/synapse/issues/12099)) -- Add `stop_cancellation` utility function to stop `Deferred`s from being cancelled. ([\#12106](https://github.com/matrix-org/synapse/issues/12106)) -- Improve exception handling for concurrent execution. 
([\#12109](https://github.com/matrix-org/synapse/issues/12109)) -- Advertise support for Python 3.10 in packaging files. ([\#12111](https://github.com/matrix-org/synapse/issues/12111)) -- Move CI checks out of tox, to facilitate a move to using poetry. ([\#12119](https://github.com/matrix-org/synapse/issues/12119)) - - -Synapse 1.53.0 (2022-02-22) -=========================== - -No significant changes since 1.53.0rc1. - - -Synapse 1.53.0rc1 (2022-02-15) -============================== - -Features --------- - -- Add experimental support for sending to-device messages to application services, as specified by [MSC2409](https://github.com/matrix-org/matrix-doc/pull/2409). ([\#11215](https://github.com/matrix-org/synapse/issues/11215), [\#11966](https://github.com/matrix-org/synapse/issues/11966)) -- Add a background database update to purge account data for deactivated users. ([\#11655](https://github.com/matrix-org/synapse/issues/11655)) -- Experimental support for [MSC3666](https://github.com/matrix-org/matrix-doc/pull/3666): including bundled aggregations in server side search results. ([\#11837](https://github.com/matrix-org/synapse/issues/11837)) -- Enable cache time-based expiry by default. The `expiry_time` config flag has been superseded by `expire_caches` and `cache_entry_ttl`. ([\#11849](https://github.com/matrix-org/synapse/issues/11849)) -- Add a callback to allow modules to allow or forbid a 3PID (email address, phone number) from being associated to a local account. ([\#11854](https://github.com/matrix-org/synapse/issues/11854)) -- Stabilize support and remove unstable endpoints for [MSC3231](https://github.com/matrix-org/matrix-doc/pull/3231). Clients must switch to the stable identifier and endpoint. See the [upgrade notes](https://matrix-org.github.io/synapse/develop/upgrade#stablisation-of-msc3231) for more information. ([\#11867](https://github.com/matrix-org/synapse/issues/11867)) -- Allow modules to retrieve the current instance's server name and worker name. ([\#11868](https://github.com/matrix-org/synapse/issues/11868)) -- Use a dedicated configurable rate limiter for 3PID invites. ([\#11892](https://github.com/matrix-org/synapse/issues/11892)) -- Support the stable API endpoint for [MSC3283](https://github.com/matrix-org/matrix-doc/pull/3283): new settings in `/capabilities` endpoint. ([\#11933](https://github.com/matrix-org/synapse/issues/11933), [\#11989](https://github.com/matrix-org/synapse/issues/11989)) -- Support the `dir` parameter on the `/relations` endpoint, per [MSC3715](https://github.com/matrix-org/matrix-doc/pull/3715). ([\#11941](https://github.com/matrix-org/synapse/issues/11941)) -- Experimental implementation of [MSC3706](https://github.com/matrix-org/matrix-doc/pull/3706): extensions to `/send_join` to support reduced response size. ([\#11967](https://github.com/matrix-org/synapse/issues/11967)) - - -Bugfixes --------- - -- Fix [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) historical messages backfilling in random order on remote homeservers. ([\#11114](https://github.com/matrix-org/synapse/issues/11114)) -- Fix a bug introduced in Synapse 1.51.0 where incoming federation transactions containing at least one EDU would be dropped if debug logging was enabled for `synapse.8631_debug`. ([\#11890](https://github.com/matrix-org/synapse/issues/11890)) -- Fix a long-standing bug where some unknown endpoints would return HTML error pages instead of JSON `M_UNRECOGNIZED` errors. 
([\#11930](https://github.com/matrix-org/synapse/issues/11930)) -- Implement an allow list of content types for which we will attempt to preview a URL. This prevents Synapse from making useless longer-lived connections to streaming media servers. ([\#11936](https://github.com/matrix-org/synapse/issues/11936)) -- Fix a long-standing bug where pagination tokens from `/sync` and `/messages` could not be provided to the `/relations` API. ([\#11952](https://github.com/matrix-org/synapse/issues/11952)) -- Require that modules register their callbacks using keyword arguments. ([\#11975](https://github.com/matrix-org/synapse/issues/11975)) -- Fix a long-standing bug where `M_WRONG_ROOM_KEYS_VERSION` errors would not include the specced `current_version` field. ([\#11988](https://github.com/matrix-org/synapse/issues/11988)) - - -Improved Documentation ----------------------- - -- Fix typo in User Admin API: unpind -> unbind. ([\#11859](https://github.com/matrix-org/synapse/issues/11859)) -- Document images returned by the User List Media Admin API can include those generated by URL previews. ([\#11862](https://github.com/matrix-org/synapse/issues/11862)) -- Remove outdated MSC1711 FAQ document. ([\#11907](https://github.com/matrix-org/synapse/issues/11907)) -- Correct the structured logging configuration example. Contributed by Brad Jones. ([\#11946](https://github.com/matrix-org/synapse/issues/11946)) -- Add information on the Synapse release cycle. ([\#11954](https://github.com/matrix-org/synapse/issues/11954)) -- Fix broken link in the README to the admin API for password reset. ([\#11955](https://github.com/matrix-org/synapse/issues/11955)) - - -Deprecations and Removals -------------------------- - -- Drop support for `webclient` listeners and configuring `web_client_location` to a non-HTTP(S) URL. Deprecated configurations are a configuration error. ([\#11895](https://github.com/matrix-org/synapse/issues/11895)) -- Remove deprecated `user_may_create_room_with_invites` spam checker callback. See the [upgrade notes](https://matrix-org.github.io/synapse/latest/upgrade.html#removal-of-user_may_create_room_with_invites) for more information. ([\#11950](https://github.com/matrix-org/synapse/issues/11950)) -- No longer build `.deb` packages for Ubuntu 21.04 Hirsute Hippo, which has now EOLed. ([\#11961](https://github.com/matrix-org/synapse/issues/11961)) - - -Internal Changes ----------------- - -- Enhance user registration test helpers to make them more useful for tests involving application services and devices. ([\#11615](https://github.com/matrix-org/synapse/issues/11615), [\#11616](https://github.com/matrix-org/synapse/issues/11616)) -- Improve performance when fetching bundled aggregations for multiple events. ([\#11660](https://github.com/matrix-org/synapse/issues/11660), [\#11752](https://github.com/matrix-org/synapse/issues/11752)) -- Fix type errors introduced by new annotations in the Prometheus Client library. ([\#11832](https://github.com/matrix-org/synapse/issues/11832)) -- Add missing type hints to replication code. ([\#11856](https://github.com/matrix-org/synapse/issues/11856), [\#11938](https://github.com/matrix-org/synapse/issues/11938)) -- Ensure that `opentracing` scopes are activated and closed at the right time. ([\#11869](https://github.com/matrix-org/synapse/issues/11869)) -- Improve opentracing for incoming federation requests. ([\#11870](https://github.com/matrix-org/synapse/issues/11870)) -- Improve internal docstrings in `synapse.util.caches`. 
([\#11876](https://github.com/matrix-org/synapse/issues/11876)) -- Do not needlessly clear the `get_users_in_room` and `get_users_in_room_with_profiles` caches when any room state changes. ([\#11878](https://github.com/matrix-org/synapse/issues/11878)) -- Convert `ApplicationServiceTestCase` to use `simple_async_mock`. ([\#11880](https://github.com/matrix-org/synapse/issues/11880)) -- Remove experimental changes to the default push rules which were introduced in Synapse 1.19.0 but never enabled. ([\#11884](https://github.com/matrix-org/synapse/issues/11884)) -- Disable coverage calculation for olddeps build. ([\#11888](https://github.com/matrix-org/synapse/issues/11888)) -- Preparation to support sending device list updates to application services. ([\#11905](https://github.com/matrix-org/synapse/issues/11905)) -- Add a test that checks users receive their own device list updates down `/sync`. ([\#11909](https://github.com/matrix-org/synapse/issues/11909)) -- Run Complement tests sequentially. ([\#11910](https://github.com/matrix-org/synapse/issues/11910)) -- Various refactors to the application service notifier code. ([\#11911](https://github.com/matrix-org/synapse/issues/11911), [\#11912](https://github.com/matrix-org/synapse/issues/11912)) -- Tests: replace mocked `Authenticator` with the real thing. ([\#11913](https://github.com/matrix-org/synapse/issues/11913)) -- Various refactors to the typing notifications code. ([\#11914](https://github.com/matrix-org/synapse/issues/11914)) -- Use the proper type for the `Content-Length` header in the `UploadResource`. ([\#11927](https://github.com/matrix-org/synapse/issues/11927)) -- Remove an unnecessary ignoring of type hints due to fixes in upstream packages. ([\#11939](https://github.com/matrix-org/synapse/issues/11939)) -- Add missing type hints. ([\#11953](https://github.com/matrix-org/synapse/issues/11953)) -- Fix an import cycle in `synapse.event_auth`. ([\#11965](https://github.com/matrix-org/synapse/issues/11965)) -- Unpin `frozendict` but exclude the known bad version 2.1.2. ([\#11969](https://github.com/matrix-org/synapse/issues/11969)) -- Prepare for rename of default Complement branch. ([\#11971](https://github.com/matrix-org/synapse/issues/11971)) -- Fetch Synapse's version using a helper from `matrix-common`. ([\#11979](https://github.com/matrix-org/synapse/issues/11979)) - - -Synapse 1.52.0 (2022-02-08) -=========================== - -No significant changes since 1.52.0rc1. - -Note that [Twisted 22.1.0](https://github.com/twisted/twisted/releases/tag/twisted-22.1.0) -has recently been released, which fixes a [security issue](https://github.com/twisted/twisted/security/advisories/GHSA-92x2-jw7w-xvvx) -within the Twisted library. We do not believe Synapse is affected by this vulnerability, -though we advise server administrators who installed Synapse via pip to upgrade Twisted -with `pip install --upgrade Twisted treq` as a matter of good practice. The Docker image -`matrixdotorg/synapse` and the Debian packages from `packages.matrix.org` are using the -updated library. - - -Synapse 1.52.0rc1 (2022-02-01) -============================== - -Features --------- - -- Remove account data (including client config, push rules and ignored users) upon user deactivation. ([\#11621](https://github.com/matrix-org/synapse/issues/11621), [\#11788](https://github.com/matrix-org/synapse/issues/11788), [\#11789](https://github.com/matrix-org/synapse/issues/11789)) -- Add an admin API to reset connection timeouts for remote server. 
([\#11639](https://github.com/matrix-org/synapse/issues/11639)) -- Add an admin API to get a list of rooms that federate with a given remote homeserver. ([\#11658](https://github.com/matrix-org/synapse/issues/11658)) -- Add a config flag to inhibit `M_USER_IN_USE` during registration. ([\#11743](https://github.com/matrix-org/synapse/issues/11743)) -- Add a module callback to set username at registration. ([\#11790](https://github.com/matrix-org/synapse/issues/11790)) -- Allow configuring a maximum file size as well as a list of allowed content types for avatars. ([\#11846](https://github.com/matrix-org/synapse/issues/11846)) - - -Bugfixes --------- - -- Include the bundled aggregations in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#11612](https://github.com/matrix-org/synapse/issues/11612)) -- Fix a long-standing bug when previewing Reddit URLs which do not contain an image. ([\#11767](https://github.com/matrix-org/synapse/issues/11767)) -- Fix a long-standing bug that media streams could cause long-lived connections when generating URL previews. ([\#11784](https://github.com/matrix-org/synapse/issues/11784)) -- Include a `prev_content` field in state events sent to Application Services. Contributed by @totallynotvaishnav. ([\#11798](https://github.com/matrix-org/synapse/issues/11798)) -- Fix a bug introduced in Synapse 0.33.3 causing requests to sometimes log strings such as `HTTPStatus.OK` instead of integer status codes. ([\#11827](https://github.com/matrix-org/synapse/issues/11827)) - - -Improved Documentation ----------------------- - -- Update pypi installation docs to indicate that we now support Python 3.10. ([\#11820](https://github.com/matrix-org/synapse/issues/11820)) -- Add missing steps to the contribution submission process in the documentation. Contributed by @sequentialread. ([\#11821](https://github.com/matrix-org/synapse/issues/11821)) -- Remove not needed old table of contents in documentation. ([\#11860](https://github.com/matrix-org/synapse/issues/11860)) -- Consolidate the `access_token` information at the top of each relevant page in the Admin API documentation. ([\#11861](https://github.com/matrix-org/synapse/issues/11861)) - - -Deprecations and Removals -------------------------- - -- Drop support for Python 3.6, which is EOL. ([\#11683](https://github.com/matrix-org/synapse/issues/11683)) -- Remove the `experimental_msc1849_support_enabled` flag as the features are now stable. ([\#11843](https://github.com/matrix-org/synapse/issues/11843)) - - -Internal Changes ----------------- - -- Preparation for database schema simplifications: add `state_key` and `rejection_reason` columns to `events` table. ([\#11792](https://github.com/matrix-org/synapse/issues/11792)) -- Add `FrozenEvent.get_state_key` and use it in a couple of places. ([\#11793](https://github.com/matrix-org/synapse/issues/11793)) -- Preparation for database schema simplifications: stop reading from `event_reference_hashes`. ([\#11794](https://github.com/matrix-org/synapse/issues/11794)) -- Drop unused table `public_room_list_stream`. ([\#11795](https://github.com/matrix-org/synapse/issues/11795)) -- Preparation for reducing Postgres serialization errors: allow setting transaction isolation level. Contributed by Nick @ Beeper. ([\#11799](https://github.com/matrix-org/synapse/issues/11799), [\#11847](https://github.com/matrix-org/synapse/issues/11847)) -- Docker: skip the initial amd64-only build and go straight to multiarch. 
([\#11810](https://github.com/matrix-org/synapse/issues/11810)) -- Run Complement on the Github Actions VM and not inside a Docker container. ([\#11811](https://github.com/matrix-org/synapse/issues/11811)) -- Log module names at startup. ([\#11813](https://github.com/matrix-org/synapse/issues/11813)) -- Improve type safety of bundled aggregations code. ([\#11815](https://github.com/matrix-org/synapse/issues/11815)) -- Correct a type annotation in the event validation logic. ([\#11817](https://github.com/matrix-org/synapse/issues/11817), [\#11830](https://github.com/matrix-org/synapse/issues/11830)) -- Minor updates and documentation for database schema delta files. ([\#11823](https://github.com/matrix-org/synapse/issues/11823)) -- Workaround a type annotation problem in `prometheus_client` 0.13.0. ([\#11834](https://github.com/matrix-org/synapse/issues/11834)) -- Minor performance improvement in room state lookup. ([\#11836](https://github.com/matrix-org/synapse/issues/11836)) -- Fix some indentation inconsistencies in the sample config. ([\#11838](https://github.com/matrix-org/synapse/issues/11838)) -- Add type hints to `tests/rest/admin`. ([\#11851](https://github.com/matrix-org/synapse/issues/11851)) - - -Synapse 1.51.0 (2022-01-25) -=========================== - -No significant changes since 1.51.0rc2. - -Synapse 1.51.0 deprecates `webclient` listeners and non-HTTP(S) `web_client_location`s. Support for these will be removed in Synapse 1.53.0, at which point Synapse will not be capable of directly serving a web client for Matrix. See the [upgrade notes](https://matrix-org.github.io/synapse/develop/upgrade#upgrading-to-v1510). - -Synapse 1.51.0rc2 (2022-01-24) -============================== - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.40.0 that caused Synapse to fail to process incoming federation traffic after handling a large amount of events in a v1 room. ([\#11806](https://github.com/matrix-org/synapse/issues/11806)) - - -Synapse 1.50.2 (2022-01-24) -=========================== - -This release includes the same bugfix as Synapse 1.51.0rc2. - -Bugfixes --------- - -- Fix a bug introduced in Synapse 1.40.0 that caused Synapse to fail to process incoming federation traffic after handling a large amount of events in a v1 room. ([\#11806](https://github.com/matrix-org/synapse/issues/11806)) - - -Synapse 1.51.0rc1 (2022-01-21) -============================== - -Features --------- - -- Add `track_puppeted_user_ips` config flag to record client IP addresses against puppeted users, and include the puppeted users in monthly active user counts. ([\#11561](https://github.com/matrix-org/synapse/issues/11561), [\#11749](https://github.com/matrix-org/synapse/issues/11749), [\#11757](https://github.com/matrix-org/synapse/issues/11757)) -- Include whether the requesting user has participated in a thread when generating a summary for [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440). ([\#11577](https://github.com/matrix-org/synapse/issues/11577)) -- Return an `M_FORBIDDEN` error code instead of `M_UNKNOWN` when a spam checker module prevents a user from creating a room. ([\#11672](https://github.com/matrix-org/synapse/issues/11672)) -- Add a flag to the `synapse_review_recent_signups` script to ignore and filter appservice users. 
([\#11675](https://github.com/matrix-org/synapse/issues/11675), [\#11770](https://github.com/matrix-org/synapse/issues/11770)) - - -Bugfixes -------- - -- Fix a long-standing issue which could cause Synapse to incorrectly accept data in the unsigned field of events received over federation. ([\#11530](https://github.com/matrix-org/synapse/issues/11530)) -- Fix a long-standing bug where Synapse wouldn't cache a response indicating that a remote user has no devices. ([\#11587](https://github.com/matrix-org/synapse/issues/11587)) -- Fix an error that occurs whilst trying to get the federation status of a destination server that was working normally. This admin API was newly introduced in Synapse 1.49.0. ([\#11593](https://github.com/matrix-org/synapse/issues/11593)) -- Fix bundled aggregations not being included in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#11612](https://github.com/matrix-org/synapse/issues/11612), [\#11659](https://github.com/matrix-org/synapse/issues/11659), [\#11791](https://github.com/matrix-org/synapse/issues/11791)) -- Fix the `/_matrix/client/v1/room/{roomId}/hierarchy` endpoint returning incorrect fields which have been present since Synapse 1.49.0. ([\#11667](https://github.com/matrix-org/synapse/issues/11667)) -- Fix preview of some GIF URLs (like tenor.com). Contributed by Philippe Daouadi. ([\#11669](https://github.com/matrix-org/synapse/issues/11669)) -- Fix a bug where only the first 50 rooms from a space were returned from the `/hierarchy` API. This has existed since the introduction of the API in Synapse 1.41.0. ([\#11695](https://github.com/matrix-org/synapse/issues/11695)) -- Fix a bug introduced in Synapse 1.18.0 where password reset and address validation emails would not be sent if their subject was configured to use the 'app' template variable. Contributed by @br4nnigan. ([\#11710](https://github.com/matrix-org/synapse/issues/11710), [\#11745](https://github.com/matrix-org/synapse/issues/11745)) -- Make the 'List Rooms' Admin API sort stable. Contributed by Daniël Sonck. ([\#11737](https://github.com/matrix-org/synapse/issues/11737)) -- Fix a long-standing bug where space hierarchy over federation would only work correctly some of the time. ([\#11775](https://github.com/matrix-org/synapse/issues/11775)) -- Fix a bug introduced in Synapse 1.46.0 that prevented `on_logged_out` module callbacks from being correctly awaited by Synapse. ([\#11786](https://github.com/matrix-org/synapse/issues/11786)) - - -Improved Documentation ---------------------- - -- Warn against using a Let's Encrypt certificate for TLS/DTLS TURN server client connections, and suggest using a ZeroSSL certificate instead. This works around client-side connectivity errors caused by WebRTC libraries that reject Let's Encrypt certificates. Contributed by @AndrewFerr. ([\#11686](https://github.com/matrix-org/synapse/issues/11686)) -- Document the new `SYNAPSE_TEST_PERSIST_SQLITE_DB` environment variable in the contributing guide. ([\#11715](https://github.com/matrix-org/synapse/issues/11715)) -- Document that the minimum supported PostgreSQL version is now 10. ([\#11725](https://github.com/matrix-org/synapse/issues/11725)) -- Fix typo in demo docs: differnt. ([\#11735](https://github.com/matrix-org/synapse/issues/11735)) -- Update room spec URL in config files. ([\#11739](https://github.com/matrix-org/synapse/issues/11739)) -- Mention `python3-venv` and `libpq-dev` dependencies in the contribution guide.
([\#11740](https://github.com/matrix-org/synapse/issues/11740)) -- Update documentation for configuring login with Facebook. ([\#11755](https://github.com/matrix-org/synapse/issues/11755)) -- Update installation instructions to note that Python 3.6 is no longer supported. ([\#11781](https://github.com/matrix-org/synapse/issues/11781)) - - -Deprecations and Removals -------------------------- - -- Remove the unstable `/send_relation` endpoint. ([\#11682](https://github.com/matrix-org/synapse/issues/11682)) -- Remove `python_twisted_reactor_pending_calls` Prometheus metric. ([\#11724](https://github.com/matrix-org/synapse/issues/11724)) -- Remove the `password_hash` field from the response dictionaries of the [Users Admin API](https://matrix-org.github.io/synapse/latest/admin_api/user_admin_api.html). ([\#11576](https://github.com/matrix-org/synapse/issues/11576)) -- **Deprecate support for `webclient` listeners and non-HTTP(S) `web_client_location` configuration. ([\#11774](https://github.com/matrix-org/synapse/issues/11774), [\#11783](https://github.com/matrix-org/synapse/issues/11783))** - - -Internal Changes ----------------- - -- Run `pyupgrade --py37-plus --keep-percent-format` on Synapse. ([\#11685](https://github.com/matrix-org/synapse/issues/11685)) -- Use buildkit's cache feature to speed up docker builds. ([\#11691](https://github.com/matrix-org/synapse/issues/11691)) -- Use `auto_attribs` and native type hints for attrs classes. ([\#11692](https://github.com/matrix-org/synapse/issues/11692), [\#11768](https://github.com/matrix-org/synapse/issues/11768)) -- Remove debug logging for #4422, which has been closed since Synapse 0.99. ([\#11693](https://github.com/matrix-org/synapse/issues/11693)) -- Remove fallback code for Python 2. ([\#11699](https://github.com/matrix-org/synapse/issues/11699)) -- Add a test for [an edge case](https://github.com/matrix-org/synapse/pull/11532#discussion_r769104461) in the `/sync` logic. ([\#11701](https://github.com/matrix-org/synapse/issues/11701)) -- Add the option to write SQLite test dbs to disk when running tests. ([\#11702](https://github.com/matrix-org/synapse/issues/11702)) -- Improve Complement test output for Gitub Actions. ([\#11707](https://github.com/matrix-org/synapse/issues/11707)) -- Fix docstring on `add_account_data_for_user`. ([\#11716](https://github.com/matrix-org/synapse/issues/11716)) -- Complement environment variable name change and update `.gitignore`. ([\#11718](https://github.com/matrix-org/synapse/issues/11718)) -- Simplify calculation of Prometheus metrics for garbage collection. ([\#11723](https://github.com/matrix-org/synapse/issues/11723)) -- Improve accuracy of `python_twisted_reactor_tick_time` Prometheus metric. ([\#11724](https://github.com/matrix-org/synapse/issues/11724), [\#11771](https://github.com/matrix-org/synapse/issues/11771)) -- Minor efficiency improvements when inserting many values into the database. ([\#11742](https://github.com/matrix-org/synapse/issues/11742)) -- Invite PR authors to give themselves credit in the changelog. ([\#11744](https://github.com/matrix-org/synapse/issues/11744)) -- Add optional debugging to investigate [issue 8631](https://github.com/matrix-org/synapse/issues/8631). ([\#11760](https://github.com/matrix-org/synapse/issues/11760)) -- Remove `log_function` utility function and its uses. ([\#11761](https://github.com/matrix-org/synapse/issues/11761)) -- Add a unit test that checks both `client` and `webclient` resources will function when simultaneously enabled. 
([\#11765](https://github.com/matrix-org/synapse/issues/11765)) -- Allow overriding complement commit using `COMPLEMENT_REF`. ([\#11766](https://github.com/matrix-org/synapse/issues/11766)) -- Add some comments and type annotations for `_update_outliers_txn`. ([\#11776](https://github.com/matrix-org/synapse/issues/11776)) - - -Synapse 1.50.1 (2022-01-18) -=========================== - -This release fixes a bug in Synapse 1.50.0 that could prevent clients from being able to connect to Synapse if the `webclient` resource was enabled. Further details are available in [this issue](https://github.com/matrix-org/synapse/issues/11763). - -Bugfixes -------- - -- Fix a bug introduced in Synapse 1.50.0rc1 that could cause Matrix clients to be unable to connect to Synapse instances with the `webclient` resource enabled. ([\#11764](https://github.com/matrix-org/synapse/issues/11764)) - - -Synapse 1.50.0 (2022-01-18) -=========================== - -**This release contains a critical bug that may prevent clients from being able to connect. -As such, it is not recommended to upgrade to 1.50.0. Instead, please upgrade straight to 1.50.1. Further details are available in [this issue](https://github.com/matrix-org/synapse/issues/11763).** - -Please note that we now only support Python 3.7+ and PostgreSQL 10+ (if applicable), because Python 3.6 and PostgreSQL 9.6 have reached end-of-life. - -No significant changes since 1.50.0rc2. - - -Synapse 1.50.0rc2 (2022-01-14) -============================== - -This release candidate fixes a federation-breaking regression introduced in Synapse 1.50.0rc1. - -Bugfixes -------- - -- Fix a bug introduced in Synapse 1.0.0 whereby some device list updates would not be sent to remote homeservers if there were too many to send at once. ([\#11729](https://github.com/matrix-org/synapse/issues/11729)) -- Fix a bug introduced in Synapse 1.50.0rc1 whereby outbound federation could fail because too many EDUs were produced for device updates. ([\#11730](https://github.com/matrix-org/synapse/issues/11730)) - - -Improved Documentation ---------------------- - -- Document that the minimum supported PostgreSQL version is now 10. ([\#11725](https://github.com/matrix-org/synapse/issues/11725)) - - -Internal Changes ---------------- - -- Fix a typechecker problem related to our (ab)use of `nacl.signing.SigningKey`s. ([\#11714](https://github.com/matrix-org/synapse/issues/11714)) - - -Synapse 1.50.0rc1 (2022-01-05) -============================== - - -Features -------- - -- Allow guests to send state events per [MSC3419](https://github.com/matrix-org/matrix-doc/pull/3419). ([\#11378](https://github.com/matrix-org/synapse/issues/11378)) -- Add experimental support for part of [MSC3202](https://github.com/matrix-org/matrix-doc/pull/3202): allowing application services to masquerade as specific devices. ([\#11538](https://github.com/matrix-org/synapse/issues/11538)) -- Add admin API to get users' account data. ([\#11664](https://github.com/matrix-org/synapse/issues/11664)) -- Include the room topic in the stripped state included with invites and knocking. ([\#11666](https://github.com/matrix-org/synapse/issues/11666)) -- Send and handle cross-signing messages using the stable prefix. ([\#10520](https://github.com/matrix-org/synapse/issues/10520)) -- Support unprefixed versions of fallback key property names.
([\#11541](https://github.com/matrix-org/synapse/issues/11541)) - - -Bugfixes --------- - -- Fix a long-standing bug where relations from other rooms could be included in the bundled aggregations of an event. ([\#11516](https://github.com/matrix-org/synapse/issues/11516)) -- Fix a long-standing bug which could cause `AssertionError`s to be written to the log when Synapse was restarted after purging events from the database. ([\#11536](https://github.com/matrix-org/synapse/issues/11536), [\#11642](https://github.com/matrix-org/synapse/issues/11642)) -- Fix a bug introduced in Synapse 1.17.0 where a pusher created for an email with capital letters would fail to be created. ([\#11547](https://github.com/matrix-org/synapse/issues/11547)) -- Fix a long-standing bug where responses included bundled aggregations when they should not, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#11592](https://github.com/matrix-org/synapse/issues/11592), [\#11623](https://github.com/matrix-org/synapse/issues/11623)) -- Fix a long-standing bug that some unknown endpoints would return HTML error pages instead of JSON `M_UNRECOGNIZED` errors. ([\#11602](https://github.com/matrix-org/synapse/issues/11602)) -- Fix a bug introduced in Synapse 1.19.3 which could sometimes cause `AssertionError`s when backfilling rooms over federation. ([\#11632](https://github.com/matrix-org/synapse/issues/11632)) - - -Improved Documentation ----------------------- - -- Update Synapse install command for FreeBSD as the package is now prefixed with `py38`. Contributed by @itchychips. ([\#11267](https://github.com/matrix-org/synapse/issues/11267)) -- Document the usage of refresh tokens. ([\#11427](https://github.com/matrix-org/synapse/issues/11427)) -- Add details for how to configure a TURN server when behind a NAT. Contibuted by @AndrewFerr. ([\#11553](https://github.com/matrix-org/synapse/issues/11553)) -- Add references for using Postgres to the Docker documentation. ([\#11640](https://github.com/matrix-org/synapse/issues/11640)) -- Fix the documentation link in newly-generated configuration files. ([\#11678](https://github.com/matrix-org/synapse/issues/11678)) -- Correct the documentation for `nginx` to use a case-sensitive url pattern. Fixes an error introduced in v1.21.0. ([\#11680](https://github.com/matrix-org/synapse/issues/11680)) -- Clarify SSO mapping provider documentation by writing `def` or `async def` before the names of methods, as appropriate. ([\#11681](https://github.com/matrix-org/synapse/issues/11681)) - - -Deprecations and Removals -------------------------- - -- Replace `mock` package by its standard library version. ([\#11588](https://github.com/matrix-org/synapse/issues/11588)) -- Drop support for Python 3.6 and Ubuntu 18.04. ([\#11633](https://github.com/matrix-org/synapse/issues/11633)) - - -Internal Changes ----------------- - -- Allow specific, experimental events to be created without `prev_events`. Used by [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716). ([\#11243](https://github.com/matrix-org/synapse/issues/11243)) -- A test helper (`wait_for_background_updates`) no longer depends on classes defining a `store` property. ([\#11331](https://github.com/matrix-org/synapse/issues/11331)) -- Add type hints to `synapse.appservice`. ([\#11360](https://github.com/matrix-org/synapse/issues/11360)) -- Add missing type hints to `synapse.config` module. 
([\#11480](https://github.com/matrix-org/synapse/issues/11480)) -- Add test to ensure we share the same `state_group` across the whole historical batch when using the [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) `/batch_send` endpoint. ([\#11487](https://github.com/matrix-org/synapse/issues/11487)) -- Refactor `tests.util.setup_test_homeserver` and `tests.server.setup_test_homeserver`. ([\#11503](https://github.com/matrix-org/synapse/issues/11503)) -- Move `glob_to_regex` and `re_word_boundary` to `matrix-python-common`. ([\#11505](https://github.com/matrix-org/synapse/issues/11505), [\#11687](https://github.com/matrix-org/synapse/issues/11687)) -- Use `HTTPStatus` constants in place of literals in `tests.rest.client.test_auth`. ([\#11520](https://github.com/matrix-org/synapse/issues/11520)) -- Add a receipt types constant for `m.read`. ([\#11531](https://github.com/matrix-org/synapse/issues/11531)) -- Clean up `synapse.rest.admin`. ([\#11535](https://github.com/matrix-org/synapse/issues/11535)) -- Add missing `errcode` to `parse_string` and `parse_boolean`. ([\#11542](https://github.com/matrix-org/synapse/issues/11542)) -- Use `HTTPStatus` constants in place of literals in `synapse.http`. ([\#11543](https://github.com/matrix-org/synapse/issues/11543)) -- Add missing type hints to storage classes. ([\#11546](https://github.com/matrix-org/synapse/issues/11546), [\#11549](https://github.com/matrix-org/synapse/issues/11549), [\#11551](https://github.com/matrix-org/synapse/issues/11551), [\#11555](https://github.com/matrix-org/synapse/issues/11555), [\#11575](https://github.com/matrix-org/synapse/issues/11575), [\#11589](https://github.com/matrix-org/synapse/issues/11589), [\#11594](https://github.com/matrix-org/synapse/issues/11594), [\#11652](https://github.com/matrix-org/synapse/issues/11652), [\#11653](https://github.com/matrix-org/synapse/issues/11653), [\#11654](https://github.com/matrix-org/synapse/issues/11654), [\#11657](https://github.com/matrix-org/synapse/issues/11657)) -- Fix an inaccurate and misleading comment in the `/sync` code. ([\#11550](https://github.com/matrix-org/synapse/issues/11550)) -- Add missing type hints to `synapse.logging.context`. ([\#11556](https://github.com/matrix-org/synapse/issues/11556)) -- Stop populating unused database column `state_events.prev_state`. ([\#11558](https://github.com/matrix-org/synapse/issues/11558)) -- Minor efficiency improvements in event persistence. ([\#11560](https://github.com/matrix-org/synapse/issues/11560)) -- Add some safety checks that storage functions are used correctly. ([\#11564](https://github.com/matrix-org/synapse/issues/11564), [\#11580](https://github.com/matrix-org/synapse/issues/11580)) -- Make `get_device` return `None` if the device doesn't exist rather than raising an exception. ([\#11565](https://github.com/matrix-org/synapse/issues/11565)) -- Split the HTML parsing code from the URL preview resource code. ([\#11566](https://github.com/matrix-org/synapse/issues/11566)) -- Remove redundant `COALESCE()`s around `COUNT()`s in database queries. ([\#11570](https://github.com/matrix-org/synapse/issues/11570)) -- Add missing type hints to `synapse.http`. ([\#11571](https://github.com/matrix-org/synapse/issues/11571)) -- Add [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) and [MSC3030](https://github.com/matrix-org/matrix-doc/pull/3030) to `/versions` -> `unstable_features` to detect server support. 
([\#11582](https://github.com/matrix-org/synapse/issues/11582)) -- Add type hints to `synapse/tests/rest/admin`. ([\#11590](https://github.com/matrix-org/synapse/issues/11590)) -- Drop end-of-life Python 3.6 and Postgres 9.6 from CI. ([\#11595](https://github.com/matrix-org/synapse/issues/11595)) -- Update black version and run it on all the files. ([\#11596](https://github.com/matrix-org/synapse/issues/11596)) -- Add opentracing type stubs and fix associated mypy errors. ([\#11603](https://github.com/matrix-org/synapse/issues/11603), [\#11622](https://github.com/matrix-org/synapse/issues/11622)) -- Improve OpenTracing support for requests which use a `ResponseCache`. ([\#11607](https://github.com/matrix-org/synapse/issues/11607)) -- Improve OpenTracing support for incoming HTTP requests. ([\#11618](https://github.com/matrix-org/synapse/issues/11618)) -- A number of improvements to opentracing support. ([\#11619](https://github.com/matrix-org/synapse/issues/11619)) -- Refactor the way that the `outlier` flag is set on events received over federation. ([\#11634](https://github.com/matrix-org/synapse/issues/11634)) -- Improve the error messages from `get_create_event_for_room`. ([\#11638](https://github.com/matrix-org/synapse/issues/11638)) -- Remove redundant `get_current_events_token` method. ([\#11643](https://github.com/matrix-org/synapse/issues/11643)) -- Convert `namedtuples` to `attrs`. ([\#11665](https://github.com/matrix-org/synapse/issues/11665), [\#11574](https://github.com/matrix-org/synapse/issues/11574)) -- Update the `/capabilities` response to include whether support for [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440) is available. ([\#11690](https://github.com/matrix-org/synapse/issues/11690)) -- Send the `Accept` header in HTTP requests made using `SimpleHttpClient.get_json`. ([\#11677](https://github.com/matrix-org/synapse/issues/11677)) -- Work around Mjolnir compatibility issue by adding an import for `glob_to_regex` in `synapse.util`, where it moved from. ([\#11696](https://github.com/matrix-org/synapse/issues/11696)) - - **Changelogs for older versions can be found [here](docs/changelogs/).** diff --git a/changelog.d/15846.misc b/changelog.d/15846.misc new file mode 100644 index 000000000000..f1c31d6663ab --- /dev/null +++ b/changelog.d/15846.misc @@ -0,0 +1 @@ +Split out 2022 changes from the changelog so the rendered version in GitHub doesn't timeout as much. diff --git a/docs/changelogs/CHANGES-2022.md b/docs/changelogs/CHANGES-2022.md new file mode 100644 index 000000000000..81e284951620 --- /dev/null +++ b/docs/changelogs/CHANGES-2022.md @@ -0,0 +1,2766 @@ + +Synapse 1.74.0 (2022-12-20) +=========================== + +Improved Documentation +---------------------- + +- Add release note and update documentation regarding optional ICU support in user search. ([\#14712](https://github.com/matrix-org/synapse/issues/14712)) + + +Synapse 1.74.0rc1 (2022-12-13) +============================== + +Features +-------- + +- Improve user search for international display names. ([\#14464](https://github.com/matrix-org/synapse/issues/14464)) +- Stop using deprecated `keyIds` parameter when calling `/_matrix/key/v2/server`. ([\#14490](https://github.com/matrix-org/synapse/issues/14490), [\#14525](https://github.com/matrix-org/synapse/issues/14525)) +- Add new `push.enabled` config option to allow opting out of push notification calculation. 
([\#14551](https://github.com/matrix-org/synapse/issues/14551), [\#14619](https://github.com/matrix-org/synapse/issues/14619)) +- Advertise support for Matrix 1.5 on `/_matrix/client/versions`. ([\#14576](https://github.com/matrix-org/synapse/issues/14576)) +- Improve opentracing and logging for to-device message handling. ([\#14598](https://github.com/matrix-org/synapse/issues/14598)) +- Allow selecting "prejoin" events by state keys in addition to event types. ([\#14642](https://github.com/matrix-org/synapse/issues/14642)) + + +Bugfixes +-------- + +- Fix a long-standing bug where a device list update might not be sent to clients in certain circumstances. ([\#14435](https://github.com/matrix-org/synapse/issues/14435), [\#14592](https://github.com/matrix-org/synapse/issues/14592), [\#14604](https://github.com/matrix-org/synapse/issues/14604)) +- Suppress a spurious warning when `POST /rooms///`, `POST /join//` receive an empty HTTP request body. ([\#14600](https://github.com/matrix-org/synapse/issues/14600)) +- Return spec-compliant JSON errors when unknown endpoints are requested. ([\#14620](https://github.com/matrix-org/synapse/issues/14620), [\#14621](https://github.com/matrix-org/synapse/issues/14621)) +- Update html templates to load images over HTTPS. Contributed by @ashfame. ([\#14625](https://github.com/matrix-org/synapse/issues/14625)) +- Fix a long-standing bug where the user directory would return 1 more row than requested. ([\#14631](https://github.com/matrix-org/synapse/issues/14631)) +- Reject invalid read receipt requests with empty room or event IDs. Contributed by Nick @ Beeper (@fizzadar). ([\#14632](https://github.com/matrix-org/synapse/issues/14632)) +- Fix a bug introduced in Synapse 1.67.0 where not specifying a config file or a server URL would lead to the `register_new_matrix_user` script failing. ([\#14637](https://github.com/matrix-org/synapse/issues/14637)) +- Fix a long-standing bug where the user directory and room/user stats might be out of sync. ([\#14639](https://github.com/matrix-org/synapse/issues/14639), [\#14643](https://github.com/matrix-org/synapse/issues/14643)) +- Fix a bug introduced in Synapse 1.72.0 where the background updates to add non-thread unique indexes on receipts would fail if they were previously interrupted. ([\#14650](https://github.com/matrix-org/synapse/issues/14650)) +- Improve validation of field size limits in events. ([\#14664](https://github.com/matrix-org/synapse/issues/14664)) +- Fix bugs introduced in Synapse 1.55.0 and 1.69.0 where application services would not be notified of events in the correct rooms, due to stale caches. ([\#14670](https://github.com/matrix-org/synapse/issues/14670)) + + +Improved Documentation +---------------------- + +- Update worker settings for `pusher` and `federation_sender` functionality. ([\#14493](https://github.com/matrix-org/synapse/issues/14493)) +- Add links to third party package repositories, and point to the bug which highlights Ubuntu's out-of-date packages. ([\#14517](https://github.com/matrix-org/synapse/issues/14517)) +- Remove old, incorrect minimum postgres version note and replace with a link to the [Dependency Deprecation Policy](https://matrix-org.github.io/synapse/v1.73/deprecation_policy.html). ([\#14590](https://github.com/matrix-org/synapse/issues/14590)) +- Add Single-Sign On setup instructions for Mastodon-based instances. ([\#14594](https://github.com/matrix-org/synapse/issues/14594)) +- Change `turn_allow_guests` example value to lowercase `true`. 
([\#14634](https://github.com/matrix-org/synapse/issues/14634)) + + +Internal Changes +---------------- + +- Optimise push badge count calculations. Contributed by Nick @ Beeper (@fizzadar). ([\#14255](https://github.com/matrix-org/synapse/issues/14255)) +- Faster remote room joins: stream the un-partial-stating of rooms over replication. ([\#14473](https://github.com/matrix-org/synapse/issues/14473), [\#14474](https://github.com/matrix-org/synapse/issues/14474)) +- Share the `ClientRestResource` for both workers and the main process. ([\#14528](https://github.com/matrix-org/synapse/issues/14528)) +- Add `--editable` flag to `complement.sh` which uses an editable install of Synapse for faster turn-around times whilst developing iteratively. ([\#14548](https://github.com/matrix-org/synapse/issues/14548)) +- Faster joins: use servers list approximation to send read receipts when in partial state instead of waiting for the full state of the room. ([\#14549](https://github.com/matrix-org/synapse/issues/14549)) +- Modernize unit tests configuration related to workers. ([\#14568](https://github.com/matrix-org/synapse/issues/14568)) +- Bump jsonschema from 4.17.0 to 4.17.3. ([\#14591](https://github.com/matrix-org/synapse/issues/14591)) +- Fix Rust lint CI. ([\#14602](https://github.com/matrix-org/synapse/issues/14602)) +- Bump JasonEtco/create-an-issue from 2.5.0 to 2.8.1. ([\#14607](https://github.com/matrix-org/synapse/issues/14607)) +- Alter some unit test environment parameters to decrease time spent running tests. ([\#14610](https://github.com/matrix-org/synapse/issues/14610)) +- Switch to Go recommended installation method for `gotestfmt` template in CI. ([\#14611](https://github.com/matrix-org/synapse/issues/14611)) +- Bump phonenumbers from 8.13.0 to 8.13.1. ([\#14612](https://github.com/matrix-org/synapse/issues/14612)) +- Bump types-setuptools from 65.5.0.3 to 65.6.0.1. ([\#14613](https://github.com/matrix-org/synapse/issues/14613)) +- Bump twine from 4.0.1 to 4.0.2. ([\#14614](https://github.com/matrix-org/synapse/issues/14614)) +- Bump types-requests from 2.28.11.2 to 2.28.11.5. ([\#14615](https://github.com/matrix-org/synapse/issues/14615)) +- Bump cryptography from 38.0.3 to 38.0.4. ([\#14616](https://github.com/matrix-org/synapse/issues/14616)) +- Remove useless cargo install with apt from Dockerfile. ([\#14636](https://github.com/matrix-org/synapse/issues/14636)) +- Bump certifi from 2021.10.8 to 2022.12.7. ([\#14645](https://github.com/matrix-org/synapse/issues/14645)) +- Bump flake8-bugbear from 22.10.27 to 22.12.6. ([\#14656](https://github.com/matrix-org/synapse/issues/14656)) +- Bump packaging from 21.3 to 22.0. ([\#14657](https://github.com/matrix-org/synapse/issues/14657)) +- Bump types-pillow from 9.3.0.1 to 9.3.0.4. ([\#14658](https://github.com/matrix-org/synapse/issues/14658)) +- Bump serde from 1.0.148 to 1.0.150. ([\#14659](https://github.com/matrix-org/synapse/issues/14659)) +- Bump phonenumbers from 8.13.1 to 8.13.2. ([\#14660](https://github.com/matrix-org/synapse/issues/14660)) +- Bump authlib from 1.1.0 to 1.2.0. ([\#14661](https://github.com/matrix-org/synapse/issues/14661)) +- Move `StateFilter` to `synapse.types`. ([\#14668](https://github.com/matrix-org/synapse/issues/14668)) +- Improve type hints. 
([\#14597](https://github.com/matrix-org/synapse/issues/14597), [\#14646](https://github.com/matrix-org/synapse/issues/14646), [\#14671](https://github.com/matrix-org/synapse/issues/14671)) + + +Synapse 1.73.0 (2022-12-06) +=========================== + +Please note that legacy Prometheus metric names have been removed in this release; see [the upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.73/docs/upgrade.md#legacy-prometheus-metric-names-have-now-been-removed) for more details. + +No significant changes since 1.73.0rc2. + + +Synapse 1.73.0rc2 (2022-12-01) +============================== + +Bugfixes +-------- + +- Fix a regression in Synapse 1.73.0rc1 where Synapse's main process would stop responding to HTTP requests when a user with a large number of devices logs in. ([\#14582](https://github.com/matrix-org/synapse/issues/14582)) + + +Synapse 1.73.0rc1 (2022-11-29) +============================== + +Features +-------- + +- Speed-up `/messages` with `filter_events_for_client` optimizations. ([\#14527](https://github.com/matrix-org/synapse/issues/14527)) +- Improve DB performance by reducing amount of data that gets read in `device_lists_changes_in_room`. ([\#14534](https://github.com/matrix-org/synapse/issues/14534)) +- Add support for handling avatar in SSO OIDC login. Contributed by @ashfame. ([\#13917](https://github.com/matrix-org/synapse/issues/13917)) +- Move MSC3030 `/timestamp_to_event` endpoints to stable `v1` location (`/_matrix/client/v1/rooms//timestamp_to_event?ts=&dir=`, `/_matrix/federation/v1/timestamp_to_event/?ts=&dir=`). ([\#14471](https://github.com/matrix-org/synapse/issues/14471)) +- Reduce database load of [Client-Server endpoints](https://spec.matrix.org/v1.5/client-server-api/#aggregations) which return bundled aggregations. ([\#14491](https://github.com/matrix-org/synapse/issues/14491), [\#14508](https://github.com/matrix-org/synapse/issues/14508), [\#14510](https://github.com/matrix-org/synapse/issues/14510)) +- Add unstable support for an Extensible Events room version (`org.matrix.msc1767.10`) via [MSC1767](https://github.com/matrix-org/matrix-spec-proposals/pull/1767), [MSC3931](https://github.com/matrix-org/matrix-spec-proposals/pull/3931), [MSC3932](https://github.com/matrix-org/matrix-spec-proposals/pull/3932), and [MSC3933](https://github.com/matrix-org/matrix-spec-proposals/pull/3933). ([\#14520](https://github.com/matrix-org/synapse/issues/14520), [\#14521](https://github.com/matrix-org/synapse/issues/14521), [\#14524](https://github.com/matrix-org/synapse/issues/14524)) +- Prune user's old devices on login if they have too many. ([\#14038](https://github.com/matrix-org/synapse/issues/14038), [\#14580](https://github.com/matrix-org/synapse/issues/14580)) + + +Bugfixes +-------- + +- Fix a long-standing bug where paginating from the start of a room did not work. Contributed by @gnunicorn. ([\#14149](https://github.com/matrix-org/synapse/issues/14149)) +- Fix a bug introduced in Synapse 1.58.0 where a user with presence state `org.matrix.msc3026.busy` would mistakenly be set to `online` when calling `/sync` or `/events` on a worker process. ([\#14393](https://github.com/matrix-org/synapse/issues/14393)) +- Fix a bug introduced in Synapse 1.70.0 where a receipt's thread ID was not sent over federation. 
([\#14466](https://github.com/matrix-org/synapse/issues/14466)) +- Fix a long-standing bug where the [List media admin API](https://matrix-org.github.io/synapse/latest/admin_api/media_admin_api.html#list-all-media-in-a-room) would fail when processing an image with broken thumbnail information. ([\#14537](https://github.com/matrix-org/synapse/issues/14537)) +- Fix a bug introduced in Synapse 1.67.0 where two logging context warnings would be logged on startup. ([\#14574](https://github.com/matrix-org/synapse/issues/14574)) +- In application service transactions that include the experimental `org.matrix.msc3202.device_one_time_key_counts` key, include a duplicate key of `org.matrix.msc3202.device_one_time_keys_count` to match the name proposed by [MSC3202](https://github.com/matrix-org/matrix-spec-proposals/pull/3202). ([\#14565](https://github.com/matrix-org/synapse/issues/14565)) +- Fix a bug introduced in Synapse 0.9 where Synapse would fail to fetch server keys whose IDs contain a forward slash. ([\#14490](https://github.com/matrix-org/synapse/issues/14490)) + + +Improved Documentation +---------------------- + +- Fixed link to 'Synapse administration endpoints'. ([\#14499](https://github.com/matrix-org/synapse/issues/14499)) + + +Deprecations and Removals +------------------------- + +- Remove legacy Prometheus metrics names. They were deprecated in Synapse v1.69.0 and disabled by default in Synapse v1.71.0. ([\#14538](https://github.com/matrix-org/synapse/issues/14538)) + + +Internal Changes +---------------- + +- Improve type hinting throughout Synapse. ([\#14055](https://github.com/matrix-org/synapse/issues/14055), [\#14412](https://github.com/matrix-org/synapse/issues/14412), [\#14529](https://github.com/matrix-org/synapse/issues/14529), [\#14452](https://github.com/matrix-org/synapse/issues/14452)). +- Remove old stream ID tracking code. Contributed by Nick @Beeper (@fizzadar). ([\#14376](https://github.com/matrix-org/synapse/issues/14376), [\#14468](https://github.com/matrix-org/synapse/issues/14468)) +- Remove the `worker_main_http_uri` configuration setting. This is now handled via internal replication. ([\#14400](https://github.com/matrix-org/synapse/issues/14400), [\#14476](https://github.com/matrix-org/synapse/issues/14476)) +- Refactor `federation_sender` and `pusher` configuration loading. ([\#14496](https://github.com/matrix-org/synapse/issues/14496)) +([\#14509](https://github.com/matrix-org/synapse/issues/14509), [\#14573](https://github.com/matrix-org/synapse/issues/14573)) +- Faster joins: do not wait for full state when creating events to send. ([\#14403](https://github.com/matrix-org/synapse/issues/14403)) +- Faster joins: filter out non local events when a room doesn't have its full state. ([\#14404](https://github.com/matrix-org/synapse/issues/14404)) +- Faster joins: send events to initial list of servers if we don't have the full state yet. ([\#14408](https://github.com/matrix-org/synapse/issues/14408)) +- Faster joins: use servers list approximation received during `send_join` (potentially updated with received membership events) in `assert_host_in_room`. ([\#14515](https://github.com/matrix-org/synapse/issues/14515)) +- Fix type logic in TCP replication code that prevented correctly ignoring blank commands. ([\#14449](https://github.com/matrix-org/synapse/issues/14449)) +- Remove option to skip locking of tables when performing emulated upserts, to avoid a class of bugs in future. 
([\#14469](https://github.com/matrix-org/synapse/issues/14469)) +- `scripts-dev/federation_client`: Fix routing on servers with `.well-known` files. ([\#14479](https://github.com/matrix-org/synapse/issues/14479)) +- Reduce default third party invite rate limit to 216 invites per day. ([\#14487](https://github.com/matrix-org/synapse/issues/14487)) +- Refactor conversion of device list changes in room to outbound pokes to track unconverted rows using a `(stream ID, room ID)` position instead of updating the `converted_to_destinations` flag on every row. ([\#14516](https://github.com/matrix-org/synapse/issues/14516)) +- Add more prompts to the bug report form. ([\#14522](https://github.com/matrix-org/synapse/issues/14522)) +- Extend editorconfig rules on indent and line length to `.pyi` files. ([\#14526](https://github.com/matrix-org/synapse/issues/14526)) +- Run Rust CI when `Cargo.lock` changes. This is particularly useful for dependabot updates. ([\#14571](https://github.com/matrix-org/synapse/issues/14571)) +- Fix a possible variable shadow in `create_new_client_event`. ([\#14575](https://github.com/matrix-org/synapse/issues/14575)) +- Bump various dependencies in the `poetry.lock` file and in CI scripts. ([\#14557](https://github.com/matrix-org/synapse/issues/14557), [\#14559](https://github.com/matrix-org/synapse/issues/14559), [\#14560](https://github.com/matrix-org/synapse/issues/14560), [\#14500](https://github.com/matrix-org/synapse/issues/14500), [\#14501](https://github.com/matrix-org/synapse/issues/14501), [\#14502](https://github.com/matrix-org/synapse/issues/14502), [\#14503](https://github.com/matrix-org/synapse/issues/14503), [\#14504](https://github.com/matrix-org/synapse/issues/14504), [\#14505](https://github.com/matrix-org/synapse/issues/14505)). + + +Synapse 1.72.0 (2022-11-22) +=========================== + +Please note that Synapse now only supports PostgreSQL 11+, because PostgreSQL 10 has reached end-of-life, c.f. our [Deprecation Policy](https://github.com/matrix-org/synapse/blob/develop/docs/deprecation_policy.md). + +Bugfixes +-------- + +- Update forgotten references to legacy metrics in the included Grafana dashboard. ([\#14477](https://github.com/matrix-org/synapse/issues/14477)) + + +Synapse 1.72.0rc1 (2022-11-16) +============================== + +Features +-------- + +- Add experimental support for [MSC3912](https://github.com/matrix-org/matrix-spec-proposals/pull/3912): Relation-based redactions. ([\#14260](https://github.com/matrix-org/synapse/issues/14260)) +- Build Debian packages for Ubuntu 22.10 (Kinetic Kudu). ([\#14396](https://github.com/matrix-org/synapse/issues/14396)) +- Add an [Admin API](https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/index.html) endpoint for user lookup based on third-party ID (3PID). Contributed by @ashfame. ([\#14405](https://github.com/matrix-org/synapse/issues/14405)) +- Faster joins: include heroes' membership events in the partial join response, for rooms without a name or canonical alias. ([\#14442](https://github.com/matrix-org/synapse/issues/14442)) + + +Bugfixes +-------- + +- Faster joins: do not block creation of or queries for room aliases during the resync. ([\#14292](https://github.com/matrix-org/synapse/issues/14292)) +- Fix a bug introduced in Synapse 1.64.0rc1 which could cause log spam when fetching events from other homeservers. ([\#14347](https://github.com/matrix-org/synapse/issues/14347)) +- Fix a bug introduced in 1.66 which would not send certain pushrules to clients. 
Contributed by Nico. ([\#14356](https://github.com/matrix-org/synapse/issues/14356)) +- Fix a bug introduced in v1.71.0rc1 where the power level event was incorrectly created during initial room creation. ([\#14361](https://github.com/matrix-org/synapse/issues/14361)) +- Fix the refresh token endpoint to be under /r0 and /v3 instead of /v1. Contributed by Tulir @ Beeper. ([\#14364](https://github.com/matrix-org/synapse/issues/14364)) +- Fix a long-standing bug where Synapse would raise an error when encountering an unrecognised field in a `/sync` filter, instead of ignoring it for forward compatibility. ([\#14369](https://github.com/matrix-org/synapse/issues/14369)) +- Fix a background database update, introduced in Synapse 1.64.0, which could cause poor database performance. ([\#14374](https://github.com/matrix-org/synapse/issues/14374)) +- Fix PostgreSQL sometimes using table scans for queries against the `event_search` table, taking a long time and a large amount of IO. ([\#14409](https://github.com/matrix-org/synapse/issues/14409)) +- Fix rendering of some HTML templates (including emails). Introduced in v1.71.0. ([\#14448](https://github.com/matrix-org/synapse/issues/14448)) +- Fix a bug introduced in Synapse 1.70.0 where the background updates to add non-thread unique indexes on receipts could fail when upgrading from 1.67.0 or earlier. ([\#14453](https://github.com/matrix-org/synapse/issues/14453)) + + +Updates to the Docker image +--------------------------- + +- Add all Stream Writer worker types to `configure_workers_and_start.py`. ([\#14197](https://github.com/matrix-org/synapse/issues/14197)) +- Remove references to legacy worker types in the multi-worker Dockerfile. ([\#14294](https://github.com/matrix-org/synapse/issues/14294)) + + +Improved Documentation +---------------------- + +- Upload documentation PRs to Netlify. ([\#12947](https://github.com/matrix-org/synapse/issues/12947), [\#14370](https://github.com/matrix-org/synapse/issues/14370)) +- Add additional TURN server configuration example based on [eturnal](https://github.com/processone/eturnal) and adjust general TURN server doc structure. ([\#14293](https://github.com/matrix-org/synapse/issues/14293)) +- Add example on how to load balance /sync requests. Contributed by [aceArt](https://aceart.de). ([\#14297](https://github.com/matrix-org/synapse/issues/14297)) +- Edit sample Nginx reverse proxy configuration to use HTTP/1.1. Contributed by Brad Jones. ([\#14414](https://github.com/matrix-org/synapse/issues/14414)) + + +Deprecations and Removals +------------------------- + +- Remove support for PostgreSQL 10. ([\#14392](https://github.com/matrix-org/synapse/issues/14392), [\#14397](https://github.com/matrix-org/synapse/issues/14397)) + + +Internal Changes +---------------- + +- Run unit tests against Python 3.11. ([\#13812](https://github.com/matrix-org/synapse/issues/13812)) +- Add TLS support for generic worker endpoints. ([\#14128](https://github.com/matrix-org/synapse/issues/14128), [\#14455](https://github.com/matrix-org/synapse/issues/14455)) +- Switch to a maintained action for installing Rust in CI. ([\#14313](https://github.com/matrix-org/synapse/issues/14313)) +- Add override ability to `complement.sh` command line script to request certain types of workers. ([\#14324](https://github.com/matrix-org/synapse/issues/14324)) +- Enable testing of [MSC3874](https://github.com/matrix-org/matrix-spec-proposals/pull/3874) (filtering of `/messages` by relation type) in complement. 
([\#14339](https://github.com/matrix-org/synapse/issues/14339)) +- Concisely log a failure to resolve state due to missing `prev_events`. ([\#14346](https://github.com/matrix-org/synapse/issues/14346)) +- Use a maintained GitHub action to install Rust. ([\#14351](https://github.com/matrix-org/synapse/issues/14351)) +- Cleanup old worker datastore classes. Contributed by Nick @ Beeper (@fizzadar). ([\#14375](https://github.com/matrix-org/synapse/issues/14375)) +- Test against PostgreSQL 15 in CI. ([\#14394](https://github.com/matrix-org/synapse/issues/14394)) +- Remove unreachable code. ([\#14410](https://github.com/matrix-org/synapse/issues/14410)) +- Clean-up event persistence code. ([\#14411](https://github.com/matrix-org/synapse/issues/14411)) +- Update docstring to clarify that `get_partial_state_events_batch` does not just give you completely arbitrary partial-state events. ([\#14417](https://github.com/matrix-org/synapse/issues/14417)) +- Fix mypy errors introduced by bumping the locked version of `attrs` and `gitpython`. ([\#14433](https://github.com/matrix-org/synapse/issues/14433)) +- Make Dependabot only bump Rust deps in the lock file. ([\#14434](https://github.com/matrix-org/synapse/issues/14434)) +- Fix an incorrect stub return type for `PushRuleEvaluator.run`. ([\#14451](https://github.com/matrix-org/synapse/issues/14451)) +- Improve performance of `/context` in large rooms. ([\#14461](https://github.com/matrix-org/synapse/issues/14461)) + + +Synapse 1.71.0 (2022-11-08) +=========================== + +Please note that, as announced in the release notes for Synapse 1.69.0, legacy Prometheus metric names are now disabled by default. +They will be removed altogether in Synapse 1.73.0. +If not already done, server administrators should update their dashboards and alerting rules to avoid using the deprecated metric names. +See the [upgrade notes](https://matrix-org.github.io/synapse/v1.71/upgrade.html#upgrading-to-v1710) for more details. + +**Note:** in line with our [deprecation policy](https://matrix-org.github.io/synapse/latest/deprecation_policy.html) for platform dependencies, this will be the last release to support PostgreSQL 10, which reaches upstream end-of-life on November 10th, 2022. Future releases of Synapse will require PostgreSQL 11+. + +No significant changes since 1.71.0rc2. + + +Synapse 1.71.0rc2 (2022-11-04) +============================== + +Improved Documentation +---------------------- + +- Document the changes to monthly active user metrics due to deprecation of legacy Prometheus metric names. ([\#14358](https://github.com/matrix-org/synapse/issues/14358), [\#14360](https://github.com/matrix-org/synapse/issues/14360)) + + +Deprecations and Removals +------------------------- + +- Disable legacy Prometheus metric names by default. They can still be re-enabled for now, but they will be removed altogether in Synapse 1.73.0. ([\#14353](https://github.com/matrix-org/synapse/issues/14353)) + + +Internal Changes +---------------- + +- Run unit tests against Python 3.11. ([\#13812](https://github.com/matrix-org/synapse/issues/13812)) + + +Synapse 1.71.0rc1 (2022-11-01) +============================== + +Features +-------- + +- Support back-channel logouts from OpenID Connect providers. ([\#11414](https://github.com/matrix-org/synapse/issues/11414)) +- Allow use of Postgres and SQLite full-text search operators in search queries. 
([\#11635](https://github.com/matrix-org/synapse/issues/11635), [\#14310](https://github.com/matrix-org/synapse/issues/14310), [\#14311](https://github.com/matrix-org/synapse/issues/14311)) +- Implement [MSC3664](https://github.com/matrix-org/matrix-doc/pull/3664), Pushrules for relations. Contributed by Nico. ([\#11804](https://github.com/matrix-org/synapse/issues/11804)) +- Improve aesthetics of HTML templates. Note that these changes do not retroactively apply to templates which have been [customised](https://matrix-org.github.io/synapse/latest/templates.html#templates) by server admins. ([\#13652](https://github.com/matrix-org/synapse/issues/13652)) +- Enable write-ahead logging for SQLite installations. Contributed by [@asymmetric](https://github.com/asymmetric). ([\#13897](https://github.com/matrix-org/synapse/issues/13897)) +- Show erasure status when [listing users](https://matrix-org.github.io/synapse/latest/admin_api/user_admin_api.html#query-user-account) in the Admin API. ([\#14205](https://github.com/matrix-org/synapse/issues/14205)) +- Provide a specific error code when a `/sync` request provides a filter which doesn't represent a JSON object. ([\#14262](https://github.com/matrix-org/synapse/issues/14262)) + + +Bugfixes +-------- + +- Fix a long-standing bug where the `update_synapse_database` script could not be run with multiple databases. Contributed by @thefinn93 @ Beeper. ([\#13422](https://github.com/matrix-org/synapse/issues/13422)) +- Fix a bug which prevented setting an avatar on homeservers which have an explicit port in their `server_name` and have `max_avatar_size` and/or `allowed_avatar_mimetypes` configuration. Contributed by @ashfame. ([\#13927](https://github.com/matrix-org/synapse/issues/13927)) +- Check appservice user interest against the local users instead of all users in the room to align with [MSC3905](https://github.com/matrix-org/matrix-spec-proposals/pull/3905). ([\#13958](https://github.com/matrix-org/synapse/issues/13958)) +- Fix a long-standing bug where Synapse would accidentally include extra information in the response to [`PUT /_matrix/federation/v2/invite/{roomId}/{eventId}`](https://spec.matrix.org/v1.4/server-server-api/#put_matrixfederationv2inviteroomideventid). ([\#14064](https://github.com/matrix-org/synapse/issues/14064)) +- Fix a bug introduced in Synapse 1.64.0 where presence updates could be missing from `/sync` responses. ([\#14243](https://github.com/matrix-org/synapse/issues/14243)) +- Fix a bug introduced in Synapse 1.60.0 which caused an error to be logged when Synapse received a SIGHUP signal if debug logging was enabled. ([\#14258](https://github.com/matrix-org/synapse/issues/14258)) +- Prevent history insertion ([MSC2716](https://github.com/matrix-org/matrix-spec-proposals/pull/2716)) during a partial join ([MSC3706](https://github.com/matrix-org/matrix-spec-proposals/pull/3706)). ([\#14291](https://github.com/matrix-org/synapse/issues/14291)) +- Fix a bug introduced in Synapse 1.34.0 where device names would be returned via a federation user key query request when `allow_device_name_lookup_over_federation` was set to `false`. ([\#14304](https://github.com/matrix-org/synapse/issues/14304)) +- Fix a bug introduced in Synapse 0.34.0 where logs could include error spam when background processes are measured as taking a negative amount of time. 
([\#14323](https://github.com/matrix-org/synapse/issues/14323)) +- Fix a bug introduced in Synapse 1.70.0 where clients were unable to PUT new [dehydrated devices](https://github.com/matrix-org/matrix-spec-proposals/pull/2697). ([\#14336](https://github.com/matrix-org/synapse/issues/14336)) + + +Improved Documentation +---------------------- + +- Explain how to disable the use of [`trusted_key_servers`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#trusted_key_servers). ([\#13999](https://github.com/matrix-org/synapse/issues/13999)) +- Add workers settings to [configuration manual](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#individual-worker-configuration). ([\#14086](https://github.com/matrix-org/synapse/issues/14086)) +- Correct the name of the config option [`encryption_enabled_by_default_for_room_type`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#encryption_enabled_by_default_for_room_type). ([\#14110](https://github.com/matrix-org/synapse/issues/14110)) +- Update docstrings of `SynapseError` and `FederationError` to better describe what they are used for and the effects of using them. ([\#14191](https://github.com/matrix-org/synapse/issues/14191)) + + +Internal Changes +---------------- + +- Remove unused `@lru_cache` decorator. ([\#13595](https://github.com/matrix-org/synapse/issues/13595)) +- Save login tokens in database and prevent login token reuse. ([\#13844](https://github.com/matrix-org/synapse/issues/13844)) +- Refactor OIDC tests to better mimic an actual OIDC provider. ([\#13910](https://github.com/matrix-org/synapse/issues/13910)) +- Fix type annotation causing import time error in the Complement forking launcher. ([\#14084](https://github.com/matrix-org/synapse/issues/14084)) +- Refactor [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint to loop over federation destinations with standard pattern and error handling. ([\#14096](https://github.com/matrix-org/synapse/issues/14096)) +- Add initial power level event to batch of bulk persisted events when creating a new room. ([\#14228](https://github.com/matrix-org/synapse/issues/14228)) +- Refactor `/key/` endpoints to use `RestServlet` classes. ([\#14229](https://github.com/matrix-org/synapse/issues/14229)) +- Switch to using the `matrix-org/backend-meta` version of `triage-incoming` for new issues in CI. ([\#14230](https://github.com/matrix-org/synapse/issues/14230)) +- Build wheels on macOS 11, not 10.15. ([\#14249](https://github.com/matrix-org/synapse/issues/14249)) +- Add debugging to help diagnose lost device list updates. ([\#14268](https://github.com/matrix-org/synapse/issues/14268)) +- Add Rust cache to CI for `trial` runs. ([\#14287](https://github.com/matrix-org/synapse/issues/14287)) +- Improve type hinting of `RawHeaders`. ([\#14303](https://github.com/matrix-org/synapse/issues/14303)) +- Use Poetry 1.2.0 in the Twisted Trunk CI job. ([\#14305](https://github.com/matrix-org/synapse/issues/14305)) +
+Dependency updates + +Runtime: + +- Bump anyhow from 1.0.65 to 1.0.66. ([\#14278](https://github.com/matrix-org/synapse/issues/14278)) +- Bump jinja2 from 3.0.3 to 3.1.2. ([\#14271](https://github.com/matrix-org/synapse/issues/14271)) +- Bump prometheus-client from 0.14.0 to 0.15.0. ([\#14274](https://github.com/matrix-org/synapse/issues/14274)) +- Bump psycopg2 from 2.9.4 to 2.9.5. ([\#14331](https://github.com/matrix-org/synapse/issues/14331)) +- Bump pysaml2 from 7.1.2 to 7.2.1. ([\#14270](https://github.com/matrix-org/synapse/issues/14270)) +- Bump sentry-sdk from 1.5.11 to 1.10.1. ([\#14330](https://github.com/matrix-org/synapse/issues/14330)) +- Bump serde from 1.0.145 to 1.0.147. ([\#14277](https://github.com/matrix-org/synapse/issues/14277)) +- Bump serde_json from 1.0.86 to 1.0.87. ([\#14279](https://github.com/matrix-org/synapse/issues/14279)) + +Tooling and CI: + +- Bump black from 22.3.0 to 22.10.0. ([\#14328](https://github.com/matrix-org/synapse/issues/14328)) +- Bump flake8-bugbear from 21.3.2 to 22.9.23. ([\#14042](https://github.com/matrix-org/synapse/issues/14042)) +- Bump peaceiris/actions-gh-pages from 3.8.0 to 3.9.0. ([\#14276](https://github.com/matrix-org/synapse/issues/14276)) +- Bump peaceiris/actions-mdbook from 1.1.14 to 1.2.0. ([\#14275](https://github.com/matrix-org/synapse/issues/14275)) +- Bump setuptools-rust from 1.5.1 to 1.5.2. ([\#14273](https://github.com/matrix-org/synapse/issues/14273)) +- Bump twine from 3.8.0 to 4.0.1. ([\#14332](https://github.com/matrix-org/synapse/issues/14332)) +- Bump types-opentracing from 2.4.7 to 2.4.10. ([\#14133](https://github.com/matrix-org/synapse/issues/14133)) +- Bump types-requests from 2.28.11 to 2.28.11.2. ([\#14272](https://github.com/matrix-org/synapse/issues/14272)) +
+ +Synapse 1.70.1 (2022-10-28) +=========================== + +This release fixes some regressions that were discovered in 1.70.0. + +[#14300](https://github.com/matrix-org/synapse/issues/14300) +was previously reported to be a regression in 1.70.0 as well. However, we have +since concluded that it was limited to the reporter and thus have not needed +to include any fix for it in 1.70.1. + + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.70.0rc1 where the access tokens sent to application services as headers were malformed. Application services which were obtaining access tokens from query parameters were not affected. ([\#14301](https://github.com/matrix-org/synapse/issues/14301)) +- Fix room creation being rate limited too aggressively since Synapse v1.69.0. ([\#14314](https://github.com/matrix-org/synapse/issues/14314)) + + +Synapse 1.70.0 (2022-10-26) +=========================== + +No significant changes since 1.70.0rc2. + + +Synapse 1.70.0rc2 (2022-10-25) +============================== + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.70.0rc1 where the information returned from the `/threads` API could be stale when threaded events are redacted. ([\#14248](https://github.com/matrix-org/synapse/issues/14248)) +- Fix a bug introduced in Synapse 1.70.0rc1 leading to broken outbound federation when using Python 3.7. ([\#14280](https://github.com/matrix-org/synapse/issues/14280)) +- Fix a bug introduced in Synapse 1.70.0rc1 where edits to non-message events were aggregated by the homeserver. ([\#14283](https://github.com/matrix-org/synapse/issues/14283)) + + +Internal Changes +---------------- + +- Build ABI3 wheels for CPython. ([\#14253](https://github.com/matrix-org/synapse/issues/14253)) +- For the aarch64 architecture, only build wheels for CPython manylinux. ([\#14259](https://github.com/matrix-org/synapse/issues/14259)) + + +Synapse 1.70.0rc1 (2022-10-19) +============================== + +Features +-------- + +- Support for [MSC3856](https://github.com/matrix-org/matrix-spec-proposals/pull/3856): threads list API. ([\#13394](https://github.com/matrix-org/synapse/issues/13394), [\#14171](https://github.com/matrix-org/synapse/issues/14171), [\#14175](https://github.com/matrix-org/synapse/issues/14175)) +- Support for thread-specific notifications & receipts ([MSC3771](https://github.com/matrix-org/matrix-spec-proposals/pull/3771) and [MSC3773](https://github.com/matrix-org/matrix-spec-proposals/pull/3773)). ([\#13776](https://github.com/matrix-org/synapse/issues/13776), [\#13824](https://github.com/matrix-org/synapse/issues/13824), [\#13877](https://github.com/matrix-org/synapse/issues/13877), [\#13878](https://github.com/matrix-org/synapse/issues/13878), [\#14050](https://github.com/matrix-org/synapse/issues/14050), [\#14140](https://github.com/matrix-org/synapse/issues/14140), [\#14159](https://github.com/matrix-org/synapse/issues/14159), [\#14163](https://github.com/matrix-org/synapse/issues/14163), [\#14174](https://github.com/matrix-org/synapse/issues/14174), [\#14222](https://github.com/matrix-org/synapse/issues/14222)) +- Stop fetching missing `prev_events` after we already know their signature is invalid. ([\#13816](https://github.com/matrix-org/synapse/issues/13816)) +- Send application service access tokens as a header (and query parameter). Implements [MSC2832](https://github.com/matrix-org/matrix-spec-proposals/pull/2832). ([\#13996](https://github.com/matrix-org/synapse/issues/13996)) +- Ignore server ACL changes when generating pushes. 
Implements [MSC3786](https://github.com/matrix-org/matrix-spec-proposals/pull/3786). ([\#13997](https://github.com/matrix-org/synapse/issues/13997)) +- Experimental support for redirecting to an implementation of a [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886) HTTP rendezvous service. ([\#14018](https://github.com/matrix-org/synapse/issues/14018)) +- The `/relations` endpoint can now be used on workers. ([\#14028](https://github.com/matrix-org/synapse/issues/14028)) +- Advertise support for Matrix 1.3 and 1.4 on `/_matrix/client/versions`. ([\#14032](https://github.com/matrix-org/synapse/issues/14032), [\#14184](https://github.com/matrix-org/synapse/issues/14184)) +- Improve validation of request bodies for the [Device Management](https://spec.matrix.org/v1.4/client-server-api/#device-management) and [MSC2697 Device Dehydration](https://github.com/matrix-org/matrix-spec-proposals/pull/2697) client-server API endpoints. ([\#14054](https://github.com/matrix-org/synapse/issues/14054)) +- Experimental support for [MSC3874](https://github.com/matrix-org/matrix-spec-proposals/pull/3874): Filtering threads from the `/messages` endpoint. ([\#14148](https://github.com/matrix-org/synapse/issues/14148)) +- Improve the validation of the following PUT endpoints: [`/directory/room/{roomAlias}`](https://spec.matrix.org/v1.4/client-server-api/#put_matrixclientv3directoryroomroomalias), [`/directory/list/room/{roomId}`](https://spec.matrix.org/v1.4/client-server-api/#put_matrixclientv3directorylistroomroomid) and [`/directory/list/appservice/{networkId}/{roomId}`](https://spec.matrix.org/v1.4/application-service-api/#put_matrixclientv3directorylistappservicenetworkidroomid). ([\#14179](https://github.com/matrix-org/synapse/issues/14179)) +- Build and publish binary wheels for `aarch64` platforms. ([\#14212](https://github.com/matrix-org/synapse/issues/14212)) + + +Bugfixes +-------- + +- Prevent device names from appearing in device list updates in some situations when `allow_device_name_lookup_over_federation` is `false`. (This is not comprehensive: see [\#13114](https://github.com/matrix-org/synapse/issues/13114).) ([\#10015](https://github.com/matrix-org/synapse/issues/10015)) +- Fix a long-standing bug where redactions were not being sent over federation if we did not have the original event. ([\#13813](https://github.com/matrix-org/synapse/issues/13813)) +- Fix a long-standing bug where edits of non-`m.room.message` events would not be correctly bundled or have their new content applied. ([\#14034](https://github.com/matrix-org/synapse/issues/14034)) +- Fix a bug introduced in Synapse 1.53.0 when querying `/publicRooms` with both a `room_type` filter and a `third_party_instance_id`. ([\#14053](https://github.com/matrix-org/synapse/issues/14053)) +- Fix a bug introduced in Synapse 1.35.0 where errors parsing a `/send_join` or `/state` response would produce excessive, low-quality Sentry events. ([\#14065](https://github.com/matrix-org/synapse/issues/14065)) +- Fix a long-standing bug where Synapse would error on the optional 'invite_room_state' field not being provided to [`PUT /_matrix/federation/v2/invite/{roomId}/{eventId}`](https://spec.matrix.org/v1.4/server-server-api/#put_matrixfederationv2inviteroomideventid). ([\#14083](https://github.com/matrix-org/synapse/issues/14083)) +- Fix a bug where invalid oEmbed fields would cause the entire response to be discarded. Introduced in Synapse 1.18.0. 
([\#14089](https://github.com/matrix-org/synapse/issues/14089)) +- Fix a bug introduced in Synapse 1.37.0 in which an incorrect key name was used for sending and receiving room metadata when knocking on a room. ([\#14102](https://github.com/matrix-org/synapse/issues/14102)) +- Fix a bug introduced in v1.69.0rc1 where the joined hosts for a given event were not being properly cached. ([\#14125](https://github.com/matrix-org/synapse/issues/14125)) +- Fix a bug introduced in Synapse 1.30.0 where purging and rejoining a room without restarting in-between would result in a broken room. ([\#14161](https://github.com/matrix-org/synapse/issues/14161), [\#14164](https://github.com/matrix-org/synapse/issues/14164)) +- Fix [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint returning potentially inaccurate closest events with `outliers` present. ([\#14215](https://github.com/matrix-org/synapse/issues/14215)) + + +Updates to the Docker image +--------------------------- + +- Update the version of frozendict in Docker images and Debian packages from 2.3.3 to 2.3.4, which may fix memory leak problems. ([\#13955](https://github.com/matrix-org/synapse/issues/13955)) +- Use the `minimal` Rust profile when building Synapse. ([\#14141](https://github.com/matrix-org/synapse/issues/14141)) +- Prevent a class of database sharding errors when using `Dockerfile-workers` to spawn multiple instances of the same worker. Contributed by Jason Little. ([\#14165](https://github.com/matrix-org/synapse/issues/14165)) +- Set `LD_PRELOAD` to use jemalloc memory allocator in Dockerfile-workers. ([\#14182](https://github.com/matrix-org/synapse/issues/14182)) +- Fix pre-startup logging being lost when using the `Dockerfile-workers` image. ([\#14195](https://github.com/matrix-org/synapse/issues/14195)) + + +Improved Documentation +---------------------- + +- Add sample worker files for `pusher` and `federation_sender`. ([\#14077](https://github.com/matrix-org/synapse/issues/14077)) +- Improve the listener example on the metrics documentation. ([\#14078](https://github.com/matrix-org/synapse/issues/14078)) +- Expand Google OpenID Connect example config to map email attribute. Contributed by @ptman. ([\#14081](https://github.com/matrix-org/synapse/issues/14081)) +- Document that ending a changelog entry in a full stop or exclamation mark is not optional. ([\#14087](https://github.com/matrix-org/synapse/issues/14087)) +- Fix links to jemalloc documentation, which were broken in [#13491](https://github.com/matrix-org/synapse/pull/14124). ([\#14093](https://github.com/matrix-org/synapse/issues/14093)) +- Remove an unneeded `replication` listener from the docker compose example. ([\#14107](https://github.com/matrix-org/synapse/issues/14107)) +- Fix name of `alias_creation_rules` option in the config manual documentation. ([\#14124](https://github.com/matrix-org/synapse/issues/14124)) +- Clarify comment on event contexts. ([\#14145](https://github.com/matrix-org/synapse/issues/14145)) +- Fix dead link to the [Admin Registration API](https://matrix-org.github.io/synapse/latest/admin_api/register_api.html). ([\#14189](https://github.com/matrix-org/synapse/issues/14189)) + + +Deprecations and Removals +------------------------- + +- Remove the experimental implementation of [MSC3772](https://github.com/matrix-org/matrix-spec-proposals/pull/3772). 
([\#14094](https://github.com/matrix-org/synapse/issues/14094)) +- Remove the unstable identifier for [MSC3715](https://github.com/matrix-org/matrix-doc/pull/3715). ([\#14106](https://github.com/matrix-org/synapse/issues/14106), [\#14146](https://github.com/matrix-org/synapse/issues/14146)) + + +Internal Changes +---------------- + +- Optimise queries used to get a user's rooms during sync. Contributed by Nick @ Beeper (@fizzadar). ([\#13991](https://github.com/matrix-org/synapse/issues/13991)) +- Update authlib from 0.15.5 to 1.1.0. ([\#14006](https://github.com/matrix-org/synapse/issues/14006)) +- Make `parse_server_name` consistent in handling invalid server names. ([\#14007](https://github.com/matrix-org/synapse/issues/14007)) +- Don't repeatedly wake up the same users for batched events. ([\#14033](https://github.com/matrix-org/synapse/issues/14033)) +- Complement test image: capture logs from nginx. ([\#14063](https://github.com/matrix-org/synapse/issues/14063)) +- Don't create noisy Sentry events when a requester drops connection to the metrics server mid-request. ([\#14072](https://github.com/matrix-org/synapse/issues/14072)) +- Run the integration test suites with the asyncio reactor enabled in CI. ([\#14092](https://github.com/matrix-org/synapse/issues/14092)) +- Add debug logs to figure out why an event was filtered out of the client response. ([\#14095](https://github.com/matrix-org/synapse/issues/14095)) +- Indicate what endpoint came back with a JSON response we were unable to parse. ([\#14097](https://github.com/matrix-org/synapse/issues/14097)) +- Break up calls to fetch rooms for many users. Contributed by Nick @ Beeper (@fizzadar). ([\#14109](https://github.com/matrix-org/synapse/issues/14109)) +- Faster joins: prioritise the server we joined by when restarting a partial join resync. ([\#14126](https://github.com/matrix-org/synapse/issues/14126)) +- Cache Rust build cache when building docker images. ([\#14130](https://github.com/matrix-org/synapse/issues/14130)) +- Enable dependabot for Rust dependencies. ([\#14132](https://github.com/matrix-org/synapse/issues/14132)) +- Bump typing-extensions from 4.1.1 to 4.4.0. ([\#14134](https://github.com/matrix-org/synapse/issues/14134)) +- Use the `minimal` Rust profile when building Synapse. ([\#14141](https://github.com/matrix-org/synapse/issues/14141)) +- Remove unused configuration code. ([\#14142](https://github.com/matrix-org/synapse/issues/14142)) +- Prepare for the [`gotestfmt` repository move](https://github.com/GoTestTools/gotestfmt/discussions/46). ([\#14144](https://github.com/matrix-org/synapse/issues/14144)) +- Invalidate rooms for user caches on replicated event, fix sync cache race in synapse workers. Contributed by Nick @ Beeper (@fizzadar). ([\#14155](https://github.com/matrix-org/synapse/issues/14155)) +- Enable url previews when testing with complement. ([\#14198](https://github.com/matrix-org/synapse/issues/14198)) +- When authenticating batched events, check for auth events in batch as well as DB. ([\#14214](https://github.com/matrix-org/synapse/issues/14214)) +- Update CI config to avoid GitHub Actions deprecation warnings. ([\#14216](https://github.com/matrix-org/synapse/issues/14216), [\#14224](https://github.com/matrix-org/synapse/issues/14224)) +- Update dependency requirements to allow building with poetry-core 1.3.2. 
([\#14217](https://github.com/matrix-org/synapse/issues/14217)) +- Rename the `cache_memory` extra to `cache-memory`, for compatibility with poetry-core 1.3.0 and [PEP 685](https://peps.python.org/pep-0685/). From-source installations using this extra will need to install using the new name. ([\#14221](https://github.com/matrix-org/synapse/issues/14221)) +- Specify dev-dependencies using lower bounds, to reduce the likelihood of a dependabot merge conflict. The lockfile continues to pin to specific versions. ([\#14227](https://github.com/matrix-org/synapse/issues/14227)) + + +Synapse 1.69.0 (2022-10-17) +=========================== + +Please note that legacy Prometheus metric names are now deprecated and will be removed in Synapse 1.73.0. +Server administrators should update their dashboards and alerting rules to avoid using the deprecated metric names. +See the [upgrade notes](https://matrix-org.github.io/synapse/v1.69/upgrade.html#upgrading-to-v1690) for more details. + + +No significant changes since 1.69.0rc4. + + +Synapse 1.69.0rc4 (2022-10-14) +============================== + +Bugfixes +-------- + +- Fix poor performance of the `event_push_backfill_thread_id` background update, which was introduced in Synapse 1.68.0rc1. ([\#14172](https://github.com/matrix-org/synapse/issues/14172), [\#14181](https://github.com/matrix-org/synapse/issues/14181)) + + +Updates to the Docker image +--------------------------- + +- Fix docker build OOMing in CI for arm64 builds. ([\#14173](https://github.com/matrix-org/synapse/issues/14173)) + + +Synapse 1.69.0rc3 (2022-10-12) +============================== + +Bugfixes +-------- + +- Fix an issue with Docker images causing the Rust dependencies to not be pinned correctly. Introduced in v1.68.0. ([\#14129](https://github.com/matrix-org/synapse/issues/14129)) +- Fix a bug introduced in Synapse 1.69.0rc1 which would cause registration replication requests to fail if the worker sending the request is not running Synapse 1.69. ([\#14135](https://github.com/matrix-org/synapse/issues/14135)) +- Fix error in background update when rotating existing notifications. Introduced in v1.69.0rc2. ([\#14138](https://github.com/matrix-org/synapse/issues/14138)) + + +Internal Changes +---------------- + +- Rename the `url_preview` extra to `url-preview`, for compatibility with poetry-core 1.3.0 and [PEP 685](https://peps.python.org/pep-0685/). From-source installations using this extra will need to install using the new name. ([\#14085](https://github.com/matrix-org/synapse/issues/14085)) + + +Synapse 1.69.0rc2 (2022-10-06) +============================== + +Deprecations and Removals +------------------------- + +- Deprecate the `generate_short_term_login_token` method in favor of an async `create_login_token` method in the Module API. ([\#13842](https://github.com/matrix-org/synapse/issues/13842)) + + +Internal Changes +---------------- + +- Ensure Synapse v1.69 works with upcoming database changes in v1.70. ([\#14045](https://github.com/matrix-org/synapse/issues/14045)) +- Fix a bug introduced in Synapse v1.68.0 where messages could not be sent in rooms with non-integer `notifications` power level. ([\#14073](https://github.com/matrix-org/synapse/issues/14073)) +- Temporarily pin build-system requirements to work around an incompatibility with poetry-core 1.3.0. This will be reverted before the v1.69.0 release proper, see [\#14079](https://github.com/matrix-org/synapse/issues/14079). 
([\#14080](https://github.com/matrix-org/synapse/issues/14080)) + + +Synapse 1.69.0rc1 (2022-10-04) +============================== + +Features +-------- + +- Allow application services to set the `origin_server_ts` of a state event by providing the query parameter `ts` in [`PUT /_matrix/client/r0/rooms/{roomId}/state/{eventType}/{stateKey}`](https://spec.matrix.org/v1.4/client-server-api/#put_matrixclientv3roomsroomidstateeventtypestatekey), per [MSC3316](https://github.com/matrix-org/matrix-doc/pull/3316). Contributed by @lukasdenk. ([\#11866](https://github.com/matrix-org/synapse/issues/11866)) +- Allow server admins to require a manual approval process before new accounts can be used (using [MSC3866](https://github.com/matrix-org/matrix-spec-proposals/pull/3866)). ([\#13556](https://github.com/matrix-org/synapse/issues/13556)) +- Exponentially back off from backfilling the same event over and over. ([\#13635](https://github.com/matrix-org/synapse/issues/13635), [\#13936](https://github.com/matrix-org/synapse/issues/13936)) +- Add cache invalidation across workers to module API. ([\#13667](https://github.com/matrix-org/synapse/issues/13667), [\#13947](https://github.com/matrix-org/synapse/issues/13947)) +- Experimental implementation of [MSC3882](https://github.com/matrix-org/matrix-spec-proposals/pull/3882) to allow an existing device/session to generate a login token for use on a new device/session. ([\#13722](https://github.com/matrix-org/synapse/issues/13722), [\#13868](https://github.com/matrix-org/synapse/issues/13868)) +- Experimental support for thread-specific receipts ([MSC3771](https://github.com/matrix-org/matrix-spec-proposals/pull/3771)). ([\#13782](https://github.com/matrix-org/synapse/issues/13782), [\#13893](https://github.com/matrix-org/synapse/issues/13893), [\#13932](https://github.com/matrix-org/synapse/issues/13932), [\#13937](https://github.com/matrix-org/synapse/issues/13937), [\#13939](https://github.com/matrix-org/synapse/issues/13939)) +- Add experimental support for [MSC3881: Remotely toggle push notifications for another client](https://github.com/matrix-org/matrix-spec-proposals/pull/3881). ([\#13799](https://github.com/matrix-org/synapse/issues/13799), [\#13831](https://github.com/matrix-org/synapse/issues/13831), [\#13860](https://github.com/matrix-org/synapse/issues/13860)) +- Keep track of when an event pulled over federation fails its signature check so we can intelligently back off in the future. ([\#13815](https://github.com/matrix-org/synapse/issues/13815)) +- Improve validation for the unspecced, internal-only `_matrix/client/unstable/add_threepid/msisdn/submit_token` endpoint. ([\#13832](https://github.com/matrix-org/synapse/issues/13832)) +- Faster remote room joins: record _when_ we first partial-join to a room. ([\#13892](https://github.com/matrix-org/synapse/issues/13892)) +- Support a `dir` parameter on the `/relations` endpoint per [MSC3715](https://github.com/matrix-org/matrix-doc/pull/3715). ([\#13920](https://github.com/matrix-org/synapse/issues/13920)) +- Ask mail servers receiving emails from Synapse to not send automatic replies (e.g. out-of-office responses). ([\#13957](https://github.com/matrix-org/synapse/issues/13957)) + + +Bugfixes +-------- + +- Send push notifications for invites received over federation. 
([\#13719](https://github.com/matrix-org/synapse/issues/13719), [\#14014](https://github.com/matrix-org/synapse/issues/14014)) +- Fix a long-standing bug where typing events would be accepted from remote servers not present in a room. Also fix a bug where incoming typing events would cause other incoming events to get stuck during a fast join. ([\#13830](https://github.com/matrix-org/synapse/issues/13830)) +- Fix a bug introduced in Synapse v1.53.0 where the experimental implementation of [MSC3715](https://github.com/matrix-org/matrix-spec-proposals/pull/3715) would give incorrect results when paginating forward. ([\#13840](https://github.com/matrix-org/synapse/issues/13840)) +- Fix access token leak to logs from proxy agent. ([\#13855](https://github.com/matrix-org/synapse/issues/13855)) +- Fix `have_seen_event` cache not being invalidated after we persist an event, which caused inefficiencies such as extra `/state` federation calls. ([\#13863](https://github.com/matrix-org/synapse/issues/13863)) +- Faster room joins: Fix a bug introduced in 1.66.0 where an error would be logged when syncing after joining a room. ([\#13872](https://github.com/matrix-org/synapse/issues/13872)) +- Fix a bug introduced in 1.66.0 where some required fields in the pushrules sent to clients were not present anymore. Contributed by Nico. ([\#13904](https://github.com/matrix-org/synapse/issues/13904)) +- Fix packaging to include `Cargo.lock` in `sdist`. ([\#13909](https://github.com/matrix-org/synapse/issues/13909)) +- Fix a long-standing bug where device updates could cause delays sending out to-device messages over federation. ([\#13922](https://github.com/matrix-org/synapse/issues/13922)) +- Fix a bug introduced in v1.68.0 where Synapse would require `setuptools_rust` at runtime, even though the package is only required at build time. ([\#13952](https://github.com/matrix-org/synapse/issues/13952)) +- Fix a long-standing bug where `POST /_matrix/client/v3/keys/query` requests could result in excessively large SQL queries. ([\#13956](https://github.com/matrix-org/synapse/issues/13956)) +- Fix a performance regression in the `get_users_in_room` database query. Introduced in v1.67.0. ([\#13972](https://github.com/matrix-org/synapse/issues/13972)) +- Fix a bug introduced in v1.68.0 where the Rust extension wasn't built in `release` mode when using `poetry install`. ([\#14009](https://github.com/matrix-org/synapse/issues/14009)) +- Do not return an unspecified `original_event` field when using the stable `/relations` endpoint. Introduced in Synapse v1.57.0. ([\#14025](https://github.com/matrix-org/synapse/issues/14025)) +- Correctly handle a race with device lists when a remote user leaves during a partial join. ([\#13885](https://github.com/matrix-org/synapse/issues/13885)) +- Correctly handle sending local device list updates to remote servers during a partial join. ([\#13934](https://github.com/matrix-org/synapse/issues/13934)) + + +Improved Documentation +---------------------- + +- Add `worker_main_http_uri` for the worker generator bash script. ([\#13772](https://github.com/matrix-org/synapse/issues/13772)) +- Update URL for the NixOS module for Synapse. ([\#13818](https://github.com/matrix-org/synapse/issues/13818)) +- Fix a mistake in sso_mapping_providers.md: `map_user_attributes` is expected to return `display_name`, not `displayname`. 
([\#13836](https://github.com/matrix-org/synapse/issues/13836)) +- Fix a cross-link from the registration admin API to the `registration_shared_secret` configuration documentation. ([\#13870](https://github.com/matrix-org/synapse/issues/13870)) +- Update the man page for the `hash_password` script to correct the default number of bcrypt rounds performed. ([\#13911](https://github.com/matrix-org/synapse/issues/13911), [\#13930](https://github.com/matrix-org/synapse/issues/13930)) +- Emphasize the right reasons when to use `(room_id, event_id)` in a database schema. ([\#13915](https://github.com/matrix-org/synapse/issues/13915)) +- Add instruction to contributing guide for running unit tests in parallel. Contributed by @ashfame. ([\#13928](https://github.com/matrix-org/synapse/issues/13928)) +- Clarify that the `auto_join_rooms` config option can also be used with Space aliases. ([\#13931](https://github.com/matrix-org/synapse/issues/13931)) +- Add some cross references to worker documentation. ([\#13974](https://github.com/matrix-org/synapse/issues/13974)) +- Linkify urls in config documentation. ([\#14003](https://github.com/matrix-org/synapse/issues/14003)) + + +Deprecations and Removals +------------------------- + +- Remove the `complete_sso_login` method from the Module API which was deprecated in Synapse 1.13.0. ([\#13843](https://github.com/matrix-org/synapse/issues/13843)) +- Announce that legacy metric names are deprecated, will be turned off by default in Synapse v1.71.0 and removed altogether in Synapse v1.73.0. See the upgrade notes for more information. ([\#14024](https://github.com/matrix-org/synapse/issues/14024)) + + +Internal Changes +---------------- + +- Speed up creation of DM rooms. ([\#13487](https://github.com/matrix-org/synapse/issues/13487), [\#13800](https://github.com/matrix-org/synapse/issues/13800)) +- Port push rules to using Rust. ([\#13768](https://github.com/matrix-org/synapse/issues/13768), [\#13838](https://github.com/matrix-org/synapse/issues/13838), [\#13889](https://github.com/matrix-org/synapse/issues/13889)) +- Optimise get rooms for user calls. Contributed by Nick @ Beeper (@fizzadar). ([\#13787](https://github.com/matrix-org/synapse/issues/13787)) +- Update the script which makes full schema dumps. ([\#13792](https://github.com/matrix-org/synapse/issues/13792)) +- Use shared methods for cache invalidation when persisting events, remove duplicate codepaths. Contributed by Nick @ Beeper (@fizzadar). ([\#13796](https://github.com/matrix-org/synapse/issues/13796)) +- Improve the `synapse.api.auth.Auth` mock used in unit tests. ([\#13809](https://github.com/matrix-org/synapse/issues/13809)) +- Faster Remote Room Joins: tell remote homeservers that we are unable to authorise them if they query a room which has partial state on our server. ([\#13823](https://github.com/matrix-org/synapse/issues/13823)) +- Carry IdP Session IDs through user-mapping sessions. ([\#13839](https://github.com/matrix-org/synapse/issues/13839)) +- Fix the release script not publishing binary wheels. ([\#13850](https://github.com/matrix-org/synapse/issues/13850)) +- Raise issue if complement fails with latest deps. ([\#13859](https://github.com/matrix-org/synapse/issues/13859)) +- Correct the comments in the complement dockerfile. ([\#13867](https://github.com/matrix-org/synapse/issues/13867)) +- Create a new snapshot of the database schema. 
([\#13873](https://github.com/matrix-org/synapse/issues/13873)) +- Faster room joins: Send device list updates to most servers in rooms with partial state. ([\#13874](https://github.com/matrix-org/synapse/issues/13874), [\#14013](https://github.com/matrix-org/synapse/issues/14013)) +- Add comments to the Prometheus recording rules to make it clear which set of rules you need for Grafana or Prometheus Console. ([\#13876](https://github.com/matrix-org/synapse/issues/13876)) +- Only pull relevant backfill points from the database based on the current depth and limit (instead of all) every time we want to `/backfill`. ([\#13879](https://github.com/matrix-org/synapse/issues/13879)) +- Faster room joins: Avoid waiting for full state when processing `/keys/changes` requests. ([\#13888](https://github.com/matrix-org/synapse/issues/13888)) +- Improve backfill robustness by trying more servers when we get a `4xx` error back. ([\#13890](https://github.com/matrix-org/synapse/issues/13890)) +- Fix mypy errors with canonicaljson 1.6.3. ([\#13905](https://github.com/matrix-org/synapse/issues/13905)) +- Faster remote room joins: correctly handle remote device list updates during a partial join. ([\#13913](https://github.com/matrix-org/synapse/issues/13913)) +- Complement image: propagate SIGTERM to all workers. ([\#13914](https://github.com/matrix-org/synapse/issues/13914)) +- Update an inaccurate comment in Synapse's upsert database helper. ([\#13924](https://github.com/matrix-org/synapse/issues/13924)) +- Update mypy (0.950 -> 0.981) and mypy-zope (0.3.7 -> 0.3.11). ([\#13925](https://github.com/matrix-org/synapse/issues/13925), [\#13993](https://github.com/matrix-org/synapse/issues/13993)) +- Use dedicated `get_local_users_in_room(room_id)` function to find local users when calculating users to copy over during a room upgrade. ([\#13960](https://github.com/matrix-org/synapse/issues/13960)) +- Refactor language in user directory `_track_user_joined_room` code to make it more clear that we use both local and remote users. ([\#13966](https://github.com/matrix-org/synapse/issues/13966)) +- Revert catch-all exceptions being recorded as event pull attempt failures (only handle what we know about). ([\#13969](https://github.com/matrix-org/synapse/issues/13969)) +- Speed up calculating push actions in large rooms. ([\#13973](https://github.com/matrix-org/synapse/issues/13973), [\#13992](https://github.com/matrix-org/synapse/issues/13992)) +- Enable update notifications from GitHub's dependabot. ([\#13976](https://github.com/matrix-org/synapse/issues/13976)) +- Prototype a workflow to automatically add changelogs to dependabot PRs. ([\#13998](https://github.com/matrix-org/synapse/issues/13998), [\#14011](https://github.com/matrix-org/synapse/issues/14011), [\#14017](https://github.com/matrix-org/synapse/issues/14017), [\#14021](https://github.com/matrix-org/synapse/issues/14021), [\#14027](https://github.com/matrix-org/synapse/issues/14027)) +- Fix type annotations to be compatible with new annotations in development versions of twisted. ([\#14012](https://github.com/matrix-org/synapse/issues/14012)) +- Clear out stale entries in `event_push_actions_staging` table. ([\#14020](https://github.com/matrix-org/synapse/issues/14020)) +- Bump versions of GitHub actions. 
([\#13978](https://github.com/matrix-org/synapse/issues/13978), [\#13979](https://github.com/matrix-org/synapse/issues/13979), [\#13980](https://github.com/matrix-org/synapse/issues/13980), [\#13982](https://github.com/matrix-org/synapse/issues/13982), [\#14015](https://github.com/matrix-org/synapse/issues/14015), [\#14019](https://github.com/matrix-org/synapse/issues/14019), [\#14022](https://github.com/matrix-org/synapse/issues/14022), [\#14023](https://github.com/matrix-org/synapse/issues/14023)) + + +Synapse 1.68.0 (2022-09-27) +=========================== + +Please note that Synapse will now refuse to start if configured to use a version of SQLite older than 3.27. + +In addition, please note that installing Synapse from a source checkout now requires a recent Rust compiler. +Those using packages will not be affected. On most platforms, installing with `pip install matrix-synapse` will not be affected. +See the [upgrade notes](https://matrix-org.github.io/synapse/v1.68/upgrade.html#upgrading-to-v1680). + +Bugfixes +-------- + +- Fix packaging to include `Cargo.lock` in `sdist`. ([\#13909](https://github.com/matrix-org/synapse/issues/13909)) + + +Synapse 1.68.0rc2 (2022-09-23) +============================== + +Bugfixes +-------- + +- Fix building from packaged sdist. Broken in v1.68.0rc1. ([\#13866](https://github.com/matrix-org/synapse/issues/13866)) + + +Internal Changes +---------------- + +- Fix the release script not publishing binary wheels. ([\#13850](https://github.com/matrix-org/synapse/issues/13850)) +- Lower minimum supported rustc version to 1.58.1. ([\#13857](https://github.com/matrix-org/synapse/issues/13857)) +- Lock Rust dependencies' versions. ([\#13858](https://github.com/matrix-org/synapse/issues/13858)) + + +Synapse 1.68.0rc1 (2022-09-20) +============================== + +Features +-------- + +- Keep track of when we fail to process a pulled event over federation so we can intelligently back off in the future. ([\#13589](https://github.com/matrix-org/synapse/issues/13589), [\#13814](https://github.com/matrix-org/synapse/issues/13814)) +- Add an [admin API endpoint to fetch messages within a particular window of time](https://matrix-org.github.io/synapse/v1.68/admin_api/rooms.html#room-messages-api). ([\#13672](https://github.com/matrix-org/synapse/issues/13672)) +- Add an [admin API endpoint to find a user based on their external ID in an auth provider](https://matrix-org.github.io/synapse/v1.68/admin_api/user_admin_api.html#find-a-user-based-on-their-id-in-an-auth-provider). ([\#13810](https://github.com/matrix-org/synapse/issues/13810)) +- Cancel the processing of key query requests when they time out. 
([\#13680](https://github.com/matrix-org/synapse/issues/13680)) +- Improve validation of request bodies for the following client-server API endpoints: [`/account/3pid/msisdn/requestToken`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidmsisdnrequesttoken), [`/org.matrix.msc3720/account_status`](https://github.com/matrix-org/matrix-spec-proposals/blob/babolivier/user_status/proposals/3720-account-status.md#post-_matrixclientv1account_status), [`/account/3pid/add`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidadd), [`/account/3pid/bind`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidbind), [`/account/3pid/delete`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3piddelete) and [`/account/3pid/unbind`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidunbind). ([\#13687](https://github.com/matrix-org/synapse/issues/13687), [\#13736](https://github.com/matrix-org/synapse/issues/13736)) +- Document the timestamp when a user accepts the consent, if [consent tracking](https://matrix-org.github.io/synapse/latest/consent_tracking.html) is used. ([\#13741](https://github.com/matrix-org/synapse/issues/13741)) +- Add a `listeners[x].request_id_header` configuration option to specify which request header to extract and use as the request ID in order to correlate requests from a reverse proxy. ([\#13801](https://github.com/matrix-org/synapse/issues/13801)) + + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.41.0 where the `/hierarchy` API returned non-standard information (a `room_id` field under each entry in `children_state`). ([\#13506](https://github.com/matrix-org/synapse/issues/13506)) +- Fix a long-standing bug where previously rejected events could end up in room state because they pass auth checks given the current state of the room. ([\#13723](https://github.com/matrix-org/synapse/issues/13723)) +- Fix a long-standing bug where Synapse fails to start if a signing key file contains an empty line. ([\#13738](https://github.com/matrix-org/synapse/issues/13738)) +- Fix a long-standing bug where Synapse would fail to handle malformed user IDs or room aliases gracefully in certain cases. ([\#13746](https://github.com/matrix-org/synapse/issues/13746)) +- Fix a long-standing bug where device lists would remain cached when remote users left and rejoined the last room shared with the local homeserver. ([\#13749](https://github.com/matrix-org/synapse/issues/13749), [\#13826](https://github.com/matrix-org/synapse/issues/13826)) +- Fix a long-standing bug that could cause stale caches in some rare cases on the first startup of Synapse with replication. ([\#13766](https://github.com/matrix-org/synapse/issues/13766)) +- Fix a long-standing spec compliance bug where Synapse would accept a trailing slash on the end of `/get_missing_events` federation requests. ([\#13789](https://github.com/matrix-org/synapse/issues/13789)) +- Delete associated data from `event_failed_pull_attempts`, `insertion_events` and `insertion_event_extremities` when purging the room. ([\#13825](https://github.com/matrix-org/synapse/issues/13825)) + + +Improved Documentation +---------------------- + +- Note that `libpq` is required on ARM-based Macs. 
([\#13480](https://github.com/matrix-org/synapse/issues/13480)) +- Fix a mistake in the config manual introduced in Synapse 1.22.0: the `event_cache_size` _is_ scaled by `caches.global_factor`. ([\#13726](https://github.com/matrix-org/synapse/issues/13726)) +- Fix a typo in the documentation for the login ratelimiting configuration. ([\#13727](https://github.com/matrix-org/synapse/issues/13727)) +- Define Synapse's compatibility policy for SQLite versions. ([\#13728](https://github.com/matrix-org/synapse/issues/13728)) +- Add docs for the common fix of deleting the `matrix_synapse.egg-info/` directory to resolve Python dependency problems. ([\#13785](https://github.com/matrix-org/synapse/issues/13785)) +- Update request log format documentation to mention the format used when the authenticated user is controlling another user. ([\#13794](https://github.com/matrix-org/synapse/issues/13794)) + + +Deprecations and Removals +------------------------- + +- Synapse will now refuse to start if configured to use SQLite < 3.27. ([\#13760](https://github.com/matrix-org/synapse/issues/13760)) +- Don't include redundant `prev_state` in new events. Contributed by Denis Kariakin (@dakariakin). ([\#13791](https://github.com/matrix-org/synapse/issues/13791)) + + +Internal Changes +---------------- + +- Add a stub Rust crate. ([\#12595](https://github.com/matrix-org/synapse/issues/12595), [\#13734](https://github.com/matrix-org/synapse/issues/13734), [\#13735](https://github.com/matrix-org/synapse/issues/13735), [\#13743](https://github.com/matrix-org/synapse/issues/13743), [\#13763](https://github.com/matrix-org/synapse/issues/13763), [\#13769](https://github.com/matrix-org/synapse/issues/13769), [\#13778](https://github.com/matrix-org/synapse/issues/13778)) +- Bump the minimum dependency of `matrix_common` to 1.3.0 to make use of the `MXCUri` class. Use `MXCUri` to simplify media retention test code. ([\#13162](https://github.com/matrix-org/synapse/issues/13162)) +- Add and populate the `event_stream_ordering` column on the `receipts` table for future optimisation of push action processing. Contributed by Nick @ Beeper (@fizzadar). ([\#13703](https://github.com/matrix-org/synapse/issues/13703)) +- Rename the `EventFormatVersions` enum values so that they line up with room version numbers. ([\#13706](https://github.com/matrix-org/synapse/issues/13706)) +- Update trial old deps CI to use Poetry 1.2.0. ([\#13707](https://github.com/matrix-org/synapse/issues/13707), [\#13725](https://github.com/matrix-org/synapse/issues/13725)) +- Add experimental configuration option to allow disabling legacy Prometheus metric names. ([\#13714](https://github.com/matrix-org/synapse/issues/13714), [\#13717](https://github.com/matrix-org/synapse/issues/13717), [\#13718](https://github.com/matrix-org/synapse/issues/13718)) +- Fix typechecking with latest types-jsonschema. ([\#13724](https://github.com/matrix-org/synapse/issues/13724)) +- Strip number suffix from instance name to consolidate services that traces are spread over. ([\#13729](https://github.com/matrix-org/synapse/issues/13729)) +- Instrument `get_metadata_for_events` for understandable traces in Jaeger. ([\#13730](https://github.com/matrix-org/synapse/issues/13730)) +- Remove old queries to join room memberships to current state events. Contributed by Nick @ Beeper (@fizzadar). ([\#13745](https://github.com/matrix-org/synapse/issues/13745)) +- Avoid raising an error due to malformed user IDs in `get_current_hosts_in_room`. 
Malformed user IDs cannot currently join a room, so this error would not be hit. ([\#13748](https://github.com/matrix-org/synapse/issues/13748)) +- Update the docstrings for `get_users_in_room` and `get_current_hosts_in_room` to explain the impact of partial state. ([\#13750](https://github.com/matrix-org/synapse/issues/13750)) +- Use an additional database query when persisting receipts. ([\#13752](https://github.com/matrix-org/synapse/issues/13752)) +- Preparatory work for storing thread IDs for notifications and receipts. ([\#13753](https://github.com/matrix-org/synapse/issues/13753)) +- Re-type hint some collections as read-only. ([\#13754](https://github.com/matrix-org/synapse/issues/13754)) +- Remove unused Prometheus recording rules from `synapse-v2.rules` and add comments describing where the rest are used. ([\#13756](https://github.com/matrix-org/synapse/issues/13756)) +- Add a check for editable installs if the Rust library needs rebuilding. ([\#13759](https://github.com/matrix-org/synapse/issues/13759)) +- Tag traces with the instance name to be able to easily jump into the right logs and filter traces by instance. ([\#13761](https://github.com/matrix-org/synapse/issues/13761)) +- Concurrently fetch room push actions when calculating badge counts. Contributed by Nick @ Beeper (@fizzadar). ([\#13765](https://github.com/matrix-org/synapse/issues/13765)) +- Update the script which makes full schema dumps. ([\#13770](https://github.com/matrix-org/synapse/issues/13770)) +- Deduplicate `is_server_notices_room`. ([\#13780](https://github.com/matrix-org/synapse/issues/13780)) +- Simplify the dependency DAG in the tests workflow. ([\#13784](https://github.com/matrix-org/synapse/issues/13784)) +- Remove an old, incorrect migration file. ([\#13788](https://github.com/matrix-org/synapse/issues/13788)) +- Remove unused method in `synapse.api.auth.Auth`. ([\#13795](https://github.com/matrix-org/synapse/issues/13795)) +- Fix a memory leak when running the unit tests. ([\#13798](https://github.com/matrix-org/synapse/issues/13798)) +- Use partial indices on SQLite. ([\#13802](https://github.com/matrix-org/synapse/issues/13802)) +- Check that portdb generates the same postgres schema as that in the source tree. ([\#13808](https://github.com/matrix-org/synapse/issues/13808)) +- Fix Docker build when Rust .so has been built locally first. ([\#13811](https://github.com/matrix-org/synapse/issues/13811)) +- Complement: Initialise the Postgres database directly inside the target image instead of the base Postgres image to fix building using Buildah. ([\#13819](https://github.com/matrix-org/synapse/issues/13819)) +- Support providing an index predicate clause when doing upserts. ([\#13822](https://github.com/matrix-org/synapse/issues/13822)) +- Minor speedups to linting in CI. ([\#13827](https://github.com/matrix-org/synapse/issues/13827)) + + +Synapse 1.67.0 (2022-09-13) +=========================== + +This release removes using the deprecated direct TCP replication configuration +for workers. Server admins should use Redis instead. See the [upgrade +notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670). + +The minimum version of `poetry` supported for managing source checkouts is now +1.2.0. + +**Notice:** from the next major release (1.68.0) installing Synapse from a source +checkout will require a recent Rust compiler. Those using packages or +`pip install matrix-synapse` will not be affected. 
See the [upgrade
+notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670).
+
+**Notice:** from the next major release (1.68.0), running Synapse with a SQLite
+database will require SQLite version 3.27.0 or higher. (The [current minimum
+version is SQLite 3.22.0](https://github.com/matrix-org/synapse/blob/release-v1.67/synapse/storage/engines/sqlite.py#L69-L78).)
+See [#12983](https://github.com/matrix-org/synapse/issues/12983) and the [upgrade notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670) for more details.
+
+
+No significant changes since 1.67.0rc1.
+
+
+Synapse 1.67.0rc1 (2022-09-06)
+==============================
+
+Features
+--------
+
+- Support setting the registration shared secret in a file, via a new `registration_shared_secret_path` configuration option. ([\#13614](https://github.com/matrix-org/synapse/issues/13614))
+- Change the default startup behaviour so that any missing "additional" configuration files (signing key, etc) are generated automatically. ([\#13615](https://github.com/matrix-org/synapse/issues/13615))
+- Improve performance of sending messages in rooms with thousands of local users. ([\#13634](https://github.com/matrix-org/synapse/issues/13634))
+
+
+Bugfixes
+--------
+
+- Fix a bug introduced in Synapse 1.13 where the [List Rooms admin API](https://matrix-org.github.io/synapse/develop/admin_api/rooms.html#list-room-api) would return integers instead of booleans for the `federatable` and `public` fields when using a SQLite database. ([\#13509](https://github.com/matrix-org/synapse/issues/13509))
+- Fix a bug where a user could not `/forget` a room after the last member had left it. ([\#13546](https://github.com/matrix-org/synapse/issues/13546))
+- Faster Room Joins: fix `/make_knock` blocking indefinitely when the room in question is a partial-stated room. ([\#13583](https://github.com/matrix-org/synapse/issues/13583))
+- Fix loading the current stream position behind the actual position. ([\#13585](https://github.com/matrix-org/synapse/issues/13585))
+- Fix a long-standing bug in `register_new_matrix_user` which meant it was always necessary to explicitly give a server URL. ([\#13616](https://github.com/matrix-org/synapse/issues/13616))
+- Fix the running of [MSC1763](https://github.com/matrix-org/matrix-spec-proposals/pull/1763) retention `purge_jobs` in deployments with background jobs running on a worker by forcing them back onto the main worker. Contributed by Brad @ Beeper. ([\#13632](https://github.com/matrix-org/synapse/issues/13632))
+- Fix a long-standing bug where downloaded media for URL previews was not deleted while database background updates were running. ([\#13657](https://github.com/matrix-org/synapse/issues/13657))
+- Fix [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint to return the correct next event when the events have the same timestamp. ([\#13658](https://github.com/matrix-org/synapse/issues/13658))
+- Fix a bug where we could wedge media plugins if clients disconnect early. Introduced in v1.22.0. ([\#13660](https://github.com/matrix-org/synapse/issues/13660))
+- Fix a long-standing bug which meant that keys for unwhitelisted servers were not returned by `/_matrix/key/v2/query`. 
([\#13683](https://github.com/matrix-org/synapse/issues/13683)) +- Fix a bug introduced in Synapse 1.20.0 that would cause the unstable unread counts from [MSC2654](https://github.com/matrix-org/matrix-spec-proposals/pull/2654) to be calculated even if the feature is disabled. ([\#13694](https://github.com/matrix-org/synapse/issues/13694)) + + +Updates to the Docker image +--------------------------- + +- Update docker image to use a stable version of poetry. ([\#13688](https://github.com/matrix-org/synapse/issues/13688)) + + +Improved Documentation +---------------------- + +- Improve the description of the ["chain cover index"](https://matrix-org.github.io/synapse/latest/auth_chain_difference_algorithm.html) used internally by Synapse. ([\#13602](https://github.com/matrix-org/synapse/issues/13602)) +- Document how ["monthly active users"](https://matrix-org.github.io/synapse/latest/usage/administration/monthly_active_users.html) is calculated and used. ([\#13617](https://github.com/matrix-org/synapse/issues/13617)) +- Improve documentation around user registration. ([\#13640](https://github.com/matrix-org/synapse/issues/13640)) +- Remove documentation of legacy `frontend_proxy` worker app. ([\#13645](https://github.com/matrix-org/synapse/issues/13645)) +- Clarify documentation that HTTP replication traffic can be protected with a shared secret. ([\#13656](https://github.com/matrix-org/synapse/issues/13656)) +- Remove unintentional colons from [config manual](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html) headers. ([\#13665](https://github.com/matrix-org/synapse/issues/13665)) +- Update docs to make enabling metrics more clear. ([\#13678](https://github.com/matrix-org/synapse/issues/13678)) +- Clarify `(room_id, event_id)` global uniqueness and how we should scope our database schemas. ([\#13701](https://github.com/matrix-org/synapse/issues/13701)) + + +Deprecations and Removals +------------------------- + +- Drop support for calling `/_matrix/client/v3/rooms/{roomId}/invite` without an `id_access_token`, which was not permitted by the spec. Contributed by @Vetchu. ([\#13241](https://github.com/matrix-org/synapse/issues/13241)) +- Remove redundant `_get_joined_users_from_context` cache. Contributed by Nick @ Beeper (@fizzadar). ([\#13569](https://github.com/matrix-org/synapse/issues/13569)) +- Remove the ability to use direct TCP replication with workers. Direct TCP replication was deprecated in Synapse 1.18.0. Workers now require using Redis. ([\#13647](https://github.com/matrix-org/synapse/issues/13647)) +- Remove support for unstable [private read receipts](https://github.com/matrix-org/matrix-spec-proposals/pull/2285). ([\#13653](https://github.com/matrix-org/synapse/issues/13653), [\#13692](https://github.com/matrix-org/synapse/issues/13692)) + + +Internal Changes +---------------- + +- Extend the release script to wait for GitHub Actions to finish and to be usable as a guide for the whole process. ([\#13483](https://github.com/matrix-org/synapse/issues/13483)) +- Add experimental configuration option to allow disabling legacy Prometheus metric names. ([\#13540](https://github.com/matrix-org/synapse/issues/13540)) +- Cache user IDs instead of profiles to reduce cache memory usage. Contributed by Nick @ Beeper (@fizzadar). ([\#13573](https://github.com/matrix-org/synapse/issues/13573), [\#13600](https://github.com/matrix-org/synapse/issues/13600)) +- Optimize how Synapse calculates domains to fetch from during backfill. 
([\#13575](https://github.com/matrix-org/synapse/issues/13575)) +- Comment about a better future where we can get the state diff between two events. ([\#13586](https://github.com/matrix-org/synapse/issues/13586)) +- Instrument `_check_sigs_and_hash_and_fetch` to trace time spent in child concurrent calls for understandable traces in Jaeger. ([\#13588](https://github.com/matrix-org/synapse/issues/13588)) +- Improve performance of `@cachedList`. ([\#13591](https://github.com/matrix-org/synapse/issues/13591)) +- Minor speed up of fetching large numbers of push rules. ([\#13592](https://github.com/matrix-org/synapse/issues/13592)) +- Optimise push action fetching queries. Contributed by Nick @ Beeper (@fizzadar). ([\#13597](https://github.com/matrix-org/synapse/issues/13597)) +- Rename `event_map` to `unpersisted_events` when computing the auth differences. ([\#13603](https://github.com/matrix-org/synapse/issues/13603)) +- Refactor `get_users_in_room(room_id)` mis-use with dedicated `get_current_hosts_in_room(room_id)` function. ([\#13605](https://github.com/matrix-org/synapse/issues/13605)) +- Use dedicated `get_local_users_in_room(room_id)` function to find local users when calculating `join_authorised_via_users_server` of a `/make_join` request. ([\#13606](https://github.com/matrix-org/synapse/issues/13606)) +- Refactor `get_users_in_room(room_id)` mis-use to lookup single local user with dedicated `check_local_user_in_room(...)` function. ([\#13608](https://github.com/matrix-org/synapse/issues/13608)) +- Drop unused column `application_services_state.last_txn`. ([\#13627](https://github.com/matrix-org/synapse/issues/13627)) +- Improve readability of Complement CI logs by printing failure results last. ([\#13639](https://github.com/matrix-org/synapse/issues/13639)) +- Generalise the `@cancellable` annotation so it can be used on functions other than just servlet methods. ([\#13662](https://github.com/matrix-org/synapse/issues/13662)) +- Introduce a `CommonUsageMetrics` class to share some usage metrics between the Prometheus exporter and the phone home stats. ([\#13671](https://github.com/matrix-org/synapse/issues/13671)) +- Add some logging to help track down #13444. ([\#13679](https://github.com/matrix-org/synapse/issues/13679)) +- Update poetry lock file for v1.2.0. ([\#13689](https://github.com/matrix-org/synapse/issues/13689)) +- Add cache to `is_partial_state_room`. ([\#13693](https://github.com/matrix-org/synapse/issues/13693)) +- Update the Grafana dashboard that is included with Synapse in the `contrib` directory. ([\#13697](https://github.com/matrix-org/synapse/issues/13697)) +- Only run trial CI on all python versions on non-PRs. ([\#13698](https://github.com/matrix-org/synapse/issues/13698)) +- Fix typechecking with latest types-jsonschema. ([\#13712](https://github.com/matrix-org/synapse/issues/13712)) +- Reduce number of CI checks we run for PRs. ([\#13713](https://github.com/matrix-org/synapse/issues/13713)) + + +Synapse 1.66.0 (2022-08-31) +=========================== + +No significant changes since 1.66.0rc2. + +This release removes the ability for homeservers to delegate email ownership +verification and password reset confirmation to identity servers. This removal +was originally planned for Synapse 1.64, but was later deferred until now. See +the [upgrade notes](https://matrix-org.github.io/synapse/v1.66/upgrade.html#upgrading-to-v1660) for more details. 
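+
+The next paragraphs describe the deprecation of direct TCP replication in favour of Redis. As a rough sketch of where such a worker deployment ends up (the ports, addresses and hostname below are placeholders rather than defaults; the worker documentation linked below remains the authoritative reference), the relevant configuration looks something like this:
+
+```yaml
+# Shared configuration (e.g. homeserver.yaml): enable Redis-backed replication.
+redis:
+  enabled: true
+  host: localhost   # placeholder; point this at your Redis instance
+  port: 6379        # placeholder
+
+# Main process: keep (or add) an HTTP listener exposing the `replication`
+# resource, and delete any old listener of `type: replication` (direct TCP).
+listeners:
+  - port: 9093                    # placeholder port
+    bind_addresses: ['127.0.0.1']
+    type: http
+    resources:
+      - names: [replication]
+
+# Worker configs: drop the deprecated `worker_replication_port` option; workers
+# now receive updates via Redis and talk to the main process over the HTTP
+# `replication` listener above.
+```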
+
+Deployments with multiple workers should note that the direct TCP replication
+configuration was deprecated in Synapse 1.18.0 and will be removed in Synapse
+v1.67.0. In particular, the TCP `replication` [listener](https://matrix-org.github.io/synapse/v1.66/usage/configuration/config_documentation.html#listeners)
+type (not to be confused with the `replication` resource on the `http` listener
+type) and the `worker_replication_port` config option will be removed.
+
+To migrate to Redis, add the [`redis` config](https://matrix-org.github.io/synapse/v1.66/workers.html#shared-configuration)
+(a brief sketch is shown above), then remove the TCP `replication` listener from the
+master process's config and the `worker_replication_port` option from each worker's
+config. Note that an HTTP listener with a `replication` resource is still required. See the
+[worker documentation](https://matrix-org.github.io/synapse/v1.66/workers.html)
+for more details.
+
+
+Synapse 1.66.0rc2 (2022-08-30)
+==============================
+
+Bugfixes
+--------
+
+- Fix a bug introduced in Synapse 1.66.0rc1 where the new rate limit metrics were misreported (`synapse_rate_limit_sleep_affected_hosts`, `synapse_rate_limit_reject_affected_hosts`). ([\#13649](https://github.com/matrix-org/synapse/issues/13649))
+
+
+Synapse 1.66.0rc1 (2022-08-23)
+==============================
+
+Features
+--------
+
+- Improve validation of request bodies for the following client-server API endpoints: [`/account/password`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3accountpassword), [`/account/password/email/requestToken`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3accountpasswordemailrequesttoken), [`/account/deactivate`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3accountdeactivate) and [`/account/3pid/email/requestToken`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidemailrequesttoken). ([\#13188](https://github.com/matrix-org/synapse/issues/13188), [\#13563](https://github.com/matrix-org/synapse/issues/13563))
+- Add forgotten status to the [Room Details Admin API](https://matrix-org.github.io/synapse/latest/admin_api/rooms.html#room-details-api). ([\#13503](https://github.com/matrix-org/synapse/issues/13503))
+- Add an experimental implementation for [MSC3852 (Expose user agents on `Device`)](https://github.com/matrix-org/matrix-spec-proposals/pull/3852). ([\#13549](https://github.com/matrix-org/synapse/issues/13549))
+- Add `org.matrix.msc2716v4` experimental room version with updated content fields. Part of [MSC2716 (Importing history)](https://github.com/matrix-org/matrix-spec-proposals/pull/2716). ([\#13551](https://github.com/matrix-org/synapse/issues/13551))
+- Add support for compression to federation responses. ([\#13537](https://github.com/matrix-org/synapse/issues/13537))
+- Improve performance of sending messages in rooms with thousands of local users. ([\#13522](https://github.com/matrix-org/synapse/issues/13522), [\#13547](https://github.com/matrix-org/synapse/issues/13547))
+
+
+Bugfixes
+--------
+
+- Faster room joins: make `/joined_members` block whilst the room is partial-stated. ([\#13514](https://github.com/matrix-org/synapse/issues/13514))
+- Fix a bug introduced in Synapse 1.21.0 where the [`/event_reports` Admin API](https://matrix-org.github.io/synapse/develop/admin_api/event_reports.html) could return a total count which was larger than the number of results you can actually query for. 
([\#13525](https://github.com/matrix-org/synapse/issues/13525)) +- Fix a bug introduced in Synapse 1.52.0 where sending server notices fails if `max_avatar_size` or `allowed_avatar_mimetypes` is set and not `system_mxid_avatar_url`. ([\#13566](https://github.com/matrix-org/synapse/issues/13566)) +- Fix a bug where the `opentracing.force_tracing_for_users` config option would not apply to [`/sendToDevice`](https://spec.matrix.org/v1.3/client-server-api/#put_matrixclientv3sendtodeviceeventtypetxnid) and [`/keys/upload`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3keysupload) requests. ([\#13574](https://github.com/matrix-org/synapse/issues/13574)) + + +Improved Documentation +---------------------- + +- Add `openssl` example for generating registration HMAC digest. ([\#13472](https://github.com/matrix-org/synapse/issues/13472)) +- Tidy up Synapse's README. ([\#13491](https://github.com/matrix-org/synapse/issues/13491)) +- Document that event purging related to the `redaction_retention_period` config option is executed only every 5 minutes. ([\#13492](https://github.com/matrix-org/synapse/issues/13492)) +- Add a warning to retention documentation regarding the possibility of database corruption. ([\#13497](https://github.com/matrix-org/synapse/issues/13497)) +- Document that the `DOCKER_BUILDKIT=1` flag is needed to build the docker image. ([\#13515](https://github.com/matrix-org/synapse/issues/13515)) +- Add missing links in `user_consent` section of configuration manual. ([\#13536](https://github.com/matrix-org/synapse/issues/13536)) +- Fix the doc and some warnings that were referring to the nonexistent `custom_templates_directory` setting (instead of `custom_template_directory`). ([\#13538](https://github.com/matrix-org/synapse/issues/13538)) + + +Deprecations and Removals +------------------------- + +- Remove the ability for homeservers to delegate email ownership verification + and password reset confirmation to identity servers. See [upgrade notes](https://matrix-org.github.io/synapse/v1.66/upgrade.html#upgrading-to-v1660) for more details. + +Internal Changes +---------------- + +### Faster room joins + +- Update the rejected state of events during de-partial-stating. ([\#13459](https://github.com/matrix-org/synapse/issues/13459)) +- Avoid blocking lazy-loading `/sync`s during partial joins due to remote memberships. Pull remote memberships from auth events instead of the room state. ([\#13477](https://github.com/matrix-org/synapse/issues/13477)) +- Refuse to start when faster joins is enabled on a deployment with workers, since worker configurations are not currently supported. ([\#13531](https://github.com/matrix-org/synapse/issues/13531)) + +### Metrics and tracing + +- Allow use of both `@trace` and `@tag_args` stacked on the same function. ([\#13453](https://github.com/matrix-org/synapse/issues/13453)) +- Instrument the federation/backfill part of `/messages` for understandable traces in Jaeger. ([\#13489](https://github.com/matrix-org/synapse/issues/13489)) +- Instrument `FederationStateIdsServlet` (`/state_ids`) for understandable traces in Jaeger. ([\#13499](https://github.com/matrix-org/synapse/issues/13499), [\#13554](https://github.com/matrix-org/synapse/issues/13554)) +- Track HTTP response times over 10 seconds from `/messages` (`synapse_room_message_list_rest_servlet_response_time_seconds`). ([\#13533](https://github.com/matrix-org/synapse/issues/13533)) +- Add metrics to track how the rate limiter is affecting requests (sleep/reject). 
([\#13534](https://github.com/matrix-org/synapse/issues/13534), [\#13541](https://github.com/matrix-org/synapse/issues/13541)) +- Add metrics to time how long it takes us to do backfill processing (`synapse_federation_backfill_processing_before_time_seconds`, `synapse_federation_backfill_processing_after_time_seconds`). ([\#13535](https://github.com/matrix-org/synapse/issues/13535), [\#13584](https://github.com/matrix-org/synapse/issues/13584)) +- Add metrics to track rate limiter queue timing (`synapse_rate_limit_queue_wait_time_seconds`). ([\#13544](https://github.com/matrix-org/synapse/issues/13544)) +- Update metrics to track `/messages` response time by room size. ([\#13545](https://github.com/matrix-org/synapse/issues/13545)) + +### Everything else + +- Refactor methods in `synapse.api.auth.Auth` to use `Requester` objects everywhere instead of user IDs. ([\#13024](https://github.com/matrix-org/synapse/issues/13024)) +- Clean-up tests for notifications. ([\#13471](https://github.com/matrix-org/synapse/issues/13471)) +- Add some miscellaneous comments to document sync, especially around `compute_state_delta`. ([\#13474](https://github.com/matrix-org/synapse/issues/13474)) +- Use literals in place of `HTTPStatus` constants in tests. ([\#13479](https://github.com/matrix-org/synapse/issues/13479), [\#13488](https://github.com/matrix-org/synapse/issues/13488)) +- Add comments about how event push actions are rotated. ([\#13485](https://github.com/matrix-org/synapse/issues/13485)) +- Modify HTML template content to better support mobile devices' screen sizes. ([\#13493](https://github.com/matrix-org/synapse/issues/13493)) +- Add a linter script which will reject non-strict types in Pydantic models. ([\#13502](https://github.com/matrix-org/synapse/issues/13502)) +- Reduce the number of tests using legacy TCP replication. ([\#13543](https://github.com/matrix-org/synapse/issues/13543)) +- Allow specifying additional request fields when using the `HomeServerTestCase.login` helper method. ([\#13549](https://github.com/matrix-org/synapse/issues/13549)) +- Make `HomeServerTestCase` load any configured homeserver modules automatically. ([\#13558](https://github.com/matrix-org/synapse/issues/13558)) + + +Synapse 1.65.0 (2022-08-16) +=========================== + +No significant changes since 1.65.0rc2. + + +Synapse 1.65.0rc2 (2022-08-11) +============================== + +Internal Changes +---------------- + +- Revert 'Remove the unspecced `room_id` field in the `/hierarchy` response. ([\#13365](https://github.com/matrix-org/synapse/issues/13365))' to give more time for clients to update. ([\#13501](https://github.com/matrix-org/synapse/issues/13501)) + + +Synapse 1.65.0rc1 (2022-08-09) +============================== + +Features +-------- + +- Add support for stable prefixes for [MSC2285 (private read receipts)](https://github.com/matrix-org/matrix-spec-proposals/pull/2285). ([\#13273](https://github.com/matrix-org/synapse/issues/13273)) +- Add new unstable error codes `ORG.MATRIX.MSC3848.ALREADY_JOINED`, `ORG.MATRIX.MSC3848.NOT_JOINED`, and `ORG.MATRIX.MSC3848.INSUFFICIENT_POWER` described in [MSC3848](https://github.com/matrix-org/matrix-spec-proposals/pull/3848). ([\#13343](https://github.com/matrix-org/synapse/issues/13343)) +- Use stable prefixes for [MSC3827](https://github.com/matrix-org/matrix-spec-proposals/pull/3827). ([\#13370](https://github.com/matrix-org/synapse/issues/13370)) +- Add a new module API method to translate a room alias into a room ID. 
([\#13428](https://github.com/matrix-org/synapse/issues/13428)) +- Add a new module API method to create a room. ([\#13429](https://github.com/matrix-org/synapse/issues/13429)) +- Add remote join capability to the module API's `update_room_membership` method (in a backwards compatible manner). ([\#13441](https://github.com/matrix-org/synapse/issues/13441)) + + +Bugfixes +-------- + +- Update the version of the LDAP3 auth provider module included in the `matrixdotorg/synapse` DockerHub images and the Debian packages hosted on packages.matrix.org to 0.2.2. This version fixes a regression in the module. ([\#13470](https://github.com/matrix-org/synapse/issues/13470)) +- Fix a bug introduced in Synapse 1.41.0 where the `/hierarchy` API returned non-standard information (a `room_id` field under each entry in `children_state`) (this was reverted in v1.65.0rc2, see changelog notes above). ([\#13365](https://github.com/matrix-org/synapse/issues/13365)) +- Fix a bug introduced in Synapse 0.24.0 that would respond with the wrong error status code to `/joined_members` requests when the requester is not a current member of the room. Contributed by @andrewdoh. ([\#13374](https://github.com/matrix-org/synapse/issues/13374)) +- Fix bug in handling of typing events for appservices. Contributed by Nick @ Beeper (@fizzadar). ([\#13392](https://github.com/matrix-org/synapse/issues/13392)) +- Fix a bug introduced in Synapse 1.57.0 where rooms listed in `exclude_rooms_from_sync` in the configuration file would not be properly excluded from incremental syncs. ([\#13408](https://github.com/matrix-org/synapse/issues/13408)) +- Fix a bug in the experimental faster-room-joins support which could cause it to get stuck in an infinite loop. ([\#13353](https://github.com/matrix-org/synapse/issues/13353)) +- Faster room joins: fix a bug which caused rejected events to become un-rejected during state syncing. ([\#13413](https://github.com/matrix-org/synapse/issues/13413)) +- Faster room joins: fix error when running out of servers to sync partial state with, so that Synapse raises the intended error instead. ([\#13432](https://github.com/matrix-org/synapse/issues/13432)) + + +Updates to the Docker image +--------------------------- + +- Make Docker images build on armv7 by installing cryptography dependencies in the 'requirements' stage. Contributed by Jasper Spaans. ([\#13372](https://github.com/matrix-org/synapse/issues/13372)) + + +Improved Documentation +---------------------- + +- Update the 'registration tokens' page to acknowledge that the relevant MSC was merged into version 1.2 of the Matrix specification. Contributed by @moan0s. ([\#11897](https://github.com/matrix-org/synapse/issues/11897)) +- Document which HTTP resources support gzip compression. ([\#13221](https://github.com/matrix-org/synapse/issues/13221)) +- Add steps describing how to elevate an existing user to administrator by manipulating the database. ([\#13230](https://github.com/matrix-org/synapse/issues/13230)) +- Fix wrong headline for `url_preview_accept_language` in documentation. ([\#13437](https://github.com/matrix-org/synapse/issues/13437)) +- Remove redundant 'Contents' section from the Configuration Manual. Contributed by @dklimpel. ([\#13438](https://github.com/matrix-org/synapse/issues/13438)) +- Update documentation for config setting `macaroon_secret_key`. ([\#13443](https://github.com/matrix-org/synapse/issues/13443)) +- Update outdated information on `sso_mapping_providers` documentation. 
([\#13449](https://github.com/matrix-org/synapse/issues/13449)) +- Fix example code in module documentation of `password_auth_provider_callbacks`. ([\#13450](https://github.com/matrix-org/synapse/issues/13450)) +- Make the configuration for the cache clearer. ([\#13481](https://github.com/matrix-org/synapse/issues/13481)) + + +Internal Changes +---------------- + +- Extend the release script to automatically push a new SyTest branch, rather than having that be a manual process. ([\#12978](https://github.com/matrix-org/synapse/issues/12978)) +- Make minor clarifications to the error messages given when we fail to join a room via any server. ([\#13160](https://github.com/matrix-org/synapse/issues/13160)) +- Enable Complement CI tests in the 'latest deps' test run. ([\#13213](https://github.com/matrix-org/synapse/issues/13213)) +- Fix long-standing bugged logic which was never hit in `get_pdu` asking every remote destination even after it finds an event. ([\#13346](https://github.com/matrix-org/synapse/issues/13346)) +- Faster room joins: avoid blocking when pulling events with partially missing prev events. ([\#13355](https://github.com/matrix-org/synapse/issues/13355)) +- Instrument `/messages` for understandable traces in Jaeger. ([\#13368](https://github.com/matrix-org/synapse/issues/13368)) +- Remove an unused argument to `get_relations_for_event`. ([\#13383](https://github.com/matrix-org/synapse/issues/13383)) +- Add a `merge-back` command to the release script, which automates merging the correct branches after a release. ([\#13393](https://github.com/matrix-org/synapse/issues/13393)) +- Adding missing type hints to tests. ([\#13397](https://github.com/matrix-org/synapse/issues/13397)) +- Faster Room Joins: don't leave a stuck room partial state flag if the join fails. ([\#13403](https://github.com/matrix-org/synapse/issues/13403)) +- Refactor `_resolve_state_at_missing_prevs` to compute an `EventContext` instead. ([\#13404](https://github.com/matrix-org/synapse/issues/13404), [\#13431](https://github.com/matrix-org/synapse/issues/13431)) +- Faster Room Joins: prevent Synapse from answering federated join requests for a room which it has not fully joined yet. ([\#13416](https://github.com/matrix-org/synapse/issues/13416)) +- Re-enable running Complement tests against Synapse with workers. ([\#13420](https://github.com/matrix-org/synapse/issues/13420)) +- Prevent unnecessary lookups to any external `get_event` cache. Contributed by Nick @ Beeper (@fizzadar). ([\#13435](https://github.com/matrix-org/synapse/issues/13435)) +- Add some tracing to give more insight into local room joins. ([\#13439](https://github.com/matrix-org/synapse/issues/13439)) +- Rename class `RateLimitConfig` to `RatelimitSettings` and `FederationRateLimitConfig` to `FederationRatelimitSettings`. ([\#13442](https://github.com/matrix-org/synapse/issues/13442)) +- Add some comments about how event push actions are stored. ([\#13445](https://github.com/matrix-org/synapse/issues/13445), [\#13455](https://github.com/matrix-org/synapse/issues/13455)) +- Improve rebuild speed for the "synapse-workers" docker image. ([\#13447](https://github.com/matrix-org/synapse/issues/13447)) +- Fix `@tag_args` being off-by-one with the arguments when tagging a span (tracing). ([\#13452](https://github.com/matrix-org/synapse/issues/13452)) +- Update type of `EventContext.rejected`. ([\#13460](https://github.com/matrix-org/synapse/issues/13460)) +- Use literals in place of `HTTPStatus` constants in tests. 
([\#13463](https://github.com/matrix-org/synapse/issues/13463), [\#13469](https://github.com/matrix-org/synapse/issues/13469)) +- Correct a misnamed argument in state res v2 internals. ([\#13467](https://github.com/matrix-org/synapse/issues/13467)) + + +Synapse 1.64.0 (2022-08-02) +=========================== + +No significant changes since 1.64.0rc2. + + +Deprecation Warning +------------------- + +Synapse 1.66.0 will remove the ability to delegate the tasks of verifying email address ownership, and password reset confirmation, to an identity server. + +If you require your homeserver to verify e-mail addresses or to support password resets via e-mail, please configure your homeserver with SMTP access so that it can send e-mails on its own behalf. +[Consult the configuration documentation for more information.](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#email) + + +Synapse 1.64.0rc2 (2022-07-29) +============================== + +This RC reintroduces support for `account_threepid_delegates.email`, which was removed in 1.64.0rc1. It remains deprecated and will be removed altogether in Synapse 1.66.0. ([\#13406](https://github.com/matrix-org/synapse/issues/13406)) + + +Synapse 1.64.0rc1 (2022-07-26) +============================== + +This RC removed the ability to delegate the tasks of verifying email address ownership, and password reset confirmation, to an identity server. + +We have also stopped building `.deb` packages for Ubuntu 21.10 as it is no longer an active version of Ubuntu. + + +Features +-------- + +- Improve error messages when media thumbnails cannot be served. ([\#13038](https://github.com/matrix-org/synapse/issues/13038)) +- Allow pagination from remote event after discovering it from [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event`. ([\#13205](https://github.com/matrix-org/synapse/issues/13205)) +- Add a `room_type` field in the responses for the list room and room details admin APIs. Contributed by @andrewdoh. ([\#13208](https://github.com/matrix-org/synapse/issues/13208)) +- Add support for room version 10. ([\#13220](https://github.com/matrix-org/synapse/issues/13220)) +- Add per-room rate limiting for room joins. For each room, Synapse now monitors the rate of join events in that room, and throttles additional joins if that rate grows too large. ([\#13253](https://github.com/matrix-org/synapse/issues/13253), [\#13254](https://github.com/matrix-org/synapse/issues/13254), [\#13255](https://github.com/matrix-org/synapse/issues/13255), [\#13276](https://github.com/matrix-org/synapse/issues/13276)) +- Support Implicit TLS (TLS without using a STARTTLS upgrade, typically on port 465) for sending emails, enabled by the new option `force_tls`. Contributed by Jan Schär. ([\#13317](https://github.com/matrix-org/synapse/issues/13317)) + + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.15.0 where adding a user through the Synapse Admin API with a phone number would fail if the `enable_email_notifs` and `email_notifs_for_new_users` options were enabled. Contributed by @thomasweston12. ([\#13263](https://github.com/matrix-org/synapse/issues/13263)) +- Fix a bug introduced in Synapse 1.40.0 where a user invited to a restricted room would be briefly unable to join. ([\#13270](https://github.com/matrix-org/synapse/issues/13270)) +- Fix a long-standing bug where, in rare instances, Synapse could store the incorrect state for a room after a state resolution. 
([\#13278](https://github.com/matrix-org/synapse/issues/13278)) +- Fix a bug introduced in v1.18.0 where the `synapse_pushers` metric would overcount pushers when they are replaced. ([\#13296](https://github.com/matrix-org/synapse/issues/13296)) +- Disable autocorrection and autocapitalisation on the username text field shown during registration when using SSO. ([\#13350](https://github.com/matrix-org/synapse/issues/13350)) +- Update locked version of `frozendict` to 2.3.3, which has fixes for memory leaks affecting `/sync`. ([\#13284](https://github.com/matrix-org/synapse/issues/13284), [\#13352](https://github.com/matrix-org/synapse/issues/13352)) + + +Improved Documentation +---------------------- + +- Provide an example of using the Admin API. Contributed by @jejo86. ([\#13231](https://github.com/matrix-org/synapse/issues/13231)) +- Move the documentation for how URL previews work to the URL preview module. ([\#13233](https://github.com/matrix-org/synapse/issues/13233), [\#13261](https://github.com/matrix-org/synapse/issues/13261)) +- Add another `contrib` script to help set up worker processes. Contributed by @villepeh. ([\#13271](https://github.com/matrix-org/synapse/issues/13271)) +- Document that certain config options were added or changed in Synapse 1.62. Contributed by @behrmann. ([\#13314](https://github.com/matrix-org/synapse/issues/13314)) +- Document the new `rc_invites.per_issuer` throttling option added in Synapse 1.63. ([\#13333](https://github.com/matrix-org/synapse/issues/13333)) +- Mention that BuildKit is needed when building Docker images for tests. ([\#13338](https://github.com/matrix-org/synapse/issues/13338)) +- Improve Caddy reverse proxy documentation. ([\#13344](https://github.com/matrix-org/synapse/issues/13344)) + + +Deprecations and Removals +------------------------- + +- Drop tables that were formerly used for groups/communities. ([\#12967](https://github.com/matrix-org/synapse/issues/12967)) +- Drop support for delegating email verification to an external server. ([\#13192](https://github.com/matrix-org/synapse/issues/13192)) +- Drop support for calling `/_matrix/client/v3/account/3pid/bind` without an `id_access_token`, which was not permitted by the spec. Contributed by @Vetchu. ([\#13239](https://github.com/matrix-org/synapse/issues/13239)) +- Stop building `.deb` packages for Ubuntu 21.10 (Impish Indri), which has reached end of life. ([\#13326](https://github.com/matrix-org/synapse/issues/13326)) + + +Internal Changes +---------------- + +- Use lower transaction isolation level when purging rooms to avoid serialization errors. Contributed by Nick @ Beeper. ([\#12942](https://github.com/matrix-org/synapse/issues/12942)) +- Remove code which incorrectly attempted to reconcile state with remote servers when processing incoming events. ([\#12943](https://github.com/matrix-org/synapse/issues/12943)) +- Make the AS login method call `Auth.get_user_by_req` for checking the AS token. ([\#13094](https://github.com/matrix-org/synapse/issues/13094)) +- Always use a version of canonicaljson that supports the C implementation of frozendict. ([\#13172](https://github.com/matrix-org/synapse/issues/13172)) +- Add prometheus counters for ephemeral events and to device messages pushed to app services. Contributed by Brad @ Beeper. ([\#13175](https://github.com/matrix-org/synapse/issues/13175)) +- Refactor receipts servlet logic to avoid duplicated code. 
([\#13198](https://github.com/matrix-org/synapse/issues/13198)) +- Preparation for database schema simplifications: populate `state_key` and `rejection_reason` for existing rows in the `events` table. ([\#13215](https://github.com/matrix-org/synapse/issues/13215)) +- Remove unused database table `event_reference_hashes`. ([\#13218](https://github.com/matrix-org/synapse/issues/13218)) +- Further reduce queries used sending events when creating new rooms. Contributed by Nick @ Beeper (@fizzadar). ([\#13224](https://github.com/matrix-org/synapse/issues/13224)) +- Call the v2 identity service `/3pid/unbind` endpoint, rather than v1. Contributed by @Vetchu. ([\#13240](https://github.com/matrix-org/synapse/issues/13240)) +- Use an asynchronous cache wrapper for the get event cache. Contributed by Nick @ Beeper (@fizzadar). ([\#13242](https://github.com/matrix-org/synapse/issues/13242), [\#13308](https://github.com/matrix-org/synapse/issues/13308)) +- Optimise federation sender and appservice pusher event stream processing queries. Contributed by Nick @ Beeper (@fizzadar). ([\#13251](https://github.com/matrix-org/synapse/issues/13251)) +- Log the stack when waiting for an entire room to be un-partial stated. ([\#13257](https://github.com/matrix-org/synapse/issues/13257)) +- Fix spurious warning when fetching state after a missing prev event. ([\#13258](https://github.com/matrix-org/synapse/issues/13258)) +- Clean-up tests for notifications. ([\#13260](https://github.com/matrix-org/synapse/issues/13260)) +- Do not fail build if complement with workers fails. ([\#13266](https://github.com/matrix-org/synapse/issues/13266)) +- Don't pull out state in `compute_event_context` for unconflicted state. ([\#13267](https://github.com/matrix-org/synapse/issues/13267), [\#13274](https://github.com/matrix-org/synapse/issues/13274)) +- Reduce the rebuild time for the complement-synapse docker image. ([\#13279](https://github.com/matrix-org/synapse/issues/13279)) +- Don't pull out the full state when creating an event. ([\#13281](https://github.com/matrix-org/synapse/issues/13281), [\#13307](https://github.com/matrix-org/synapse/issues/13307)) +- Upgrade from Poetry 1.1.12 to 1.1.14, to fix bugs when locking packages. ([\#13285](https://github.com/matrix-org/synapse/issues/13285)) +- Make `DictionaryCache` expire full entries if they haven't been queried in a while, even if specific keys have been queried recently. ([\#13292](https://github.com/matrix-org/synapse/issues/13292)) +- Use `HTTPStatus` constants in place of literals in tests. ([\#13297](https://github.com/matrix-org/synapse/issues/13297)) +- Improve performance of query `_get_subset_users_in_room_with_profiles`. ([\#13299](https://github.com/matrix-org/synapse/issues/13299)) +- Up batch size of `bulk_get_push_rules` and `_get_joined_profiles_from_event_ids`. ([\#13300](https://github.com/matrix-org/synapse/issues/13300)) +- Remove unnecessary `json.dumps` from tests. ([\#13303](https://github.com/matrix-org/synapse/issues/13303)) +- Reduce memory usage of sending dummy events. ([\#13310](https://github.com/matrix-org/synapse/issues/13310)) +- Prevent formatting changes of [#3679](https://github.com/matrix-org/synapse/pull/3679) from appearing in `git blame`. ([\#13311](https://github.com/matrix-org/synapse/issues/13311)) +- Change `get_users_in_room` and `get_rooms_for_user` caches to enable pruning of old entries. 
([\#13313](https://github.com/matrix-org/synapse/issues/13313)) +- Validate federation destinations and log an error if a destination is invalid. ([\#13318](https://github.com/matrix-org/synapse/issues/13318)) +- Fix `FederationClient.get_pdu()` returning events from the cache as `outliers` instead of original events we saw over federation. ([\#13320](https://github.com/matrix-org/synapse/issues/13320)) +- Reduce memory usage of state caches. ([\#13323](https://github.com/matrix-org/synapse/issues/13323)) +- Reduce the amount of state we store in the `state_cache`. ([\#13324](https://github.com/matrix-org/synapse/issues/13324)) +- Add missing type hints to open tracing module. ([\#13328](https://github.com/matrix-org/synapse/issues/13328), [\#13345](https://github.com/matrix-org/synapse/issues/13345), [\#13362](https://github.com/matrix-org/synapse/issues/13362)) +- Remove old base slaved store and de-duplicate cache ID generators. Contributed by Nick @ Beeper (@fizzadar). ([\#13329](https://github.com/matrix-org/synapse/issues/13329), [\#13349](https://github.com/matrix-org/synapse/issues/13349)) +- When reporting metrics is enabled, use ~8x less data to describe DB transaction metrics. ([\#13342](https://github.com/matrix-org/synapse/issues/13342)) +- Faster room joins: skip soft fail checks while Synapse only has partial room state, since the current membership of event senders may not be accurately known. ([\#13354](https://github.com/matrix-org/synapse/issues/13354)) + + +Synapse 1.63.1 (2022-07-20) +=========================== + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.63.0 where push actions were incorrectly calculated for appservice users. This caused performance issues on servers with large numbers of appservices. ([\#13332](https://github.com/matrix-org/synapse/issues/13332)) + + +Synapse 1.63.0 (2022-07-19) +=========================== + +Improved Documentation +---------------------- + +- Clarify that homeserver server names are included in the reported data when the `report_stats` config option is enabled. ([\#13321](https://github.com/matrix-org/synapse/issues/13321)) + + +Synapse 1.63.0rc1 (2022-07-12) +============================== + +Features +-------- + +- Add a rate limit for local users sending invites. ([\#13125](https://github.com/matrix-org/synapse/issues/13125)) +- Implement [MSC3827](https://github.com/matrix-org/matrix-spec-proposals/pull/3827): Filtering of `/publicRooms` by room type. ([\#13031](https://github.com/matrix-org/synapse/issues/13031)) +- Improve validation logic in the account data REST endpoints. ([\#13148](https://github.com/matrix-org/synapse/issues/13148)) + + +Bugfixes +-------- + +- Fix a long-standing bug where application services were not able to join remote federated rooms without a profile. ([\#13131](https://github.com/matrix-org/synapse/issues/13131)) +- Fix a long-standing bug where `_get_state_map_for_room` might raise errors when third party event rules callbacks are present. ([\#13174](https://github.com/matrix-org/synapse/issues/13174)) +- Fix a long-standing bug where the `synapse_port_db` script could fail to copy rows with negative row ids. ([\#13226](https://github.com/matrix-org/synapse/issues/13226)) +- Fix a bug introduced in 1.54.0 where appservices would not receive room-less EDUs, like presence, when both [MSC2409](https://github.com/matrix-org/matrix-spec-proposals/pull/2409) and [MSC3202](https://github.com/matrix-org/matrix-spec-proposals/pull/3202) are enabled. 
([\#13236](https://github.com/matrix-org/synapse/issues/13236)) +- Fix a bug introduced in 1.62.0 where rows were not deleted from `event_push_actions` table on large servers. ([\#13194](https://github.com/matrix-org/synapse/issues/13194)) +- Fix a bug introduced in 1.62.0 where notification counts would get stuck after a highlighted message. ([\#13223](https://github.com/matrix-org/synapse/issues/13223)) +- Fix exception when using experimental [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint to look for remote federated imported events before room creation. ([\#13197](https://github.com/matrix-org/synapse/issues/13197)) +- Fix [MSC3202](https://github.com/matrix-org/matrix-spec-proposals/pull/3202)-enabled appservices not receiving to-device messages, preventing messages from being decrypted. ([\#13235](https://github.com/matrix-org/synapse/issues/13235)) + + +Updates to the Docker image +--------------------------- + +- Bump the version of `lxml` in matrix.org Docker images Debian packages from 4.8.0 to 4.9.1. ([\#13207](https://github.com/matrix-org/synapse/issues/13207)) + + +Improved Documentation +---------------------- + +- Add an explanation of the `--report-stats` argument to the docs. ([\#13029](https://github.com/matrix-org/synapse/issues/13029)) +- Add a helpful example bash script to the contrib directory for creating multiple worker configuration files of the same type. Contributed by @villepeh. ([\#13032](https://github.com/matrix-org/synapse/issues/13032)) +- Add missing links to config options. ([\#13166](https://github.com/matrix-org/synapse/issues/13166)) +- Add documentation for homeserver usage statistics collection. ([\#13086](https://github.com/matrix-org/synapse/issues/13086)) +- Add documentation for the existing `databases` option in the homeserver configuration manual. ([\#13212](https://github.com/matrix-org/synapse/issues/13212)) +- Clean up references to sample configuration and redirect users to the configuration manual instead. ([\#13077](https://github.com/matrix-org/synapse/issues/13077), [\#13139](https://github.com/matrix-org/synapse/issues/13139)) +- Document how the Synapse team does reviews. ([\#13132](https://github.com/matrix-org/synapse/issues/13132)) +- Fix wrong section header for `allow_public_rooms_over_federation` in the homeserver config documentation. ([\#13116](https://github.com/matrix-org/synapse/issues/13116)) + + +Deprecations and Removals +------------------------- + +- Remove obsolete and for 8 years unused `RoomEventsStoreTestCase`. Contributed by @arkamar. ([\#13200](https://github.com/matrix-org/synapse/issues/13200)) + + +Internal Changes +---------------- + +- Add type annotations to `synapse.logging`, `tests.server` and `tests.utils`. ([\#13028](https://github.com/matrix-org/synapse/issues/13028), [\#13103](https://github.com/matrix-org/synapse/issues/13103), [\#13159](https://github.com/matrix-org/synapse/issues/13159), [\#13136](https://github.com/matrix-org/synapse/issues/13136)) +- Enforce type annotations for `tests.test_server`. ([\#13135](https://github.com/matrix-org/synapse/issues/13135)) +- Support temporary experimental return values for spam checker module callbacks. ([\#13044](https://github.com/matrix-org/synapse/issues/13044)) +- Add support to `complement.sh` for skipping the docker build. 
([\#13143](https://github.com/matrix-org/synapse/issues/13143), [\#13158](https://github.com/matrix-org/synapse/issues/13158)) +- Add support to `complement.sh` for setting the log level using the `SYNAPSE_TEST_LOG_LEVEL` environment variable. ([\#13152](https://github.com/matrix-org/synapse/issues/13152)) +- Enable Complement testing in the 'Twisted Trunk' CI runs. ([\#13079](https://github.com/matrix-org/synapse/issues/13079), [\#13157](https://github.com/matrix-org/synapse/issues/13157)) +- Improve startup times in Complement test runs against workers, particularly in CPU-constrained environments. ([\#13127](https://github.com/matrix-org/synapse/issues/13127)) +- Update config used by Complement to allow device name lookup over federation. ([\#13167](https://github.com/matrix-org/synapse/issues/13167)) +- Faster room joins: handle race between persisting an event and un-partial stating a room. ([\#13100](https://github.com/matrix-org/synapse/issues/13100)) +- Faster room joins: fix race in recalculation of current room state. ([\#13151](https://github.com/matrix-org/synapse/issues/13151)) +- Faster room joins: skip waiting for full state when processing incoming events over federation. ([\#13144](https://github.com/matrix-org/synapse/issues/13144)) +- Raise a `DependencyError` on missing dependencies instead of a `ConfigError`. ([\#13113](https://github.com/matrix-org/synapse/issues/13113)) +- Avoid stripping line breaks from SQL sent to the database. ([\#13129](https://github.com/matrix-org/synapse/issues/13129)) +- Apply ratelimiting earlier in processing of `/send` requests. ([\#13134](https://github.com/matrix-org/synapse/issues/13134)) +- Improve exception handling when processing events received over federation. ([\#13145](https://github.com/matrix-org/synapse/issues/13145)) +- Check that `auto_vacuum` is disabled when porting a SQLite database to Postgres, as `VACUUM`s must not be performed between runs of the script. ([\#13195](https://github.com/matrix-org/synapse/issues/13195)) +- Reduce DB usage of `/sync` when a large number of unread messages have recently been sent in a room. ([\#13119](https://github.com/matrix-org/synapse/issues/13119), [\#13153](https://github.com/matrix-org/synapse/issues/13153)) +- Reduce memory consumption when processing incoming events in large rooms. ([\#13078](https://github.com/matrix-org/synapse/issues/13078), [\#13222](https://github.com/matrix-org/synapse/issues/13222)) +- Reduce number of queries used to get profile information. Contributed by Nick @ Beeper (@fizzadar). ([\#13209](https://github.com/matrix-org/synapse/issues/13209)) +- Reduce number of events queried during room creation. Contributed by Nick @ Beeper (@fizzadar). ([\#13210](https://github.com/matrix-org/synapse/issues/13210)) +- More aggressively rotate push actions. ([\#13211](https://github.com/matrix-org/synapse/issues/13211)) +- Add `max_line_length` setting for Python files to the `.editorconfig`. Contributed by @sumnerevans @ Beeper. ([\#13228](https://github.com/matrix-org/synapse/issues/13228)) + +Synapse 1.62.0 (2022-07-05) +=========================== + +No significant changes since 1.62.0rc3. + +Authors of spam-checker plugins should consult the [upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.62/docs/upgrade.md#upgrading-to-v1620) to learn about the enriched signatures for spam checker callbacks, which are supported with this release of Synapse. + +## Security advisory + +The following issue is fixed in 1.62.0. 
+ +* [GHSA-jhjh-776m-4765](https://github.com/matrix-org/synapse/security/advisories/GHSA-jhjh-776m-4765) / [CVE-2022-31152](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-31152) + + Synapse instances prior to 1.62.0 did not implement the Matrix [event authorization rules](https://spec.matrix.org/v1.3/rooms/v10/#authorization-rules) correctly. An attacker could craft events which would be accepted by Synapse but not a spec-conformant server, potentially causing divergence in the room state between servers. + + Homeservers with federation disabled via the [`federation_domain_whitelist`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#federation_domain_whitelist) config option are unaffected. + + Administrators of homeservers with federation enabled are advised to upgrade to v1.62.0 or higher. + + Fixed by [#13087](https://github.com/matrix-org/synapse/pull/13087) and [#13088](https://github.com/matrix-org/synapse/pull/13088). + +Synapse 1.62.0rc3 (2022-07-04) +============================== + +Bugfixes +-------- + +- Update the version of the [ldap3 plugin](https://github.com/matrix-org/matrix-synapse-ldap3/) included in the `matrixdotorg/synapse` DockerHub images and the Debian packages hosted on `packages.matrix.org` to 0.2.1. This fixes [a bug](https://github.com/matrix-org/matrix-synapse-ldap3/pull/163) with usernames containing uppercase characters. ([\#13156](https://github.com/matrix-org/synapse/issues/13156)) +- Fix a bug introduced in Synapse 1.62.0rc1 affecting unread counts for users on small servers. ([\#13168](https://github.com/matrix-org/synapse/issues/13168)) + + +Synapse 1.62.0rc2 (2022-07-01) +============================== + +Bugfixes +-------- + +- Fix unread counts for users on large servers. Introduced in v1.62.0rc1. ([\#13140](https://github.com/matrix-org/synapse/issues/13140)) +- Fix DB performance when deleting old push notifications. Introduced in v1.62.0rc1. ([\#13141](https://github.com/matrix-org/synapse/issues/13141)) + + +Synapse 1.62.0rc1 (2022-06-28) +============================== + +Features +-------- + +- Port the spam-checker API callbacks to a new, richer API. This is part of an ongoing change to let spam-checker modules inform users of the reason their event or operation is rejected. ([\#12857](https://github.com/matrix-org/synapse/issues/12857), [\#13047](https://github.com/matrix-org/synapse/issues/13047)) +- Allow server admins to customise the response of the `/.well-known/matrix/client` endpoint. ([\#13035](https://github.com/matrix-org/synapse/issues/13035)) +- Add metrics measuring the CPU and DB time spent in state resolution. ([\#13036](https://github.com/matrix-org/synapse/issues/13036)) +- Speed up fetching of device list changes in `/sync` and `/keys/changes`. ([\#13045](https://github.com/matrix-org/synapse/issues/13045), [\#13098](https://github.com/matrix-org/synapse/issues/13098)) +- Improve URL previews for sites which only provide Twitter Card metadata, e.g. LWN.net. ([\#13056](https://github.com/matrix-org/synapse/issues/13056)) + + +Bugfixes +-------- + +- Update [MSC3786](https://github.com/matrix-org/matrix-spec-proposals/pull/3786) implementation to check `state_key`. ([\#12939](https://github.com/matrix-org/synapse/issues/12939)) +- Fix a bug introduced in Synapse 1.58 where Synapse would not report full version information when installed from a git checkout. This is a best-effort affair and not guaranteed to be stable. 
([\#12973](https://github.com/matrix-org/synapse/issues/12973))
+- Fix a bug introduced in Synapse 1.60 where Synapse would fail to start if the `sqlite3` module was not available. ([\#12979](https://github.com/matrix-org/synapse/issues/12979))
+- Fix a bug where non-standard information was required when requesting the `/hierarchy` API over federation. Introduced
+ in Synapse 1.41.0. ([\#12991](https://github.com/matrix-org/synapse/issues/12991))
+- Fix a long-standing bug which meant that rate limiting was not restrictive enough in some cases. ([\#13018](https://github.com/matrix-org/synapse/issues/13018))
+- Fix a bug introduced in Synapse 1.58 where profile requests for a malformed user ID would cause an internal error. Synapse now returns 400 Bad Request in this situation. ([\#13041](https://github.com/matrix-org/synapse/issues/13041))
+- Fix some inconsistencies in the event authentication code. ([\#13087](https://github.com/matrix-org/synapse/issues/13087), [\#13088](https://github.com/matrix-org/synapse/issues/13088))
+- Fix a long-standing bug where room directory requests would cause an internal server error if given a malformed room alias. ([\#13106](https://github.com/matrix-org/synapse/issues/13106))
+
+
+Improved Documentation
+----------------------
+
+- Add documentation for how to configure Synapse with workers using Docker Compose. Includes example worker config and `docker-compose.yaml`. Contributed by @Thumbscrew. ([\#12737](https://github.com/matrix-org/synapse/issues/12737))
+- Ensure the [Poetry cheat sheet](https://matrix-org.github.io/synapse/develop/development/dependencies.html) is available in the online documentation. ([\#13022](https://github.com/matrix-org/synapse/issues/13022))
+- Mention removed community/group worker endpoints in `upgrade.md`. Contributed by @olmari. ([\#13023](https://github.com/matrix-org/synapse/issues/13023))
+- Add instructions for running Complement with `gotestfmt`-formatted output locally. ([\#13073](https://github.com/matrix-org/synapse/issues/13073))
+- Update OpenTracing docs to reference the configuration manual rather than the configuration file. ([\#13076](https://github.com/matrix-org/synapse/issues/13076))
+- Update information on downstream Debian packages. ([\#13095](https://github.com/matrix-org/synapse/issues/13095))
+- Remove documentation for the Delete Group Admin API which no longer exists. ([\#13112](https://github.com/matrix-org/synapse/issues/13112))
+
+
+Deprecations and Removals
+-------------------------
+
+- Remove the unspecced `DELETE /directory/list/room/{roomId}` endpoint, which hid rooms from the [public room directory](https://spec.matrix.org/v1.3/client-server-api/#listing-rooms). Instead, `PUT` to the same URL with a visibility of `"private"`. ([\#13123](https://github.com/matrix-org/synapse/issues/13123))
+
+
+Internal Changes
+----------------
+
+- Add tests for cancellation of `GET /rooms/$room_id/members` and `GET /rooms/$room_id/state` requests. ([\#12674](https://github.com/matrix-org/synapse/issues/12674))
+- Report login failures due to unknown third party identifiers in the same way as failures due to invalid passwords. This prevents an attacker from using the error response to determine if the identifier exists. Contributed by Daniel Aloni. ([\#12738](https://github.com/matrix-org/synapse/issues/12738))
+- Merge the Complement testing Docker images into a single, multi-purpose image. 
([\#12881](https://github.com/matrix-org/synapse/issues/12881), [\#13075](https://github.com/matrix-org/synapse/issues/13075)) +- Simplify the database schema for `event_edges`. ([\#12893](https://github.com/matrix-org/synapse/issues/12893)) +- Clean up the test code for client disconnection. ([\#12929](https://github.com/matrix-org/synapse/issues/12929)) +- Remove code generating comments in configuration. ([\#12941](https://github.com/matrix-org/synapse/issues/12941)) +- Add `Cross-Origin-Resource-Policy: cross-origin` header to content repository's thumbnail and download endpoints. ([\#12944](https://github.com/matrix-org/synapse/issues/12944)) +- Replace noop background updates with `DELETE` delta. ([\#12954](https://github.com/matrix-org/synapse/issues/12954), [\#13050](https://github.com/matrix-org/synapse/issues/13050)) +- Use lower isolation level when inserting read receipts to avoid serialization errors. Contributed by Nick @ Beeper. ([\#12957](https://github.com/matrix-org/synapse/issues/12957)) +- Reduce the amount of state we pull from the DB. ([\#12963](https://github.com/matrix-org/synapse/issues/12963)) +- Enable testing against PostgreSQL databases in Complement CI. ([\#12965](https://github.com/matrix-org/synapse/issues/12965), [\#13034](https://github.com/matrix-org/synapse/issues/13034)) +- Fix an inaccurate comment. ([\#12969](https://github.com/matrix-org/synapse/issues/12969)) +- Remove the `delete_device` method and always call `delete_devices`. ([\#12970](https://github.com/matrix-org/synapse/issues/12970)) +- Use a GitHub form for issues rather than a hard-to-read, easy-to-ignore template. ([\#12982](https://github.com/matrix-org/synapse/issues/12982)) +- Move [MSC3715](https://github.com/matrix-org/matrix-spec-proposals/pull/3715) behind an experimental config flag. ([\#12984](https://github.com/matrix-org/synapse/issues/12984)) +- Add type hints to tests. ([\#12985](https://github.com/matrix-org/synapse/issues/12985), [\#13099](https://github.com/matrix-org/synapse/issues/13099)) +- Refactor macaroon tokens generation and move the unsubscribe link in notification emails to `/_synapse/client/unsubscribe`. ([\#12986](https://github.com/matrix-org/synapse/issues/12986)) +- Fix documentation for running complement tests. ([\#12990](https://github.com/matrix-org/synapse/issues/12990)) +- Faster joins: add issue links to the TODO comments in the code. ([\#13004](https://github.com/matrix-org/synapse/issues/13004)) +- Reduce DB usage of `/sync` when a large number of unread messages have recently been sent in a room. ([\#13005](https://github.com/matrix-org/synapse/issues/13005), [\#13096](https://github.com/matrix-org/synapse/issues/13096), [\#13118](https://github.com/matrix-org/synapse/issues/13118)) +- Replaced usage of PyJWT with methods from Authlib in `org.matrix.login.jwt`. Contributed by Hannes Lerchl. ([\#13011](https://github.com/matrix-org/synapse/issues/13011)) +- Modernize the `contrib/graph/` scripts. ([\#13013](https://github.com/matrix-org/synapse/issues/13013)) +- Remove redundant `room_version` parameters from event auth functions. ([\#13017](https://github.com/matrix-org/synapse/issues/13017)) +- Decouple `synapse.api.auth_blocking.AuthBlocking` from `synapse.api.auth.Auth`. ([\#13021](https://github.com/matrix-org/synapse/issues/13021)) +- Add type annotations to `synapse.storage.databases.main.devices`. ([\#13025](https://github.com/matrix-org/synapse/issues/13025)) +- Set default `sync_response_cache_duration` to two minutes. 
([\#13042](https://github.com/matrix-org/synapse/issues/13042)) +- Rename CI test runs. ([\#13046](https://github.com/matrix-org/synapse/issues/13046)) +- Increase timeout of complement CI test runs. ([\#13048](https://github.com/matrix-org/synapse/issues/13048)) +- Refactor entry points so that they all have a `main` function. ([\#13052](https://github.com/matrix-org/synapse/issues/13052)) +- Refactor the Dockerfile-workers configuration script to use Jinja2 templates in Synapse workers' Supervisord blocks. ([\#13054](https://github.com/matrix-org/synapse/issues/13054)) +- Add headers to individual options in config documentation to allow for linking. ([\#13055](https://github.com/matrix-org/synapse/issues/13055)) +- Make Complement CI logs easier to read. ([\#13057](https://github.com/matrix-org/synapse/issues/13057), [\#13058](https://github.com/matrix-org/synapse/issues/13058), [\#13069](https://github.com/matrix-org/synapse/issues/13069)) +- Don't instantiate modules with keyword arguments. ([\#13060](https://github.com/matrix-org/synapse/issues/13060)) +- Fix type checking errors against Twisted trunk. ([\#13061](https://github.com/matrix-org/synapse/issues/13061)) +- Allow MSC3030 `timestamp_to_event` calls from anyone on world-readable rooms. ([\#13062](https://github.com/matrix-org/synapse/issues/13062)) +- Add a CI job to check that schema deltas are in the correct folder. ([\#13063](https://github.com/matrix-org/synapse/issues/13063)) +- Avoid rechecking event auth rules which are independent of room state. ([\#13065](https://github.com/matrix-org/synapse/issues/13065)) +- Reduce the duplication of code that invokes the rate limiter. ([\#13070](https://github.com/matrix-org/synapse/issues/13070)) +- Add a Subject Alternative Name to the certificate generated for Complement tests. ([\#13071](https://github.com/matrix-org/synapse/issues/13071)) +- Add more tests for room upgrades. ([\#13074](https://github.com/matrix-org/synapse/issues/13074)) +- Pin dependencies maintained by matrix.org to [semantic version](https://semver.org/) bounds. ([\#13082](https://github.com/matrix-org/synapse/issues/13082)) +- Correctly report prometheus DB stats for `get_earliest_token_for_stats`. ([\#13085](https://github.com/matrix-org/synapse/issues/13085)) +- Fix a long-standing bug where a finished logging context would be re-started when Synapse failed to persist an event from federation. ([\#13089](https://github.com/matrix-org/synapse/issues/13089)) +- Simplify the alias deletion logic as an application service. ([\#13093](https://github.com/matrix-org/synapse/issues/13093)) +- Add type annotations to `tests.test_server`. ([\#13124](https://github.com/matrix-org/synapse/issues/13124)) + + +Synapse 1.61.1 (2022-06-28) +=========================== + +This patch release fixes a security issue regarding URL previews, affecting all prior versions of Synapse. Server administrators are encouraged to update Synapse as soon as possible. We are not aware of these vulnerabilities being exploited in the wild. + +Server administrators who are unable to update Synapse may use the workarounds described in the linked GitHub Security Advisory below. + +## Security advisory + +The following issue is fixed in 1.61.1. 
+ +* [GHSA-22p3-qrh9-cx32](https://github.com/matrix-org/synapse/security/advisories/GHSA-22p3-qrh9-cx32) / [CVE-2022-31052](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-31052) + + Synapse instances with the [`url_preview_enabled`](https://matrix-org.github.io/synapse/v1.61/usage/configuration/config_documentation.html#media-store) homeserver config option set to `true` are affected. URL previews of some web pages can lead to unbounded recursion, causing the request to either fail, or in some cases crash the running Synapse process. + + Requesting URL previews requires authentication. Nevertheless, it is possible to exploit this maliciously, either by malicious users on the homeserver, or by remote users sending URLs that a local user's client may automatically request a URL preview for. + + Homeservers with the `url_preview_enabled` configuration option set to `false` (the default) are unaffected. Instances with the `enable_media_repo` configuration option set to `false` are also unaffected, as this also disables URL preview functionality. + + Fixed by [fa1308061802ac7b7d20e954ba7372c5ac292333](https://github.com/matrix-org/synapse/commit/fa1308061802ac7b7d20e954ba7372c5ac292333). + +Synapse 1.61.0 (2022-06-14) +=========================== + +This release removes support for the non-standard feature known both as 'groups' and as 'communities', which have been superseded by *Spaces*. + +See [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1610) +for more details. + +Improved Documentation +---------------------- + +- Mention removed community/group worker endpoints in [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1610). Contributed by @olmari. ([\#13023](https://github.com/matrix-org/synapse/issues/13023)) + + +Synapse 1.61.0rc1 (2022-06-07) +============================== + +Features +-------- + +- Add new `media_retention` options to the homeserver config for routinely cleaning up non-recently accessed media. ([\#12732](https://github.com/matrix-org/synapse/issues/12732), [\#12972](https://github.com/matrix-org/synapse/issues/12972), [\#12977](https://github.com/matrix-org/synapse/issues/12977)) +- Experimental support for [MSC3772](https://github.com/matrix-org/matrix-spec-proposals/pull/3772): Push rule for mutually related events. ([\#12740](https://github.com/matrix-org/synapse/issues/12740), [\#12859](https://github.com/matrix-org/synapse/issues/12859)) +- Update to the `check_event_for_spam` module callback: Deprecate the current callback signature, replace it with a new signature that is both less ambiguous (replacing booleans with explicit allow/block) and more powerful (ability to return explicit error codes). ([\#12808](https://github.com/matrix-org/synapse/issues/12808)) +- Add storage and module API methods to get monthly active users (and their corresponding appservices) within an optionally specified time range. ([\#12838](https://github.com/matrix-org/synapse/issues/12838), [\#12917](https://github.com/matrix-org/synapse/issues/12917)) +- Support the new error code `ORG.MATRIX.MSC3823.USER_ACCOUNT_SUSPENDED` from [MSC3823](https://github.com/matrix-org/matrix-spec-proposals/pull/3823). ([\#12845](https://github.com/matrix-org/synapse/issues/12845), [\#12923](https://github.com/matrix-org/synapse/issues/12923)) +- Add a configurable background job to delete stale devices. 
([\#12855](https://github.com/matrix-org/synapse/issues/12855)) +- Improve URL previews for pages with empty elements. ([\#12951](https://github.com/matrix-org/synapse/issues/12951)) +- Allow updating a user's password using the admin API without logging out their devices. Contributed by @jcgruenhage. ([\#12952](https://github.com/matrix-org/synapse/issues/12952)) + + +Bugfixes +-------- + +- Always send an `access_token` in `/thirdparty/` requests to appservices, as required by the [Application Service API specification](https://spec.matrix.org/v1.1/application-service-api/#third-party-networks). ([\#12746](https://github.com/matrix-org/synapse/issues/12746)) +- Implement [MSC3816](https://github.com/matrix-org/matrix-spec-proposals/pull/3816): sending the root event in a thread should count as having 'participated' in it. ([\#12766](https://github.com/matrix-org/synapse/issues/12766)) +- Delete events from the `federation_inbound_events_staging` table when a room is purged through the admin API. ([\#12784](https://github.com/matrix-org/synapse/issues/12784)) +- Fix a bug where we did not correctly handle invalid device list updates over federation. Contributed by Carl Bordum Hansen. ([\#12829](https://github.com/matrix-org/synapse/issues/12829)) +- Fix a bug which allowed multiple async operations to access database locks concurrently. Contributed by @sumnerevans @ Beeper. ([\#12832](https://github.com/matrix-org/synapse/issues/12832)) +- Fix an issue introduced in Synapse 0.34 where the `/notifications` endpoint would only return notifications if a user registered at least one pusher. Contributed by Famedly. ([\#12840](https://github.com/matrix-org/synapse/issues/12840)) +- Fix a bug where servers using a Postgres database would fail to backfill from an insertion event when MSC2716 is enabled (`experimental_features.msc2716_enabled`). ([\#12843](https://github.com/matrix-org/synapse/issues/12843)) +- Fix [MSC3787](https://github.com/matrix-org/matrix-spec-proposals/pull/3787) rooms being omitted from room directory, room summary and space hierarchy responses. ([\#12858](https://github.com/matrix-org/synapse/issues/12858)) +- Fix a bug introduced in Synapse 1.54.0 which could sometimes cause exceptions when handling federated traffic. ([\#12877](https://github.com/matrix-org/synapse/issues/12877)) +- Fix a bug introduced in Synapse 1.59.0 which caused room deletion to fail with a foreign key violation error. ([\#12889](https://github.com/matrix-org/synapse/issues/12889)) +- Fix a long-standing bug which caused the `/messages` endpoint to return an incorrect `end` attribute when there were no more events. Contributed by @Vetchu. ([\#12903](https://github.com/matrix-org/synapse/issues/12903)) +- Fix a bug introduced in Synapse 1.58.0 where `/sync` would fail if the most recent event in a room was a redaction of an event that has since been purged. ([\#12905](https://github.com/matrix-org/synapse/issues/12905)) +- Fix a potential memory leak when generating thumbnails. ([\#12932](https://github.com/matrix-org/synapse/issues/12932)) +- Fix a long-standing bug where a URL preview would break if the image failed to download. ([\#12950](https://github.com/matrix-org/synapse/issues/12950)) + + +Improved Documentation +---------------------- + +- Fix typographical errors in documentation. ([\#12863](https://github.com/matrix-org/synapse/issues/12863)) +- Fix documentation incorrectly stating the `sendToDevice` endpoint can be directed at generic workers. Contributed by Nick @ Beeper. 
([\#12867](https://github.com/matrix-org/synapse/issues/12867)) + + +Deprecations and Removals +------------------------- + +- Remove support for the non-standard groups/communities feature from Synapse. ([\#12553](https://github.com/matrix-org/synapse/issues/12553), [\#12558](https://github.com/matrix-org/synapse/issues/12558), [\#12563](https://github.com/matrix-org/synapse/issues/12563), [\#12895](https://github.com/matrix-org/synapse/issues/12895), [\#12897](https://github.com/matrix-org/synapse/issues/12897), [\#12899](https://github.com/matrix-org/synapse/issues/12899), [\#12900](https://github.com/matrix-org/synapse/issues/12900), [\#12936](https://github.com/matrix-org/synapse/issues/12936), [\#12966](https://github.com/matrix-org/synapse/issues/12966)) +- Remove contributed `kick_users.py` script. This is broken under Python 3, and is not added to the environment when `pip install`ing Synapse. ([\#12908](https://github.com/matrix-org/synapse/issues/12908)) +- Remove `contrib/jitsimeetbridge`. This was an unused experiment that hasn't been meaningfully changed since 2014. ([\#12909](https://github.com/matrix-org/synapse/issues/12909)) +- Remove unused `contrib/experiements/cursesio.py` script, which fails to run under Python 3. ([\#12910](https://github.com/matrix-org/synapse/issues/12910)) +- Remove unused `contrib/experiements/test_messaging.py` script. This fails to run on Python 3. ([\#12911](https://github.com/matrix-org/synapse/issues/12911)) + + +Internal Changes +---------------- + +- Test Synapse against Complement with workers. ([\#12810](https://github.com/matrix-org/synapse/issues/12810), [\#12933](https://github.com/matrix-org/synapse/issues/12933)) +- Reduce the amount of state we pull from the DB. ([\#12811](https://github.com/matrix-org/synapse/issues/12811), [\#12964](https://github.com/matrix-org/synapse/issues/12964)) +- Try other homeservers when re-syncing state for rooms with partial state. ([\#12812](https://github.com/matrix-org/synapse/issues/12812)) +- Resume state re-syncing for rooms with partial state after a Synapse restart. ([\#12813](https://github.com/matrix-org/synapse/issues/12813)) +- Remove Mutual Rooms' ([MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666)) endpoint dependency on the User Directory. ([\#12836](https://github.com/matrix-org/synapse/issues/12836)) +- Experimental: expand `check_event_for_spam` with ability to return additional fields. This enables spam-checker implementations to experiment with mechanisms to give users more information about why they are blocked and whether any action is needed from them to be unblocked. ([\#12846](https://github.com/matrix-org/synapse/issues/12846)) +- Remove `dont_notify` from the `.m.rule.room.server_acl` rule. ([\#12849](https://github.com/matrix-org/synapse/issues/12849)) +- Remove the unstable `/hierarchy` endpoint from [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946). ([\#12851](https://github.com/matrix-org/synapse/issues/12851)) +- Pull out less state when handling gaps in room DAG. ([\#12852](https://github.com/matrix-org/synapse/issues/12852), [\#12904](https://github.com/matrix-org/synapse/issues/12904)) +- Clean-up the push rules datastore. ([\#12856](https://github.com/matrix-org/synapse/issues/12856)) +- Correct a type annotation in the URL preview source code. ([\#12860](https://github.com/matrix-org/synapse/issues/12860)) +- Update `pyjwt` dependency to [2.4.0](https://github.com/jpadilla/pyjwt/releases/tag/2.4.0). 
([\#12865](https://github.com/matrix-org/synapse/issues/12865)) +- Enable the `/account/whoami` endpoint on synapse worker processes. Contributed by Nick @ Beeper. ([\#12866](https://github.com/matrix-org/synapse/issues/12866)) +- Enable the `batch_send` endpoint on synapse worker processes. Contributed by Nick @ Beeper. ([\#12868](https://github.com/matrix-org/synapse/issues/12868)) +- Don't generate empty AS transactions when the AS is flagged as down. Contributed by Nick @ Beeper. ([\#12869](https://github.com/matrix-org/synapse/issues/12869)) +- Fix up the variable `state_store` naming. ([\#12871](https://github.com/matrix-org/synapse/issues/12871)) +- Faster room joins: when querying the current state of the room, wait for state to be populated. ([\#12872](https://github.com/matrix-org/synapse/issues/12872)) +- Avoid running queries which will never result in deletions. ([\#12879](https://github.com/matrix-org/synapse/issues/12879)) +- Use constants for EDU types. ([\#12884](https://github.com/matrix-org/synapse/issues/12884)) +- Reduce database load of `/sync` when presence is enabled. ([\#12885](https://github.com/matrix-org/synapse/issues/12885)) +- Refactor `have_seen_events` to reduce memory consumed when processing federation traffic. ([\#12886](https://github.com/matrix-org/synapse/issues/12886)) +- Refactor receipt linearization code. ([\#12888](https://github.com/matrix-org/synapse/issues/12888)) +- Add type annotations to `synapse.logging.opentracing`. ([\#12894](https://github.com/matrix-org/synapse/issues/12894)) +- Remove PyNaCl occurrences directly used in Synapse code. ([\#12902](https://github.com/matrix-org/synapse/issues/12902)) +- Bump types-jsonschema from 4.4.1 to 4.4.6. ([\#12912](https://github.com/matrix-org/synapse/issues/12912)) +- Rename storage classes. ([\#12913](https://github.com/matrix-org/synapse/issues/12913)) +- Preparation for database schema simplifications: stop reading from `event_edges.room_id`. ([\#12914](https://github.com/matrix-org/synapse/issues/12914)) +- Check if we are in a virtual environment before overriding the `PYTHONPATH` environment variable in the demo script. ([\#12916](https://github.com/matrix-org/synapse/issues/12916)) +- Improve the logging when signature checks on events fail. ([\#12925](https://github.com/matrix-org/synapse/issues/12925)) + + +Synapse 1.60.0 (2022-05-31) +=========================== + +This release of Synapse adds a unique index to the `state_group_edges` table, in +order to prevent accidentally introducing duplicate information (for example, +because a database backup was restored multiple times). If your Synapse database +already has duplicate rows in this table, this could fail with an error and +require manual remediation. + +Additionally, the signature of the `check_event_for_spam` module callback has changed. +The previous signature has been deprecated and remains working for now. Module authors +should update their modules to use the new signature where possible. + +See [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1600) +for more details. + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.60.0rc1 that would break some imports from `synapse.module_api`. 
([\#12918](https://github.com/matrix-org/synapse/issues/12918)) + + +Synapse 1.60.0rc2 (2022-05-27) +============================== + +Features +-------- + +- Add an option allowing users to use their password to reauthenticate for privileged actions even though password login is disabled. ([\#12883](https://github.com/matrix-org/synapse/issues/12883)) + + +Bugfixes +-------- + +- Explicitly close `ijson` coroutines once we are done with them, instead of leaving the garbage collector to close them. ([\#12875](https://github.com/matrix-org/synapse/issues/12875)) + + +Internal Changes +---------------- + +- Improve URL previews by not including the content of media tags in the generated description. ([\#12887](https://github.com/matrix-org/synapse/issues/12887)) + + +Synapse 1.60.0rc1 (2022-05-24) +============================== + +Features +-------- + +- Measure the time taken in spam-checking callbacks and expose those measurements as metrics. ([\#12513](https://github.com/matrix-org/synapse/issues/12513)) +- Add a `default_power_level_content_override` config option to set default room power levels per room preset. ([\#12618](https://github.com/matrix-org/synapse/issues/12618)) +- Add support for [MSC3787: Allowing knocks to restricted rooms](https://github.com/matrix-org/matrix-spec-proposals/pull/3787). ([\#12623](https://github.com/matrix-org/synapse/issues/12623)) +- Send `USER_IP` commands on a different Redis channel, in order to reduce traffic to workers that do not process these commands. ([\#12672](https://github.com/matrix-org/synapse/issues/12672), [\#12809](https://github.com/matrix-org/synapse/issues/12809)) +- Synapse will now reload [cache config](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#caching) when it receives a [SIGHUP](https://en.wikipedia.org/wiki/SIGHUP) signal. ([\#12673](https://github.com/matrix-org/synapse/issues/12673)) +- Add a config options to allow for auto-tuning of caches. ([\#12701](https://github.com/matrix-org/synapse/issues/12701)) +- Update [MSC2716](https://github.com/matrix-org/matrix-spec-proposals/pull/2716) implementation to process marker events from the current state to avoid markers being lost in timeline gaps for federated servers which would cause the imported history to be undiscovered. ([\#12718](https://github.com/matrix-org/synapse/issues/12718)) +- Add a `drop_federated_event` callback to `SpamChecker` to disregard inbound federated events before they take up much processing power, in an emergency. ([\#12744](https://github.com/matrix-org/synapse/issues/12744)) +- Implement [MSC3818: Copy room type on upgrade](https://github.com/matrix-org/matrix-spec-proposals/pull/3818). ([\#12786](https://github.com/matrix-org/synapse/issues/12786), [\#12792](https://github.com/matrix-org/synapse/issues/12792)) +- Update to the `check_event_for_spam` module callback. Deprecate the current callback signature, replace it with a new signature that is both less ambiguous (replacing booleans with explicit allow/block) and more powerful (ability to return explicit error codes). ([\#12808](https://github.com/matrix-org/synapse/issues/12808)) + + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.7.0 that would prevent events from being sent to clients if there's a retention policy in the room when the support for retention policies is disabled. 
([\#12611](https://github.com/matrix-org/synapse/issues/12611)) +- Fix a bug introduced in Synapse 1.57.0 where `/messages` would throw a 500 error when querying for a non-existent room. ([\#12683](https://github.com/matrix-org/synapse/issues/12683)) +- Add a unique index to `state_group_edges` to prevent duplicates being accidentally introduced and the consequential impact to performance. ([\#12687](https://github.com/matrix-org/synapse/issues/12687)) +- Fix a long-standing bug where an empty room would be created when a user with an insufficient power level tried to upgrade a room. ([\#12696](https://github.com/matrix-org/synapse/issues/12696)) +- Fix a bug introduced in Synapse 1.30.0 where empty rooms could be automatically created if a monthly active users limit is set. ([\#12713](https://github.com/matrix-org/synapse/issues/12713)) +- Fix push to dismiss notifications when read on another client. Contributed by @SpiritCroc @ Beeper. ([\#12721](https://github.com/matrix-org/synapse/issues/12721)) +- Fix poor database performance when reading the cache invalidation stream for large servers with lots of workers. ([\#12747](https://github.com/matrix-org/synapse/issues/12747)) +- Fix a long-standing bug where the user directory background process would fail to make forward progress if a user included a null codepoint in their display name or avatar. ([\#12762](https://github.com/matrix-org/synapse/issues/12762)) +- Delete events from the `federation_inbound_events_staging` table when a room is purged through the admin API. ([\#12770](https://github.com/matrix-org/synapse/issues/12770)) +- Give a meaningful error message when a client tries to create a room with an invalid alias localpart. ([\#12779](https://github.com/matrix-org/synapse/issues/12779)) +- Fix a bug introduced in 1.43.0 where a file (`providers.json`) was never closed. Contributed by @arkamar. ([\#12794](https://github.com/matrix-org/synapse/issues/12794)) +- Fix a long-standing bug where finished log contexts would be re-started when failing to contact remote homeservers. ([\#12803](https://github.com/matrix-org/synapse/issues/12803)) +- Fix a bug, introduced in Synapse 1.21.0, that led to media thumbnails being unusable before the index has been added in the background. ([\#12823](https://github.com/matrix-org/synapse/issues/12823)) + + +Updates to the Docker image +--------------------------- + +- Fix the docker file after a dependency update. ([\#12853](https://github.com/matrix-org/synapse/issues/12853)) + + +Improved Documentation +---------------------- + +- Fix a typo in the Media Admin API documentation. ([\#12715](https://github.com/matrix-org/synapse/issues/12715)) +- Update the OpenID Connect example for Keycloak to be compatible with newer versions of Keycloak. Contributed by @nhh. ([\#12727](https://github.com/matrix-org/synapse/issues/12727)) +- Fix typo in server listener documentation. ([\#12742](https://github.com/matrix-org/synapse/issues/12742)) +- Link to the configuration manual from the welcome page of the documentation. ([\#12748](https://github.com/matrix-org/synapse/issues/12748)) +- Fix typo in `run_background_tasks_on` option name in configuration manual documentation. ([\#12749](https://github.com/matrix-org/synapse/issues/12749)) +- Add information regarding the `rc_invites` ratelimiting option to the configuration docs. ([\#12759](https://github.com/matrix-org/synapse/issues/12759)) +- Add documentation for cancellation of request processing. 
([\#12761](https://github.com/matrix-org/synapse/issues/12761)) +- Recommend using docker to run tests against postgres. ([\#12765](https://github.com/matrix-org/synapse/issues/12765)) +- Add missing user directory endpoint from the generic worker documentation. Contributed by @olmari. ([\#12773](https://github.com/matrix-org/synapse/issues/12773)) +- Add additional info to documentation of config option `cache_autotuning`. ([\#12776](https://github.com/matrix-org/synapse/issues/12776)) +- Update configuration manual documentation to document size-related suffixes. ([\#12777](https://github.com/matrix-org/synapse/issues/12777)) +- Fix invalid YAML syntax in the example documentation for the `url_preview_accept_language` config option. ([\#12785](https://github.com/matrix-org/synapse/issues/12785)) + + +Deprecations and Removals +------------------------- + +- Require a body in POST requests to `/rooms/{roomId}/receipt/{receiptType}/{eventId}`, as required by the [Matrix specification](https://spec.matrix.org/v1.2/client-server-api/#post_matrixclientv3roomsroomidreceiptreceipttypeeventid). This breaks compatibility with Element Android 1.2.0 and earlier: users of those clients will be unable to send read receipts. ([\#12709](https://github.com/matrix-org/synapse/issues/12709)) + + +Internal Changes +---------------- + +- Improve event caching mechanism to avoid having multiple copies of an event in memory at a time. ([\#10533](https://github.com/matrix-org/synapse/issues/10533)) +- Preparation for faster-room-join work: return subsets of room state which we already have, immediately. ([\#12498](https://github.com/matrix-org/synapse/issues/12498)) +- Add `@cancellable` decorator, for use on endpoint methods that can be cancelled when clients disconnect. ([\#12586](https://github.com/matrix-org/synapse/issues/12586), [\#12588](https://github.com/matrix-org/synapse/issues/12588), [\#12630](https://github.com/matrix-org/synapse/issues/12630), [\#12694](https://github.com/matrix-org/synapse/issues/12694), [\#12698](https://github.com/matrix-org/synapse/issues/12698), [\#12699](https://github.com/matrix-org/synapse/issues/12699), [\#12700](https://github.com/matrix-org/synapse/issues/12700), [\#12705](https://github.com/matrix-org/synapse/issues/12705)) +- Enable cancellation of `GET /rooms/$room_id/members`, `GET /rooms/$room_id/state` and `GET /rooms/$room_id/state/$event_type/*` requests. ([\#12708](https://github.com/matrix-org/synapse/issues/12708)) +- Improve documentation of the `synapse.push` module. ([\#12676](https://github.com/matrix-org/synapse/issues/12676)) +- Refactor functions to on `PushRuleEvaluatorForEvent`. ([\#12677](https://github.com/matrix-org/synapse/issues/12677)) +- Preparation for database schema simplifications: stop writing to `event_reference_hashes`. ([\#12679](https://github.com/matrix-org/synapse/issues/12679)) +- Remove code which updates unused database column `application_services_state.last_txn`. ([\#12680](https://github.com/matrix-org/synapse/issues/12680)) +- Refactor `EventContext` class. ([\#12689](https://github.com/matrix-org/synapse/issues/12689)) +- Remove an unneeded class in the push code. ([\#12691](https://github.com/matrix-org/synapse/issues/12691)) +- Consolidate parsing of relation information from events. ([\#12693](https://github.com/matrix-org/synapse/issues/12693)) +- Convert namespace class `Codes` into a string enum. ([\#12703](https://github.com/matrix-org/synapse/issues/12703)) +- Optimize private read receipt filtering. 
([\#12711](https://github.com/matrix-org/synapse/issues/12711)) +- Drop the logging level of status messages for the URL preview cache expiry job from INFO to DEBUG. ([\#12720](https://github.com/matrix-org/synapse/issues/12720)) +- Downgrade some OIDC errors to warnings in the logs, to reduce the noise of Sentry reports. ([\#12723](https://github.com/matrix-org/synapse/issues/12723)) +- Update configs used by Complement to allow more invites/3PID validations during tests. ([\#12731](https://github.com/matrix-org/synapse/issues/12731)) +- Tweak the mypy plugin so that `@cached` can accept `on_invalidate=None`. ([\#12769](https://github.com/matrix-org/synapse/issues/12769)) +- Move methods that call `add_push_rule` to the `PushRuleStore` class. ([\#12772](https://github.com/matrix-org/synapse/issues/12772)) +- Make handling of federation Authorization header (more) compliant with RFC7230. ([\#12774](https://github.com/matrix-org/synapse/issues/12774)) +- Refactor `resolve_state_groups_for_events` to not pull out full state when no state resolution happens. ([\#12775](https://github.com/matrix-org/synapse/issues/12775)) +- Do not keep going if there are 5 back-to-back background update failures. ([\#12781](https://github.com/matrix-org/synapse/issues/12781)) +- Fix federation when using the demo scripts. ([\#12783](https://github.com/matrix-org/synapse/issues/12783)) +- The `hash_password` script now fails when it is called without specifying a config file. Contributed by @jae1911. ([\#12789](https://github.com/matrix-org/synapse/issues/12789)) +- Improve and fix type hints. ([\#12567](https://github.com/matrix-org/synapse/issues/12567), [\#12477](https://github.com/matrix-org/synapse/issues/12477), [\#12717](https://github.com/matrix-org/synapse/issues/12717), [\#12753](https://github.com/matrix-org/synapse/issues/12753), [\#12695](https://github.com/matrix-org/synapse/issues/12695), [\#12734](https://github.com/matrix-org/synapse/issues/12734), [\#12716](https://github.com/matrix-org/synapse/issues/12716), [\#12726](https://github.com/matrix-org/synapse/issues/12726), [\#12790](https://github.com/matrix-org/synapse/issues/12790), [\#12833](https://github.com/matrix-org/synapse/issues/12833)) +- Update EventContext `get_current_event_ids` and `get_prev_event_ids` to accept state filters and update calls where possible. ([\#12791](https://github.com/matrix-org/synapse/issues/12791)) +- Remove Caddy from the Synapse workers image used in Complement. ([\#12818](https://github.com/matrix-org/synapse/issues/12818)) +- Add Complement's shared registration secret to the Complement worker image. This fixes tests that depend on it. ([\#12819](https://github.com/matrix-org/synapse/issues/12819)) +- Support registering Application Services when running with workers under Complement. ([\#12826](https://github.com/matrix-org/synapse/issues/12826)) +- Disable 'faster room join' Complement tests when testing against Synapse with workers. ([\#12842](https://github.com/matrix-org/synapse/issues/12842)) + + +Synapse 1.59.1 (2022-05-18) +=========================== + +This release fixes a long-standing issue which could prevent Synapse's user directory from updating properly. + +Bugfixes +---------------- + +- Fix a long-standing bug where the user directory background process would fail to make forward progress if a user included a null codepoint in their display name or avatar. Contributed by Nick @ Beeper.
([\#12762](https://github.com/matrix-org/synapse/issues/12762)) + + +Synapse 1.59.0 (2022-05-17) +=========================== + +Synapse 1.59 makes several changes that server administrators should be aware of: + +- Device name lookup over federation is now disabled by default. ([\#12616](https://github.com/matrix-org/synapse/issues/12616)) +- The `synapse.app.appservice` and `synapse.app.user_dir` worker application types are now deprecated. ([\#12452](https://github.com/matrix-org/synapse/issues/12452), [\#12654](https://github.com/matrix-org/synapse/issues/12654)) + +See [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1590) for more details. + +Additionally, this release removes the non-standard `m.login.jwt` login type from Synapse. It can be replaced with `org.matrix.login.jwt` for identical behaviour. This is only used if `jwt_config.enabled` is set to `true` in the configuration. ([\#12597](https://github.com/matrix-org/synapse/issues/12597)) + + +Bugfixes +-------- + +- Fix DB performance regression introduced in Synapse 1.59.0rc2. ([\#12745](https://github.com/matrix-org/synapse/issues/12745)) + + +Synapse 1.59.0rc2 (2022-05-16) +============================== + +Note: this release candidate includes a performance regression which can cause database disruption. Other release candidates in the v1.59.0 series are not affected, and a fix will be included in the v1.59.0 final release. + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.58.0 where `/sync` would fail if the most recent event in a room was rejected. ([\#12729](https://github.com/matrix-org/synapse/issues/12729)) + + +Synapse 1.59.0rc1 (2022-05-10) +============================== + +Features +-------- + +- Support [MSC3266](https://github.com/matrix-org/matrix-doc/pull/3266) room summaries over federation. ([\#11507](https://github.com/matrix-org/synapse/issues/11507)) +- Implement [changes](https://github.com/matrix-org/matrix-spec-proposals/pull/2285/commits/4a77139249c2e830aec3c7d6bd5501a514d1cc27) to [MSC2285 (hidden read receipts)](https://github.com/matrix-org/matrix-spec-proposals/pull/2285). Contributed by @SimonBrandner. ([\#12168](https://github.com/matrix-org/synapse/issues/12168), [\#12635](https://github.com/matrix-org/synapse/issues/12635), [\#12636](https://github.com/matrix-org/synapse/issues/12636), [\#12670](https://github.com/matrix-org/synapse/issues/12670)) +- Extend the [module API](https://github.com/matrix-org/synapse/blob/release-v1.59/synapse/module_api/__init__.py) to allow modules to change actions for existing push rules of local users. ([\#12406](https://github.com/matrix-org/synapse/issues/12406)) +- Add the `notify_appservices_from_worker` configuration option (superseding `notify_appservices`) to allow a generic worker to be designated as the worker to send traffic to Application Services. ([\#12452](https://github.com/matrix-org/synapse/issues/12452)) +- Add the `update_user_directory_from_worker` configuration option (superseding `update_user_directory`) to allow a generic worker to be designated as the worker to update the user directory. ([\#12654](https://github.com/matrix-org/synapse/issues/12654)) +- Add new `enable_registration_token_3pid_bypass` configuration option to allow registrations via token as an alternative to verifying a 3pid. 
([\#12526](https://github.com/matrix-org/synapse/issues/12526)) +- Implement [MSC3786](https://github.com/matrix-org/matrix-spec-proposals/pull/3786): Add a default push rule to ignore `m.room.server_acl` events. ([\#12601](https://github.com/matrix-org/synapse/issues/12601)) +- Add new `mau_appservice_trial_days` configuration option to specify a different trial period for users registered via an appservice. ([\#12619](https://github.com/matrix-org/synapse/issues/12619)) + + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.48.0 where the latest thread reply provided failed to include the proper bundled aggregations. ([\#12273](https://github.com/matrix-org/synapse/issues/12273)) +- Fix a bug introduced in Synapse 1.22.0 where attempting to send a large amount of read receipts to an application service all at once would result in duplicate content and abnormally high memory usage. Contributed by Brad & Nick @ Beeper. ([\#12544](https://github.com/matrix-org/synapse/issues/12544)) +- Fix a bug introduced in Synapse 1.57.0 which could cause `Failed to calculate hosts in room` errors to be logged for outbound federation. ([\#12570](https://github.com/matrix-org/synapse/issues/12570)) +- Fix a long-standing bug where status codes would almost always get logged as `200!`, irrespective of the actual status code, when clients disconnect before a request has finished processing. ([\#12580](https://github.com/matrix-org/synapse/issues/12580)) +- Fix race when persisting an event and deleting a room that could lead to outbound federation breaking. ([\#12594](https://github.com/matrix-org/synapse/issues/12594)) +- Fix a bug introduced in Synapse 1.53.0 where bundled aggregations for annotations/edits were incorrectly calculated. ([\#12633](https://github.com/matrix-org/synapse/issues/12633)) +- Fix a long-standing bug where rooms containing power levels with string values could not be upgraded. ([\#12657](https://github.com/matrix-org/synapse/issues/12657)) +- Prevent memory leak from reoccurring when presence is disabled. ([\#12656](https://github.com/matrix-org/synapse/issues/12656)) + + +Updates to the Docker image +--------------------------- + +- Explicitly opt-in to using [BuildKit-specific features](https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md) in the Dockerfile. This fixes issues with building images in some GitLab CI environments. ([\#12541](https://github.com/matrix-org/synapse/issues/12541)) +- Update the "Build docker images" GitHub Actions workflow to use `docker/metadata-action` to generate docker image tags, instead of a custom shell script. Contributed by @henryclw. ([\#12573](https://github.com/matrix-org/synapse/issues/12573)) + + +Improved Documentation +---------------------- + +- Update SQL statements and replace use of old table `user_stats_historical` in docs for Synapse Admins. ([\#12536](https://github.com/matrix-org/synapse/issues/12536)) +- Add missing linebreak to `pipx` install instructions. ([\#12579](https://github.com/matrix-org/synapse/issues/12579)) +- Add information about the TCP replication module to docs. ([\#12621](https://github.com/matrix-org/synapse/issues/12621)) +- Fixes to the formatting of `README.rst`. ([\#12627](https://github.com/matrix-org/synapse/issues/12627)) +- Fix docs on how to run specific Complement tests using the `complement.sh` test runner. 
([\#12664](https://github.com/matrix-org/synapse/issues/12664)) + + +Deprecations and Removals +------------------------- + +- Remove unstable identifiers from [MSC3069](https://github.com/matrix-org/matrix-doc/pull/3069). ([\#12596](https://github.com/matrix-org/synapse/issues/12596)) +- Remove the unspecified `m.login.jwt` login type and the unstable `uk.half-shot.msc2778.login.application_service` from + [MSC2778](https://github.com/matrix-org/matrix-doc/pull/2778). ([\#12597](https://github.com/matrix-org/synapse/issues/12597)) +- Synapse now requires at least Python 3.7.1 (up from 3.7.0), for compatibility with the latest Twisted trunk. ([\#12613](https://github.com/matrix-org/synapse/issues/12613)) + + +Internal Changes +---------------- + +- Use supervisord to supervise Postgres and Caddy in the Complement image to reduce restart time. ([\#12480](https://github.com/matrix-org/synapse/issues/12480)) +- Immediately retry any requests that have backed off when a server comes back online. ([\#12500](https://github.com/matrix-org/synapse/issues/12500)) +- Use `make_awaitable` instead of `defer.succeed` for return values of mocks in tests. ([\#12505](https://github.com/matrix-org/synapse/issues/12505)) +- Consistently check if an object is a `frozendict`. ([\#12564](https://github.com/matrix-org/synapse/issues/12564)) +- Protect module callbacks with read semantics against cancellation. ([\#12568](https://github.com/matrix-org/synapse/issues/12568)) +- Improve comments and error messages around access tokens. ([\#12577](https://github.com/matrix-org/synapse/issues/12577)) +- Improve docstrings for the receipts store. ([\#12581](https://github.com/matrix-org/synapse/issues/12581)) +- Use constants for read-receipts in tests. ([\#12582](https://github.com/matrix-org/synapse/issues/12582)) +- Log status code of cancelled requests as 499 and avoid logging stack traces for them. ([\#12587](https://github.com/matrix-org/synapse/issues/12587), [\#12663](https://github.com/matrix-org/synapse/issues/12663)) +- Remove special-case for `twisted` logger from default log config. ([\#12589](https://github.com/matrix-org/synapse/issues/12589)) +- Use `getClientAddress` instead of the deprecated `getClientIP`. ([\#12599](https://github.com/matrix-org/synapse/issues/12599)) +- Add link to documentation in Grafana Dashboard. ([\#12602](https://github.com/matrix-org/synapse/issues/12602)) +- Reduce log spam when running multiple event persisters. ([\#12610](https://github.com/matrix-org/synapse/issues/12610)) +- Add extra debug logging to federation sender. ([\#12614](https://github.com/matrix-org/synapse/issues/12614)) +- Prevent remote homeservers from requesting local user device names by default. ([\#12616](https://github.com/matrix-org/synapse/issues/12616)) +- Add a consistency check on events which we read from the database. ([\#12620](https://github.com/matrix-org/synapse/issues/12620)) +- Remove use of the `constantly` library and switch to enums for `EventRedactBehaviour`. Contributed by @andrewdoh. ([\#12624](https://github.com/matrix-org/synapse/issues/12624)) +- Remove unused code related to receipts. ([\#12632](https://github.com/matrix-org/synapse/issues/12632)) +- Minor improvements to the scripts for running Synapse in worker mode under Complement. ([\#12637](https://github.com/matrix-org/synapse/issues/12637)) +- Move `pympler` back in to the `all` extras. ([\#12652](https://github.com/matrix-org/synapse/issues/12652)) +- Fix spelling of `M_UNRECOGNIZED` in comments. 
([\#12665](https://github.com/matrix-org/synapse/issues/12665)) +- Release script: confirm the commit to be tagged before tagging. ([\#12556](https://github.com/matrix-org/synapse/issues/12556)) +- Fix a typo in the announcement text generated by the Synapse release development script. ([\#12612](https://github.com/matrix-org/synapse/issues/12612)) + +### Typechecking + +- Fix scripts-dev to pass typechecking. ([\#12356](https://github.com/matrix-org/synapse/issues/12356)) +- Add some type hints to datastore. ([\#12485](https://github.com/matrix-org/synapse/issues/12485)) +- Remove unused `# type: ignore`s. ([\#12531](https://github.com/matrix-org/synapse/issues/12531)) +- Allow unused `# type: ignore` comments in bleeding edge CI jobs. ([\#12576](https://github.com/matrix-org/synapse/issues/12576)) +- Remove redundant lines of config from `mypy.ini`. ([\#12608](https://github.com/matrix-org/synapse/issues/12608)) +- Update to mypy 0.950. ([\#12650](https://github.com/matrix-org/synapse/issues/12650)) +- Use `Concatenate` to better annotate `_do_execute`. ([\#12666](https://github.com/matrix-org/synapse/issues/12666)) +- Use `ParamSpec` to refine type hints. ([\#12667](https://github.com/matrix-org/synapse/issues/12667)) +- Fix mypy against latest pillow stubs. ([\#12671](https://github.com/matrix-org/synapse/issues/12671)) + +Synapse 1.58.1 (2022-05-05) +=========================== + +This patch release includes a fix to the Debian packages, installing the +`systemd` and `cache_memory` extra package groups, which were incorrectly +omitted in v1.58.0. This primarily prevented Synapse from starting +when the `systemd.journal.JournalHandler` log handler was configured. +See [#12631](https://github.com/matrix-org/synapse/issues/12631) for further information. + +Otherwise, no significant changes since 1.58.0. + + +Synapse 1.58.0 (2022-05-03) +=========================== + +As of this release, the groups/communities feature in Synapse is now disabled by default. See [\#11584](https://github.com/matrix-org/synapse/issues/11584) for details. As mentioned in [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1580), this feature will be removed in Synapse 1.61. + +No significant changes since 1.58.0rc2. + + +Synapse 1.58.0rc2 (2022-04-26) +============================== + +This release candidate fixes bugs related to Synapse 1.58.0rc1's logic for handling device list updates. + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.58.0rc1 where the main process could consume excessive amounts of CPU and memory while handling sentry logging failures. ([\#12554](https://github.com/matrix-org/synapse/issues/12554)) +- Fix a bug introduced in Synapse 1.58.0rc1 where opentracing contexts were not correctly sent to whitelisted remote servers with device lists updates. ([\#12555](https://github.com/matrix-org/synapse/issues/12555)) + + +Internal Changes +---------------- + +- Reduce unnecessary work when handling remote device list updates. ([\#12557](https://github.com/matrix-org/synapse/issues/12557)) + + +Synapse 1.58.0rc1 (2022-04-26) +============================== + +Features +-------- + +- Implement [MSC3383](https://github.com/matrix-org/matrix-spec-proposals/pull/3383) for including the destination in server-to-server authentication headers. Contributed by @Bubu and @jcgruenhage for Famedly. 
([\#11398](https://github.com/matrix-org/synapse/issues/11398)) +- Docker images and Debian packages from matrix.org now contain a locked set of Python dependencies, greatly improving build reproducibility. ([Board](https://github.com/orgs/matrix-org/projects/54), [\#11537](https://github.com/matrix-org/synapse/issues/11537)) +- Enable processing of device list updates asynchronously. ([\#12365](https://github.com/matrix-org/synapse/issues/12365), [\#12465](https://github.com/matrix-org/synapse/issues/12465)) +- Implement [MSC2815](https://github.com/matrix-org/matrix-spec-proposals/pull/2815) to allow room moderators to view redacted event content. Contributed by @tulir @ Beeper. ([\#12427](https://github.com/matrix-org/synapse/issues/12427)) +- Build Debian packages for Ubuntu 22.04 "Jammy Jellyfish". ([\#12543](https://github.com/matrix-org/synapse/issues/12543)) + + +Bugfixes +-------- + +- Prevent a sync request from removing a user's busy presence status. ([\#12213](https://github.com/matrix-org/synapse/issues/12213)) +- Fix bug with incremental sync missing events when rejoining/backfilling. Contributed by Nick @ Beeper. ([\#12319](https://github.com/matrix-org/synapse/issues/12319)) +- Fix a long-standing bug which incorrectly caused `GET /_matrix/client/v3/rooms/{roomId}/event/{eventId}` to return edited events rather than the original. ([\#12476](https://github.com/matrix-org/synapse/issues/12476)) +- Fix a bug introduced in Synapse 1.27.0 where the admin API for [deleting forward extremities](https://github.com/matrix-org/synapse/blob/erikj/fix_delete_event_response_count/docs/admin_api/rooms.md#deleting-forward-extremities) would always return a count of 1, no matter how many extremities were deleted. ([\#12496](https://github.com/matrix-org/synapse/issues/12496)) +- Fix a long-standing bug where the image thumbnails embedded into email notifications were broken. ([\#12510](https://github.com/matrix-org/synapse/issues/12510)) +- Fix a bug in the implementation of [MSC3202](https://github.com/matrix-org/matrix-spec-proposals/pull/3202) where Synapse would use the field name `device_unused_fallback_keys`, rather than `device_unused_fallback_key_types`. ([\#12520](https://github.com/matrix-org/synapse/issues/12520)) +- Fix a bug introduced in Synapse 0.99.3 which could cause Synapse to consume large amounts of RAM when back-paginating in a large room. ([\#12522](https://github.com/matrix-org/synapse/issues/12522)) + + +Improved Documentation +---------------------- + +- Fix rendering of the documentation site when using the 'print' feature. ([\#12340](https://github.com/matrix-org/synapse/issues/12340)) +- Add a manual documenting config file options. ([\#12368](https://github.com/matrix-org/synapse/issues/12368), [\#12527](https://github.com/matrix-org/synapse/issues/12527)) +- Update documentation to reflect that both the `run_background_tasks_on` option and the options for moving stream writers off of the main process are no longer experimental. ([\#12451](https://github.com/matrix-org/synapse/issues/12451)) +- Update worker documentation and replace old `federation_reader` with `generic_worker`. ([\#12457](https://github.com/matrix-org/synapse/issues/12457)) +- Strongly recommend [Poetry](https://python-poetry.org/) for development. ([\#12475](https://github.com/matrix-org/synapse/issues/12475)) +- Add some example configurations for workers and update architectural diagram. ([\#12492](https://github.com/matrix-org/synapse/issues/12492)) +- Fix a broken link in `README.rst`. 
([\#12495](https://github.com/matrix-org/synapse/issues/12495)) +- Add HAProxy delegation example with CORS headers to docs. ([\#12501](https://github.com/matrix-org/synapse/issues/12501)) +- Remove extraneous comma in User Admin API's device deletion section so that the example JSON is actually valid and works. Contributed by @olmari. ([\#12533](https://github.com/matrix-org/synapse/issues/12533)) + + +Deprecations and Removals +------------------------- + +- The groups/communities feature in Synapse is now disabled by default. ([\#12344](https://github.com/matrix-org/synapse/issues/12344)) +- Remove unstable identifiers from [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440). ([\#12382](https://github.com/matrix-org/synapse/issues/12382)) + + +Internal Changes +---------------- + +- Preparation for faster-room-join work: start a background process to resynchronise the room state after a room join. ([\#12394](https://github.com/matrix-org/synapse/issues/12394)) +- Preparation for faster-room-join work: Implement a tracking mechanism to allow functions to wait for full room state to arrive. ([\#12399](https://github.com/matrix-org/synapse/issues/12399)) +- Remove an unstable identifier from [MSC3083](https://github.com/matrix-org/matrix-doc/pull/3083). ([\#12395](https://github.com/matrix-org/synapse/issues/12395)) +- Run CI in the locked [Poetry](https://python-poetry.org/) environment, and remove corresponding `tox` jobs. ([\#12425](https://github.com/matrix-org/synapse/issues/12425), [\#12434](https://github.com/matrix-org/synapse/issues/12434), [\#12438](https://github.com/matrix-org/synapse/issues/12438), [\#12441](https://github.com/matrix-org/synapse/issues/12441), [\#12449](https://github.com/matrix-org/synapse/issues/12449), [\#12478](https://github.com/matrix-org/synapse/issues/12478), [\#12514](https://github.com/matrix-org/synapse/issues/12514), [\#12472](https://github.com/matrix-org/synapse/issues/12472)) +- Change Mutual Rooms' `unstable_features` flag to `uk.half-shot.msc2666.mutual_rooms` which matches the current iteration of [MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666). ([\#12445](https://github.com/matrix-org/synapse/issues/12445)) +- Fix typo in the release script help string. ([\#12450](https://github.com/matrix-org/synapse/issues/12450)) +- Fix a minor typo in the Debian changelogs generated by the release script. ([\#12497](https://github.com/matrix-org/synapse/issues/12497)) +- Reintroduce the list of targets to the linter script, to avoid linting unwanted local-only directories during development. ([\#12455](https://github.com/matrix-org/synapse/issues/12455)) +- Limit length of `device_id` to less than 512 characters. ([\#12454](https://github.com/matrix-org/synapse/issues/12454)) +- Dockerfile-workers: reduce the amount we install in the image. ([\#12464](https://github.com/matrix-org/synapse/issues/12464)) +- Dockerfile-workers: give the master its own log config. ([\#12466](https://github.com/matrix-org/synapse/issues/12466)) +- complement-synapse-workers: factor out separate entry point script. ([\#12467](https://github.com/matrix-org/synapse/issues/12467)) +- Back out experimental implementation of [MSC2314](https://github.com/matrix-org/matrix-spec-proposals/pull/2314). ([\#12474](https://github.com/matrix-org/synapse/issues/12474)) +- Fix grammatical error in federation error response when the room version of a room is unknown. 
([\#12483](https://github.com/matrix-org/synapse/issues/12483)) +- Remove unnecessary configuration overrides in tests. ([\#12511](https://github.com/matrix-org/synapse/issues/12511)) +- Refactor the relations code for clarity. ([\#12519](https://github.com/matrix-org/synapse/issues/12519)) +- Add type hints so `docker` and `stubs` directories pass `mypy --disallow-untyped-defs`. ([\#12528](https://github.com/matrix-org/synapse/issues/12528)) +- Update `delay_cancellation` to accept any awaitable, rather than just `Deferred`s. ([\#12468](https://github.com/matrix-org/synapse/issues/12468)) +- Handle cancellation in `EventsWorkerStore._get_events_from_cache_or_db`. ([\#12529](https://github.com/matrix-org/synapse/issues/12529)) + + +Synapse 1.57.1 (2022-04-20) +=========================== + +This is a patch release that only affects the Docker image. It is only of interest to administrators using [the LDAP module][LDAPModule] to authenticate their users. +If you have already upgraded to Synapse 1.57.0 without problem, then you have no need to upgrade to this patch release. + +[LDAPModule]: https://github.com/matrix-org/matrix-synapse-ldap3 + + +Updates to the Docker image +--------------------------- + +- Include version 0.2.0 of the Synapse LDAP Auth Provider module in the Docker image. This matches the version that was present in the Docker image for Synapse 1.56.0. ([\#12512](https://github.com/matrix-org/synapse/issues/12512)) + + +Synapse 1.57.0 (2022-04-19) +=========================== + +This version includes a [change](https://github.com/matrix-org/synapse/pull/12209) to the way transaction IDs are managed for application services. If your deployment uses a dedicated worker for application service traffic, **it must be stopped** when the database is upgraded (which normally happens when the main process is upgraded), to ensure the change is made safely without any risk of reusing transaction IDs. + +See the [upgrade notes](https://github.com/matrix-org/synapse/blob/v1.57.0rc1/docs/upgrade.md#upgrading-to-v1570) for more details. + +No significant changes since 1.57.0rc1. + + +Synapse 1.57.0rc1 (2022-04-12) +============================== + +Features +-------- + +- Send device list changes to application services as specified by [MSC3202](https://github.com/matrix-org/matrix-spec-proposals/pull/3202), using unstable prefixes. The `msc3202_transaction_extensions` experimental homeserver config option must be enabled and `org.matrix.msc3202: true` must be present in the application service registration file for device list changes to be sent. The "left" field is currently always empty. ([\#11881](https://github.com/matrix-org/synapse/issues/11881)) +- Optimise fetching large quantities of missing room state over federation. ([\#12040](https://github.com/matrix-org/synapse/issues/12040)) +- Offload the `update_client_ip` background job from the main process to the background worker, when using Redis-based replication. ([\#12251](https://github.com/matrix-org/synapse/issues/12251)) +- Move `update_client_ip` background job from the main process to the background worker. ([\#12252](https://github.com/matrix-org/synapse/issues/12252)) +- Add a module callback to react to new 3PID (email address, phone number) associations. ([\#12302](https://github.com/matrix-org/synapse/issues/12302)) +- Add a configuration option to remove a specific set of rooms from sync responses. ([\#12310](https://github.com/matrix-org/synapse/issues/12310)) +- Add a module callback to react to account data changes. 
([\#12327](https://github.com/matrix-org/synapse/issues/12327)) +- Allow setting user admin status using the module API. Contributed by Famedly. ([\#12341](https://github.com/matrix-org/synapse/issues/12341)) +- Reduce overhead of restarting synchrotrons. ([\#12367](https://github.com/matrix-org/synapse/issues/12367), [\#12372](https://github.com/matrix-org/synapse/issues/12372)) +- Update `/messages` to use historic pagination tokens if no `from` query parameter is given. ([\#12370](https://github.com/matrix-org/synapse/issues/12370)) +- Add a module API for reading and writing global account data. ([\#12391](https://github.com/matrix-org/synapse/issues/12391)) +- Support the stable `v1` endpoint for `/relations`, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#12403](https://github.com/matrix-org/synapse/issues/12403)) +- Include bundled aggregations in search results + ([MSC3666](https://github.com/matrix-org/matrix-spec-proposals/pull/3666)). ([\#12436](https://github.com/matrix-org/synapse/issues/12436)) + + +Bugfixes +-------- + +- Fix a long-standing bug where updates to the server notices user profile (display name/avatar URL) in the configuration would not be applied to pre-existing rooms. Contributed by Jorge Florian. ([\#12115](https://github.com/matrix-org/synapse/issues/12115)) +- Fix a long-standing bug where events from ignored users were still considered for bundled aggregations. ([\#12235](https://github.com/matrix-org/synapse/issues/12235), [\#12338](https://github.com/matrix-org/synapse/issues/12338)) +- Fix non-member state events not resolving for historical events when used in [MSC2716](https://github.com/matrix-org/matrix-spec-proposals/pull/2716) `/batch_send` `state_events_at_start`. ([\#12329](https://github.com/matrix-org/synapse/issues/12329)) +- Fix a long-standing bug affecting URL previews that would generate a 500 response instead of a 403 if the previewed URL includes a port that isn't allowed by the relevant blacklist. ([\#12333](https://github.com/matrix-org/synapse/issues/12333)) +- Default to `private` room visibility rather than `public` when a client does not specify one, according to spec. ([\#12350](https://github.com/matrix-org/synapse/issues/12350)) +- Fix a spec compliance issue where requests to the `/publicRooms` federation API would specify `limit` as a string. ([\#12364](https://github.com/matrix-org/synapse/issues/12364), [\#12410](https://github.com/matrix-org/synapse/issues/12410)) +- Fix a bug introduced in Synapse 1.49.0 which caused the `synapse_event_persisted_position` metric to have invalid values. ([\#12390](https://github.com/matrix-org/synapse/issues/12390)) + + +Updates to the Docker image +--------------------------- + +- Bundle locked versions of dependencies into the Docker image. ([\#12385](https://github.com/matrix-org/synapse/issues/12385), [\#12439](https://github.com/matrix-org/synapse/issues/12439)) +- Fix up healthcheck generation for workers docker image. ([\#12405](https://github.com/matrix-org/synapse/issues/12405)) + + +Improved Documentation +---------------------- + +- Clarify documentation for running SyTest against Synapse, including use of Postgres and worker mode. ([\#12271](https://github.com/matrix-org/synapse/issues/12271)) +- Document the behaviour of `LoggingTransaction.call_after` and `LoggingTransaction.call_on_exception` methods when transactions are retried. 
([\#12315](https://github.com/matrix-org/synapse/issues/12315)) +- Update dead links in `check-newsfragment.sh` to point to the correct documentation URL. ([\#12331](https://github.com/matrix-org/synapse/issues/12331)) +- Upgrade the version of `mdbook` in CI to 0.4.17. ([\#12339](https://github.com/matrix-org/synapse/issues/12339)) +- Updates to the Room DAG concepts development document to clarify that we mark events as outliers because we don't have any state for them. ([\#12345](https://github.com/matrix-org/synapse/issues/12345)) +- Update the link to Redis pub/sub documentation in the workers documentation. ([\#12369](https://github.com/matrix-org/synapse/issues/12369)) +- Remove documentation for converting a legacy structured logging configuration to the new format. ([\#12392](https://github.com/matrix-org/synapse/issues/12392)) + + +Deprecations and Removals +------------------------- + +- Remove the unused and unstable `/aggregations` endpoint which was removed from [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#12293](https://github.com/matrix-org/synapse/issues/12293)) + + +Internal Changes +---------------- + +- Remove lingering unstable references to MSC2403 (knocking). ([\#12165](https://github.com/matrix-org/synapse/issues/12165)) +- Avoid trying to calculate the state at outlier events. ([\#12191](https://github.com/matrix-org/synapse/issues/12191), [\#12316](https://github.com/matrix-org/synapse/issues/12316), [\#12330](https://github.com/matrix-org/synapse/issues/12330), [\#12332](https://github.com/matrix-org/synapse/issues/12332), [\#12409](https://github.com/matrix-org/synapse/issues/12409)) +- Omit sending "offline" presence updates to application services after they are initially configured. ([\#12193](https://github.com/matrix-org/synapse/issues/12193)) +- Switch to using a sequence to generate AS transaction IDs. Contributed by Nick @ Beeper. If running synapse with a dedicated appservice worker, this MUST be stopped before upgrading the main process and database. ([\#12209](https://github.com/matrix-org/synapse/issues/12209)) +- Add missing type hints for storage. ([\#12267](https://github.com/matrix-org/synapse/issues/12267)) +- Add missing type definitions for scripts in docker folder. Contributed by Jorge Florian. ([\#12280](https://github.com/matrix-org/synapse/issues/12280)) +- Move [MSC2654](https://github.com/matrix-org/matrix-doc/pull/2654) support behind an experimental configuration flag. ([\#12295](https://github.com/matrix-org/synapse/issues/12295)) +- Update docstrings to explain how to decipher live and historic pagination tokens. ([\#12317](https://github.com/matrix-org/synapse/issues/12317)) +- Add ground work for speeding up device list updates for users in large numbers of rooms. ([\#12321](https://github.com/matrix-org/synapse/issues/12321)) +- Fix typechecker problems exposed by signedjson 1.1.2. ([\#12326](https://github.com/matrix-org/synapse/issues/12326)) +- Remove the `tox` packaging job: it will be redundant once #11537 lands. ([\#12334](https://github.com/matrix-org/synapse/issues/12334)) +- Ignore `.envrc` for `direnv` users. ([\#12335](https://github.com/matrix-org/synapse/issues/12335)) +- Remove the (broadly unused, dev-only) dockerfile for pg tests. ([\#12336](https://github.com/matrix-org/synapse/issues/12336)) +- Remove redundant `get_success` calls in test code. ([\#12346](https://github.com/matrix-org/synapse/issues/12346)) +- Add type annotations for `tests/unittest.py`. 
([\#12347](https://github.com/matrix-org/synapse/issues/12347)) +- Move single-use methods out of `TestCase`. ([\#12348](https://github.com/matrix-org/synapse/issues/12348)) +- Remove broken and unused development scripts. ([\#12349](https://github.com/matrix-org/synapse/issues/12349), [\#12351](https://github.com/matrix-org/synapse/issues/12351), [\#12355](https://github.com/matrix-org/synapse/issues/12355)) +- Convert `Linearizer` tests from `inlineCallbacks` to async. ([\#12353](https://github.com/matrix-org/synapse/issues/12353)) +- Update docstrings for `ReadWriteLock` tests. ([\#12354](https://github.com/matrix-org/synapse/issues/12354)) +- Refactor `Linearizer`, convert methods to async and use an async context manager. ([\#12357](https://github.com/matrix-org/synapse/issues/12357)) +- Fix a long-standing bug where `Linearizer`s could get stuck if a cancellation were to happen at the wrong time. ([\#12358](https://github.com/matrix-org/synapse/issues/12358)) +- Make `StreamToken.from_string` and `RoomStreamToken.parse` propagate cancellations instead of replacing them with `SynapseError`s. ([\#12366](https://github.com/matrix-org/synapse/issues/12366)) +- Add type hints to tests files. ([\#12371](https://github.com/matrix-org/synapse/issues/12371)) +- Allow specifying the Postgres database's port when running unit tests with Postgres. ([\#12376](https://github.com/matrix-org/synapse/issues/12376)) +- Remove temporary pin of signedjson<=1.1.1 that was added in Synapse 1.56.0. ([\#12379](https://github.com/matrix-org/synapse/issues/12379)) +- Add opentracing spans to calls to external cache. ([\#12380](https://github.com/matrix-org/synapse/issues/12380)) +- Lay groundwork for using `poetry` to manage Synapse's dependencies. ([\#12381](https://github.com/matrix-org/synapse/issues/12381), [\#12407](https://github.com/matrix-org/synapse/issues/12407), [\#12412](https://github.com/matrix-org/synapse/issues/12412), [\#12418](https://github.com/matrix-org/synapse/issues/12418)) +- Make missing `importlib_metadata` dependency explicit. ([\#12384](https://github.com/matrix-org/synapse/issues/12384), [\#12400](https://github.com/matrix-org/synapse/issues/12400)) +- Update type annotations for compatibility with prometheus_client 0.14. ([\#12389](https://github.com/matrix-org/synapse/issues/12389)) +- Remove support for the unstable identifiers specified in [MSC3288](https://github.com/matrix-org/matrix-doc/pull/3288). ([\#12398](https://github.com/matrix-org/synapse/issues/12398)) +- Add missing type hints to configuration classes. ([\#12402](https://github.com/matrix-org/synapse/issues/12402)) +- Add files used to build the Docker image used for complement testing into the Synapse repository. ([\#12404](https://github.com/matrix-org/synapse/issues/12404)) +- Do not include groups in the sync response when disabled. ([\#12408](https://github.com/matrix-org/synapse/issues/12408)) +- Improve type hints related to HTTP query parameters. ([\#12415](https://github.com/matrix-org/synapse/issues/12415)) +- Stop maintaining a list of lint targets. ([\#12420](https://github.com/matrix-org/synapse/issues/12420)) +- Make `synapse._scripts` pass type checks. ([\#12421](https://github.com/matrix-org/synapse/issues/12421), [\#12422](https://github.com/matrix-org/synapse/issues/12422)) +- Add some type hints to datastore. ([\#12423](https://github.com/matrix-org/synapse/issues/12423)) +- Enable certificate checking during complement tests.
([\#12435](https://github.com/matrix-org/synapse/issues/12435)) +- Explicitly specify the `tls` extra for Twisted dependency. ([\#12444](https://github.com/matrix-org/synapse/issues/12444)) + + +Synapse 1.56.0 (2022-04-05) +=========================== + +Synapse will now refuse to start up if open registration is enabled, in order to help mitigate +abuse across the federation. If you would like +to provide registration to anyone, consider adding [email](https://github.com/matrix-org/synapse/blob/8a519f8abc6de772167c2cca101d22ee2052fafc/docs/sample_config.yaml#L1285), +[recaptcha](https://matrix-org.github.io/synapse/v1.56/CAPTCHA_SETUP.html) +or [token-based](https://matrix-org.github.io/synapse/v1.56/usage/administration/admin_api/registration_tokens.html) verification +in order to prevent automated registration from bad actors. +This check can be disabled by setting the `enable_registration_without_verification` option in your +homeserver configuration file to `true`. More details are available in the +[upgrade notes](https://matrix-org.github.io/synapse/v1.56/upgrade.html#open-registration-without-verification-is-now-disabled-by-default). + +Synapse will additionally now refuse to start when using PostgreSQL with non-`C` values for `COLLATE` and `CTYPE`, unless +the config flag `allow_unsafe_locale`, found in the database section of the configuration file, is set to `true`. See the +[upgrade notes](https://matrix-org.github.io/synapse/v1.56/upgrade#change-in-behaviour-for-postgresql-databases-with-unsafe-locale) +for details. + +Internal Changes +---------------- + +- Bump the version of `black` for compatibility with the latest `click` release. ([\#12320](https://github.com/matrix-org/synapse/issues/12320)) + + +Synapse 1.56.0rc1 (2022-03-29) +============================== + +Features +-------- + +- Allow modules to store already existing 3PID associations. ([\#12195](https://github.com/matrix-org/synapse/issues/12195)) +- Allow registering server administrators using the module API. Contributed by Famedly. ([\#12250](https://github.com/matrix-org/synapse/issues/12250)) + + +Bugfixes +-------- + +- Fix a long-standing bug which caused the `/_matrix/federation/v1/state` and `/_matrix/federation/v1/state_ids` endpoints to return incorrect or invalid data when called for an event which we have stored as an "outlier". ([\#12087](https://github.com/matrix-org/synapse/issues/12087)) +- Fix a long-standing bug where events from ignored users would still be considered for relations. ([\#12227](https://github.com/matrix-org/synapse/issues/12227), [\#12232](https://github.com/matrix-org/synapse/issues/12232), [\#12285](https://github.com/matrix-org/synapse/issues/12285)) +- Fix a bug introduced in Synapse 1.53.0 where an unnecessary query could be performed when fetching bundled aggregations for threads. ([\#12228](https://github.com/matrix-org/synapse/issues/12228)) +- Fix a bug introduced in Synapse 1.52.0 where admins could not deactivate and GDPR-erase a user if Synapse was configured with limits on avatars. ([\#12261](https://github.com/matrix-org/synapse/issues/12261)) + + +Improved Documentation +---------------------- + +- Fix the link to the module documentation in the legacy spam checker warning message. ([\#12231](https://github.com/matrix-org/synapse/issues/12231)) +- Remove incorrect prefixes in the worker documentation for some endpoints. ([\#12243](https://github.com/matrix-org/synapse/issues/12243)) +- Correct `check_username_for_spam` annotations and docs.
([\#12246](https://github.com/matrix-org/synapse/issues/12246)) +- Correct Authentik OpenID typo, and add notes on troubleshooting. Contributed by @IronTooch. ([\#12275](https://github.com/matrix-org/synapse/issues/12275)) +- Update the HAProxy reverse proxy guide to stop sending IPv4-mapped addresses to the homeserver. Contributed by @villepeh. ([\#12279](https://github.com/matrix-org/synapse/issues/12279)) + + +Internal Changes +---------------- + +- Rename `shared_rooms` to `mutual_rooms` ([MSC2666](https://github.com/matrix-org/matrix-doc/pull/2666)), as per proposal changes. ([\#12036](https://github.com/matrix-org/synapse/issues/12036)) +- Remove check on `update_user_directory` for shared rooms handler ([MSC2666](https://github.com/matrix-org/matrix-doc/pull/2666)), and update/expand documentation. ([\#12038](https://github.com/matrix-org/synapse/issues/12038)) +- Refactor `create_new_client_event` to use a new parameter, `state_event_ids`, which accurately describes the usage with [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) instead of abusing `auth_event_ids`. ([\#12083](https://github.com/matrix-org/synapse/issues/12083), [\#12304](https://github.com/matrix-org/synapse/issues/12304)) +- Refuse to start if registration is enabled without email, captcha, or token-based verification unless the new config flag `enable_registration_without_verification` is set to `true`. ([\#12091](https://github.com/matrix-org/synapse/issues/12091), [\#12322](https://github.com/matrix-org/synapse/issues/12322)) +- Add tests for database transaction callbacks. ([\#12198](https://github.com/matrix-org/synapse/issues/12198)) +- Handle cancellation in `DatabasePool.runInteraction`. ([\#12199](https://github.com/matrix-org/synapse/issues/12199)) +- Add missing type hints for cache storage. ([\#12216](https://github.com/matrix-org/synapse/issues/12216)) +- Add missing type hints for storage. ([\#12248](https://github.com/matrix-org/synapse/issues/12248), [\#12255](https://github.com/matrix-org/synapse/issues/12255)) +- Add type hints to tests files. ([\#12224](https://github.com/matrix-org/synapse/issues/12224), [\#12240](https://github.com/matrix-org/synapse/issues/12240), [\#12256](https://github.com/matrix-org/synapse/issues/12256)) +- Use type stubs for `psycopg2`. ([\#12269](https://github.com/matrix-org/synapse/issues/12269)) +- Improve type annotations for `execute_values`. ([\#12311](https://github.com/matrix-org/synapse/issues/12311)) +- Clean up logic around rebasing URLs for URL image previews. ([\#12219](https://github.com/matrix-org/synapse/issues/12219)) +- Use the `ignored_users` table in additional places instead of re-parsing the account data. ([\#12225](https://github.com/matrix-org/synapse/issues/12225)) +- Refactor the relations endpoints to add a `RelationsHandler`. ([\#12237](https://github.com/matrix-org/synapse/issues/12237)) +- Generate announcement links in the release script. ([\#12242](https://github.com/matrix-org/synapse/issues/12242)) +- Improve error message when dependencies check finds a broken installation. ([\#12244](https://github.com/matrix-org/synapse/issues/12244)) +- Compress metrics HTTP resource when enabled. Contributed by Nick @ Beeper. ([\#12258](https://github.com/matrix-org/synapse/issues/12258)) +- Refuse to start if the PostgreSQL database has a non-`C` locale, unless the config flag `allow_unsafe_locale` is set to `true`. ([\#12262](https://github.com/matrix-org/synapse/issues/12262), [\#12288](https://github.com/matrix-org/synapse/issues/12288))
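Before relying on the `allow_unsafe_locale` escape hatch described in the Synapse 1.56.0 notes above and in the preceding entry, it can help to check what locale an existing Synapse database actually uses. The following is an illustrative sketch only (it is not part of Synapse): it assumes `psycopg2` is installed and that the placeholder connection details are replaced with your own, and it simply reports the values the new startup check inspects.

```python
# Illustrative only: report the locale of an existing Synapse PostgreSQL
# database so you know whether the 1.56.0 startup check will trip.
# The connection details below are placeholders.
import psycopg2

conn = psycopg2.connect(
    dbname="synapse",        # hypothetical database name
    user="synapse_user",     # hypothetical role
    password="secret",       # replace with your own credentials
    host="localhost",
)
with conn, conn.cursor() as cur:
    cur.execute("SHOW LC_COLLATE")
    collate = cur.fetchone()[0]
    cur.execute("SHOW LC_CTYPE")
    ctype = cur.fetchone()[0]
    print(f"LC_COLLATE={collate} LC_CTYPE={ctype}")
    if collate != "C" or ctype != "C":
        print(
            "Non-C locale: Synapse 1.56.0 will refuse to start unless "
            "`allow_unsafe_locale: true` is set under `database` in homeserver.yaml."
        )
conn.close()
```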
+- Optionally include account validity expiration information to experimental [MSC3720](https://github.com/matrix-org/matrix-doc/pull/3720) account status responses. ([\#12266](https://github.com/matrix-org/synapse/issues/12266)) +- Add a new cache `_get_membership_from_event_id` to speed up push rule calculations in large rooms. ([\#12272](https://github.com/matrix-org/synapse/issues/12272)) +- Re-enable Complement concurrency in CI. ([\#12283](https://github.com/matrix-org/synapse/issues/12283)) +- Remove unused test utilities. ([\#12291](https://github.com/matrix-org/synapse/issues/12291)) +- Enhance logging for inbound federation events. ([\#12301](https://github.com/matrix-org/synapse/issues/12301)) +- Fix compatibility with the recently-released Jinja 3.1. ([\#12313](https://github.com/matrix-org/synapse/issues/12313)) +- Avoid trying to calculate the state at outlier events. ([\#12314](https://github.com/matrix-org/synapse/issues/12314)) + + +Synapse 1.55.2 (2022-03-24) +=========================== + +This patch version reverts the earlier fixes from Synapse 1.55.1, which could cause problems in certain deployments, and instead adds a cap to the version of Jinja to be installed. Again, this is to fix an incompatibility with version 3.1.0 of the [Jinja](https://pypi.org/project/Jinja2/) library, and again, deployments of Synapse using the `matrixdotorg/synapse` Docker image or Debian packages from packages.matrix.org are not affected. + +Internal Changes +---------------- + +- Pin Jinja to <3.1.0, as Synapse fails to start with Jinja 3.1.0. ([\#12297](https://github.com/matrix-org/synapse/issues/12297)) +- Revert changes from 1.55.1 as they caused problems with older versions of Jinja. ([\#12296](https://github.com/matrix-org/synapse/issues/12296)) + + +Synapse 1.55.1 (2022-03-24) +=========================== + +This is a patch release that fixes an incompatibility with version 3.1.0 of the [Jinja](https://pypi.org/project/Jinja2/) library, released on March 24th, 2022. Deployments of Synapse using the `matrixdotorg/synapse` Docker image or Debian packages from packages.matrix.org are not affected. + +Internal Changes +---------------- + +- Remove uses of the long-deprecated `jinja2.Markup` which would prevent Synapse from starting with Jinja 3.1.0 or above installed. ([\#12289](https://github.com/matrix-org/synapse/issues/12289)) + + +Synapse 1.55.0 (2022-03-22) +=========================== + +This release removes a workaround introduced in Synapse 1.50.0 for Mjolnir compatibility. **This breaks compatibility with Mjolnir 1.3.1 and earlier. ([\#11700](https://github.com/matrix-org/synapse/issues/11700))**; Mjolnir users should upgrade Mjolnir before upgrading Synapse to this version. + +This release also moves the location of the `synctl` script; see the [upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#synctl-script-has-been-moved) for more details. + + +Internal Changes +---------------- + +- Tweak copy for default Single Sign-On account details template to better adhere to mobile app store guidelines. ([\#12265](https://github.com/matrix-org/synapse/issues/12265), [\#12260](https://github.com/matrix-org/synapse/issues/12260)) + + +Synapse 1.55.0rc1 (2022-03-15) +============================== + +Features +-------- + +- Add third-party rules callbacks `check_can_shutdown_room` and `check_can_deactivate_user`. ([\#12028](https://github.com/matrix-org/synapse/issues/12028))
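The two callbacks named in the entry above are registered through the module API. Below is a hedged sketch of a module wiring them up: the callback signatures are written from memory of the third-party rules documentation for this release and should be verified against the docs for the version you run, and the `RoomLifecyclePolicy` class and its `protected_rooms` config knob are invented purely for illustration.

```python
from synapse.module_api import ModuleApi


class RoomLifecyclePolicy:
    """Illustrative module: veto room shutdowns and user deactivations.

    Signatures are an assumption based on the 1.55-era third-party rules
    callback docs; consult the upstream documentation before relying on them.
    """

    def __init__(self, config: dict, api: ModuleApi):
        self._api = api
        # Hypothetical config option for this sketch only.
        self._protected_rooms = set(config.get("protected_rooms", []))

        api.register_third_party_rules_callbacks(
            check_can_shutdown_room=self.check_can_shutdown_room,
            check_can_deactivate_user=self.check_can_deactivate_user,
        )

    async def check_can_shutdown_room(self, user_id: str, room_id: str) -> bool:
        # Returning False blocks the shutdown requested by `user_id`.
        return room_id not in self._protected_rooms

    async def check_can_deactivate_user(self, user_id: str, by_admin: bool) -> bool:
        # In this sketch, only allow deactivations performed by a server admin.
        return by_admin
```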
+- Improve performance of logging in for large accounts. ([\#12132](https://github.com/matrix-org/synapse/issues/12132)) +- Add experimental env var `SYNAPSE_ASYNC_IO_REACTOR` that causes Synapse to use the asyncio reactor for Twisted. ([\#12135](https://github.com/matrix-org/synapse/issues/12135)) +- Support the stable identifiers from [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440): threads. ([\#12151](https://github.com/matrix-org/synapse/issues/12151)) +- Add a new Jinja2 template filter to extract the local part of an email address. ([\#12212](https://github.com/matrix-org/synapse/issues/12212)) + + +Bugfixes +-------- + +- Use the proper serialization format for bundled thread aggregations. The bug has existed since Synapse 1.48.0. ([\#12090](https://github.com/matrix-org/synapse/issues/12090)) +- Fix a long-standing bug when redacting events with relations. ([\#12113](https://github.com/matrix-org/synapse/issues/12113), [\#12121](https://github.com/matrix-org/synapse/issues/12121), [\#12130](https://github.com/matrix-org/synapse/issues/12130), [\#12189](https://github.com/matrix-org/synapse/issues/12189)) +- Fix a bug introduced in Synapse 1.7.2 whereby background updates are never run with the default background batch size. ([\#12157](https://github.com/matrix-org/synapse/issues/12157)) +- Fix a bug where non-standard information was returned from the `/hierarchy` API. Introduced in Synapse 1.41.0. ([\#12175](https://github.com/matrix-org/synapse/issues/12175)) +- Fix a bug introduced in Synapse 1.54.0 that broke background updates on sqlite homeservers while search was disabled. ([\#12215](https://github.com/matrix-org/synapse/issues/12215)) +- Fix a long-standing bug where a `filter` argument with `event_fields` which did not include the `unsigned` field could result in a 500 error on `/sync`. ([\#12234](https://github.com/matrix-org/synapse/issues/12234)) + + +Improved Documentation +---------------------- + +- Fix complexity checking config example in [Resource Constrained Devices](https://matrix-org.github.io/synapse/v1.54/other/running_synapse_on_single_board_computers.html) docs page. ([\#11998](https://github.com/matrix-org/synapse/issues/11998)) +- Improve documentation for demo scripts. ([\#12143](https://github.com/matrix-org/synapse/issues/12143)) +- Updates to the Room DAG concepts development document. ([\#12179](https://github.com/matrix-org/synapse/issues/12179)) +- Document that the `typing`, `to_device`, `account_data`, `receipts`, and `presence` stream writers can only be used on a single worker. ([\#12196](https://github.com/matrix-org/synapse/issues/12196)) +- Document that contributors can sign off privately by email. ([\#12204](https://github.com/matrix-org/synapse/issues/12204)) + + +Deprecations and Removals +------------------------- + +- **Remove workaround introduced in Synapse 1.50.0 for Mjolnir compatibility. Breaks compatibility with Mjolnir 1.3.1 and earlier. ([\#11700](https://github.com/matrix-org/synapse/issues/11700))** +- **`synctl` has been moved into `synapse._scripts` and is exposed as an entry point; see [upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#synctl-script-has-been-moved). ([\#12140](https://github.com/matrix-org/synapse/issues/12140))** +- Remove backwards compatibility with pagination tokens from the `/relations` and `/aggregations` endpoints generated from Synapse < v1.52.0.
([\#12138](https://github.com/matrix-org/synapse/issues/12138)) +- The groups/communities feature in Synapse has been deprecated. ([\#12200](https://github.com/matrix-org/synapse/issues/12200)) + + +Internal Changes +---------------- + +- Simplify the `ApplicationService` class' set of public methods related to interest checking. ([\#11915](https://github.com/matrix-org/synapse/issues/11915)) +- Add config settings for background update parameters. ([\#11980](https://github.com/matrix-org/synapse/issues/11980)) +- Correct type hints for txredis. ([\#12042](https://github.com/matrix-org/synapse/issues/12042)) +- Limit the size of `aggregation_key` on annotations. ([\#12101](https://github.com/matrix-org/synapse/issues/12101)) +- Add type hints to tests files. ([\#12108](https://github.com/matrix-org/synapse/issues/12108), [\#12146](https://github.com/matrix-org/synapse/issues/12146), [\#12207](https://github.com/matrix-org/synapse/issues/12207), [\#12208](https://github.com/matrix-org/synapse/issues/12208)) +- Move scripts to Synapse package and expose as setuptools entry points. ([\#12118](https://github.com/matrix-org/synapse/issues/12118)) +- Add support for cancellation to `ReadWriteLock`. ([\#12120](https://github.com/matrix-org/synapse/issues/12120)) +- Fix data validation to compare to lists, not sequences. ([\#12128](https://github.com/matrix-org/synapse/issues/12128)) +- Fix CI not attaching source distributions and wheels to the GitHub releases. ([\#12131](https://github.com/matrix-org/synapse/issues/12131)) +- Remove unused mocks from `test_typing`. ([\#12136](https://github.com/matrix-org/synapse/issues/12136)) +- Give `scripts-dev` scripts suffixes for neater CI config. ([\#12137](https://github.com/matrix-org/synapse/issues/12137)) +- Move the snapcraft configuration file to `contrib`. ([\#12142](https://github.com/matrix-org/synapse/issues/12142)) +- Enable [MSC3030](https://github.com/matrix-org/matrix-doc/pull/3030) Complement tests in CI. ([\#12144](https://github.com/matrix-org/synapse/issues/12144)) +- Enable [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) Complement tests in CI. ([\#12145](https://github.com/matrix-org/synapse/issues/12145)) +- Add test for `ObservableDeferred`'s cancellation behaviour. ([\#12149](https://github.com/matrix-org/synapse/issues/12149)) +- Use `ParamSpec` in type hints for `synapse.logging.context`. ([\#12150](https://github.com/matrix-org/synapse/issues/12150)) +- Prune unused jobs from `tox` config. ([\#12152](https://github.com/matrix-org/synapse/issues/12152)) +- Move CI checks out of tox, to facilitate a move to using poetry. ([\#12153](https://github.com/matrix-org/synapse/issues/12153)) +- Avoid generating state groups for local out-of-band leaves. ([\#12154](https://github.com/matrix-org/synapse/issues/12154)) +- Avoid trying to calculate the state at outlier events. ([\#12155](https://github.com/matrix-org/synapse/issues/12155), [\#12173](https://github.com/matrix-org/synapse/issues/12173), [\#12202](https://github.com/matrix-org/synapse/issues/12202)) +- Fix some type annotations. ([\#12156](https://github.com/matrix-org/synapse/issues/12156)) +- Add type hints for `ObservableDeferred` attributes. ([\#12159](https://github.com/matrix-org/synapse/issues/12159)) +- Use a prebuilt Action for the `tests-done` CI job. ([\#12161](https://github.com/matrix-org/synapse/issues/12161)) +- Reduce number of DB queries made during processing of `/sync`. 
([\#12163](https://github.com/matrix-org/synapse/issues/12163)) +- Add `delay_cancellation` utility function, which behaves like `stop_cancellation` but waits until the original `Deferred` resolves before raising a `CancelledError`. ([\#12180](https://github.com/matrix-org/synapse/issues/12180)) +- Retry HTTP replication failures; this should prevent 502s when restarting stateful workers (main, event persisters, stream writers). Contributed by Nick @ Beeper. ([\#12182](https://github.com/matrix-org/synapse/issues/12182)) +- Add cancellation support to `@cached` and `@cachedList` decorators. ([\#12183](https://github.com/matrix-org/synapse/issues/12183)) +- Remove unused variables. ([\#12187](https://github.com/matrix-org/synapse/issues/12187)) +- Add combined test for HTTP pusher and push rule. Contributed by Nick @ Beeper. ([\#12188](https://github.com/matrix-org/synapse/issues/12188)) +- Rename `HomeServer.get_tcp_replication` to `get_replication_command_handler`. ([\#12192](https://github.com/matrix-org/synapse/issues/12192)) +- Remove some dead code. ([\#12197](https://github.com/matrix-org/synapse/issues/12197)) +- Fix a misleading comment in the function `check_event_for_spam`. ([\#12203](https://github.com/matrix-org/synapse/issues/12203)) +- Remove unnecessary `pass` statements. ([\#12206](https://github.com/matrix-org/synapse/issues/12206)) +- Update the SSO username picker template to comply with SIWA guidelines. ([\#12210](https://github.com/matrix-org/synapse/issues/12210)) +- Improve code documentation for the typing stream over replication. ([\#12211](https://github.com/matrix-org/synapse/issues/12211)) + + +Synapse 1.54.0 (2022-03-08) +=========================== + +Please note that this will be the last release of Synapse that is compatible with Mjolnir 1.3.1 and earlier. +Administrators of servers which have the Mjolnir module installed are advised to upgrade Mjolnir to version 1.3.2 or later. + + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.54.0rc1 preventing the new module callbacks introduced in this release from being registered by modules. ([\#12141](https://github.com/matrix-org/synapse/issues/12141)) +- Fix a bug introduced in Synapse 1.54.0rc1 where runtime dependency version checks would mistakenly check development dependencies if they were present and would not accept pre-release versions of dependencies. ([\#12129](https://github.com/matrix-org/synapse/issues/12129), [\#12177](https://github.com/matrix-org/synapse/issues/12177)) + + +Internal Changes +---------------- + +- Update release script to insert the previous version when writing "No significant changes" line in the changelog. ([\#12127](https://github.com/matrix-org/synapse/issues/12127)) +- Relax the version guard for "packaging" added in [\#12088](https://github.com/matrix-org/synapse/issues/12088). ([\#12166](https://github.com/matrix-org/synapse/issues/12166)) + + +Synapse 1.54.0rc1 (2022-03-02) +============================== + + +Features +-------- + +- Add support for [MSC3202](https://github.com/matrix-org/matrix-doc/pull/3202): sending one-time key counts and fallback key usage states to Application Services. ([\#11617](https://github.com/matrix-org/synapse/issues/11617)) +- Improve the generated URL previews for some web pages. Contributed by @AndrewRyanChama. ([\#11985](https://github.com/matrix-org/synapse/issues/11985)) +- Track cache invalidations in Prometheus metrics, as already happens for cache eviction based on size or time.
([\#12000](https://github.com/matrix-org/synapse/issues/12000)) +- Implement experimental support for [MSC3720](https://github.com/matrix-org/matrix-doc/pull/3720) (account status endpoints). ([\#12001](https://github.com/matrix-org/synapse/issues/12001), [\#12067](https://github.com/matrix-org/synapse/issues/12067)) +- Enable modules to set a custom display name when registering a user. ([\#12009](https://github.com/matrix-org/synapse/issues/12009)) +- Advertise Matrix 1.1 and 1.2 support on `/_matrix/client/versions`. ([\#12020](https://github.com/matrix-org/synapse/issues/12020), [\#12022](https://github.com/matrix-org/synapse/issues/12022)) +- Support only the stable identifier for [MSC3069](https://github.com/matrix-org/matrix-doc/pull/3069)'s `is_guest` on `/_matrix/client/v3/account/whoami`. ([\#12021](https://github.com/matrix-org/synapse/issues/12021)) +- Use room version 9 as the default room version (per [MSC3589](https://github.com/matrix-org/matrix-doc/pull/3589)). ([\#12058](https://github.com/matrix-org/synapse/issues/12058)) +- Add module callbacks to react to user deactivation status changes (i.e. deactivations and reactivations) and profile updates. ([\#12062](https://github.com/matrix-org/synapse/issues/12062)) + + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.48.0 where an edit of the latest event in a thread would not be properly applied to the thread summary. ([\#11992](https://github.com/matrix-org/synapse/issues/11992)) +- Fix a long-standing bug where the `get_rooms_for_user` cache was not correctly invalidated for remote users when the server left a room. ([\#11999](https://github.com/matrix-org/synapse/issues/11999)) +- Fix a 500 error with Postgres when looking backwards with the [MSC3030](https://github.com/matrix-org/matrix-doc/pull/3030) `/timestamp_to_event?dir=b` endpoint. ([\#12024](https://github.com/matrix-org/synapse/issues/12024)) +- Properly fix a long-standing bug where wrong data could be inserted into the `event_search` table when using SQLite. This could block running `synapse_port_db` with an `argument of type 'int' is not iterable` error. This bug was partially fixed by a change in Synapse 1.44.0. ([\#12037](https://github.com/matrix-org/synapse/issues/12037)) +- Fix slow performance of `/logout` in some cases where refresh tokens are in use. The slowness existed since the initial implementation of refresh tokens in version 1.38.0. ([\#12056](https://github.com/matrix-org/synapse/issues/12056)) +- Fix a long-standing bug where Synapse would make additional failing requests over federation for missing data. ([\#12077](https://github.com/matrix-org/synapse/issues/12077)) +- Fix occasional `Unhandled error in Deferred` error message. ([\#12089](https://github.com/matrix-org/synapse/issues/12089)) +- Fix a bug introduced in Synapse 1.51.0 where incoming federation transactions containing at least one EDU would be dropped if debug logging was enabled for `synapse.8631_debug`. ([\#12098](https://github.com/matrix-org/synapse/issues/12098)) +- Fix a long-standing bug which could cause push notifications to malfunction if `use_frozen_dicts` was set in the configuration. ([\#12100](https://github.com/matrix-org/synapse/issues/12100)) +- Fix an extremely rare, long-standing bug in `ReadWriteLock` that would cause an error when a newly unblocked writer completes instantly. ([\#12105](https://github.com/matrix-org/synapse/issues/12105)) +- Make a `POST` to `/rooms/{roomId}/receipt/m.read/{eventId}` only trigger a push notification if the count of unread messages is different to the one in the last successfully sent push. This reduces server load and load on the receiving device. ([\#11835](https://github.com/matrix-org/synapse/issues/11835))
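For context on the read-receipt change above, the client call whose push behaviour it adjusts is the standard Matrix receipts endpoint. The sketch below uses `requests`; the homeserver URL, access token, room ID, and event ID are all placeholders rather than values from this changelog.

```python
# Illustrative sketch of posting an m.read receipt for the latest event the
# user has read. All identifiers below are placeholders.
import requests
from urllib.parse import quote

homeserver = "https://matrix.example.org"   # hypothetical homeserver
access_token = "placeholder_access_token"   # placeholder token
room_id = "!somewhere:example.org"          # placeholder room ID
event_id = "$some_event_id"                 # placeholder event ID

url = (
    f"{homeserver}/_matrix/client/v3/rooms/{quote(room_id)}"
    f"/receipt/m.read/{quote(event_id)}"
)
resp = requests.post(
    url,
    headers={"Authorization": f"Bearer {access_token}"},
    json={},  # the receipt endpoint accepts an empty JSON body
)
resp.raise_for_status()
```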
+Updates to the Docker image +--------------------------- + +- The Docker image no longer automatically creates a temporary volume at `/data`. This is not expected to affect normal usage. ([\#11997](https://github.com/matrix-org/synapse/issues/11997)) +- Use Python 3.9 in Docker images by default. ([\#12112](https://github.com/matrix-org/synapse/issues/12112)) + + +Improved Documentation +---------------------- + +- Document support for the `to_device`, `account_data`, `receipts`, and `presence` stream writers for workers. ([\#11599](https://github.com/matrix-org/synapse/issues/11599)) +- Explain the meaning of spam checker callbacks' return values. ([\#12003](https://github.com/matrix-org/synapse/issues/12003)) +- Clarify information about external Identity Provider IDs. ([\#12004](https://github.com/matrix-org/synapse/issues/12004)) + + +Deprecations and Removals +------------------------- + +- Deprecate using `synctl` with the config option `synctl_cache_factor` and print a warning if a user still uses this option. ([\#11865](https://github.com/matrix-org/synapse/issues/11865)) +- Remove support for the legacy structured logging configuration (please see the [upgrade notes](https://matrix-org.github.io/synapse/develop/upgrade#legacy-structured-logging-configuration-removal) if you are using `structured: true` in the Synapse configuration). ([\#12008](https://github.com/matrix-org/synapse/issues/12008)) +- Drop support for [MSC3283](https://github.com/matrix-org/matrix-doc/pull/3283) unstable flags now that the stable flags are supported. ([\#12018](https://github.com/matrix-org/synapse/issues/12018)) +- Remove the unstable `/spaces` endpoint from [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946). ([\#12073](https://github.com/matrix-org/synapse/issues/12073)) + + +Internal Changes +---------------- + +- Make the `get_room_version` method use `get_room_version_id` to benefit from caching. ([\#11808](https://github.com/matrix-org/synapse/issues/11808)) +- Remove unnecessary condition on knock -> leave auth rule check. ([\#11900](https://github.com/matrix-org/synapse/issues/11900)) +- Add tests for device list changes between local users. ([\#11972](https://github.com/matrix-org/synapse/issues/11972)) +- Optimise calculating `device_list` changes in `/sync`. ([\#11974](https://github.com/matrix-org/synapse/issues/11974)) +- Add missing type hints to storage classes. ([\#11984](https://github.com/matrix-org/synapse/issues/11984)) +- Refactor the search code for improved readability. ([\#11991](https://github.com/matrix-org/synapse/issues/11991)) +- Move common deduplication code down into `_auth_and_persist_outliers`. ([\#11994](https://github.com/matrix-org/synapse/issues/11994)) +- Limit concurrent joins from application services. ([\#11996](https://github.com/matrix-org/synapse/issues/11996)) +- Preparation for faster-room-join work: when parsing the `send_join` response, get the `m.room.create` event from `state`, not `auth_chain`.
([\#12005](https://github.com/matrix-org/synapse/issues/12005), [\#12039](https://github.com/matrix-org/synapse/issues/12039)) +- Preparation for faster-room-join work: parse MSC3706 fields in send_join response. ([\#12011](https://github.com/matrix-org/synapse/issues/12011)) +- Preparation for faster-room-join work: persist information on which events and rooms have partial state to the database. ([\#12012](https://github.com/matrix-org/synapse/issues/12012)) +- Preparation for faster-room-join work: Support for calling `/federation/v1/state` on a remote server. ([\#12013](https://github.com/matrix-org/synapse/issues/12013)) +- Configure `tox` to use `venv` rather than `virtualenv`. ([\#12015](https://github.com/matrix-org/synapse/issues/12015)) +- Fix bug in `StateFilter.return_expanded()` and add some tests. ([\#12016](https://github.com/matrix-org/synapse/issues/12016)) +- Use Matrix v1.1 endpoints (`/_matrix/client/v3/auth/...`) in fallback auth HTML forms. ([\#12019](https://github.com/matrix-org/synapse/issues/12019)) +- Update the `olddeps` CI job to use an old version of `markupsafe`. ([\#12025](https://github.com/matrix-org/synapse/issues/12025)) +- Upgrade Mypy to version 0.931. ([\#12030](https://github.com/matrix-org/synapse/issues/12030)) +- Remove legacy `HomeServer.get_datastore()`. ([\#12031](https://github.com/matrix-org/synapse/issues/12031), [\#12070](https://github.com/matrix-org/synapse/issues/12070)) +- Minor typing fixes. ([\#12034](https://github.com/matrix-org/synapse/issues/12034), [\#12069](https://github.com/matrix-org/synapse/issues/12069)) +- After joining a room, create a dedicated logcontext to process the queued events. ([\#12041](https://github.com/matrix-org/synapse/issues/12041)) +- Tidy up GitHub Actions config which builds distributions for PyPI. ([\#12051](https://github.com/matrix-org/synapse/issues/12051)) +- Move configuration out of `setup.cfg`. ([\#12052](https://github.com/matrix-org/synapse/issues/12052), [\#12059](https://github.com/matrix-org/synapse/issues/12059)) +- Fix error message when a worker process fails to talk to another worker process. ([\#12060](https://github.com/matrix-org/synapse/issues/12060)) +- Fix using the `complement.sh` script without specifying a directory or a branch. Contributed by Nico on behalf of Famedly. ([\#12063](https://github.com/matrix-org/synapse/issues/12063)) +- Add type hints to `tests/rest/client`. ([\#12066](https://github.com/matrix-org/synapse/issues/12066), [\#12072](https://github.com/matrix-org/synapse/issues/12072), [\#12084](https://github.com/matrix-org/synapse/issues/12084), [\#12094](https://github.com/matrix-org/synapse/issues/12094)) +- Add some logging to `/sync` to try and track down #11916. ([\#12068](https://github.com/matrix-org/synapse/issues/12068)) +- Inspect application dependencies using `importlib.metadata` or its backport. ([\#12088](https://github.com/matrix-org/synapse/issues/12088)) +- Use `assertEqual` instead of the deprecated `assertEquals` in test code. ([\#12092](https://github.com/matrix-org/synapse/issues/12092)) +- Move experimental support for [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440) to `/versions`. ([\#12099](https://github.com/matrix-org/synapse/issues/12099)) +- Add `stop_cancellation` utility function to stop `Deferred`s from being cancelled. ([\#12106](https://github.com/matrix-org/synapse/issues/12106)) +- Improve exception handling for concurrent execution. 
([\#12109](https://github.com/matrix-org/synapse/issues/12109)) +- Advertise support for Python 3.10 in packaging files. ([\#12111](https://github.com/matrix-org/synapse/issues/12111)) +- Move CI checks out of tox, to facilitate a move to using poetry. ([\#12119](https://github.com/matrix-org/synapse/issues/12119)) + + +Synapse 1.53.0 (2022-02-22) +=========================== + +No significant changes since 1.53.0rc1. + + +Synapse 1.53.0rc1 (2022-02-15) +============================== + +Features +-------- + +- Add experimental support for sending to-device messages to application services, as specified by [MSC2409](https://github.com/matrix-org/matrix-doc/pull/2409). ([\#11215](https://github.com/matrix-org/synapse/issues/11215), [\#11966](https://github.com/matrix-org/synapse/issues/11966)) +- Add a background database update to purge account data for deactivated users. ([\#11655](https://github.com/matrix-org/synapse/issues/11655)) +- Experimental support for [MSC3666](https://github.com/matrix-org/matrix-doc/pull/3666): including bundled aggregations in server side search results. ([\#11837](https://github.com/matrix-org/synapse/issues/11837)) +- Enable cache time-based expiry by default. The `expiry_time` config flag has been superseded by `expire_caches` and `cache_entry_ttl`. ([\#11849](https://github.com/matrix-org/synapse/issues/11849)) +- Add a callback to allow modules to allow or forbid a 3PID (email address, phone number) from being associated to a local account. ([\#11854](https://github.com/matrix-org/synapse/issues/11854)) +- Stabilize support and remove unstable endpoints for [MSC3231](https://github.com/matrix-org/matrix-doc/pull/3231). Clients must switch to the stable identifier and endpoint. See the [upgrade notes](https://matrix-org.github.io/synapse/develop/upgrade#stablisation-of-msc3231) for more information. ([\#11867](https://github.com/matrix-org/synapse/issues/11867)) +- Allow modules to retrieve the current instance's server name and worker name. ([\#11868](https://github.com/matrix-org/synapse/issues/11868)) +- Use a dedicated configurable rate limiter for 3PID invites. ([\#11892](https://github.com/matrix-org/synapse/issues/11892)) +- Support the stable API endpoint for [MSC3283](https://github.com/matrix-org/matrix-doc/pull/3283): new settings in `/capabilities` endpoint. ([\#11933](https://github.com/matrix-org/synapse/issues/11933), [\#11989](https://github.com/matrix-org/synapse/issues/11989)) +- Support the `dir` parameter on the `/relations` endpoint, per [MSC3715](https://github.com/matrix-org/matrix-doc/pull/3715). ([\#11941](https://github.com/matrix-org/synapse/issues/11941)) +- Experimental implementation of [MSC3706](https://github.com/matrix-org/matrix-doc/pull/3706): extensions to `/send_join` to support reduced response size. ([\#11967](https://github.com/matrix-org/synapse/issues/11967)) + + +Bugfixes +-------- + +- Fix [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) historical messages backfilling in random order on remote homeservers. ([\#11114](https://github.com/matrix-org/synapse/issues/11114)) +- Fix a bug introduced in Synapse 1.51.0 where incoming federation transactions containing at least one EDU would be dropped if debug logging was enabled for `synapse.8631_debug`. ([\#11890](https://github.com/matrix-org/synapse/issues/11890)) +- Fix a long-standing bug where some unknown endpoints would return HTML error pages instead of JSON `M_UNRECOGNIZED` errors. 
([\#11930](https://github.com/matrix-org/synapse/issues/11930)) +- Implement an allow list of content types for which we will attempt to preview a URL. This prevents Synapse from making useless longer-lived connections to streaming media servers. ([\#11936](https://github.com/matrix-org/synapse/issues/11936)) +- Fix a long-standing bug where pagination tokens from `/sync` and `/messages` could not be provided to the `/relations` API. ([\#11952](https://github.com/matrix-org/synapse/issues/11952)) +- Require that modules register their callbacks using keyword arguments. ([\#11975](https://github.com/matrix-org/synapse/issues/11975)) +- Fix a long-standing bug where `M_WRONG_ROOM_KEYS_VERSION` errors would not include the specced `current_version` field. ([\#11988](https://github.com/matrix-org/synapse/issues/11988)) + + +Improved Documentation +---------------------- + +- Fix typo in User Admin API: unpind -> unbind. ([\#11859](https://github.com/matrix-org/synapse/issues/11859)) +- Document images returned by the User List Media Admin API can include those generated by URL previews. ([\#11862](https://github.com/matrix-org/synapse/issues/11862)) +- Remove outdated MSC1711 FAQ document. ([\#11907](https://github.com/matrix-org/synapse/issues/11907)) +- Correct the structured logging configuration example. Contributed by Brad Jones. ([\#11946](https://github.com/matrix-org/synapse/issues/11946)) +- Add information on the Synapse release cycle. ([\#11954](https://github.com/matrix-org/synapse/issues/11954)) +- Fix broken link in the README to the admin API for password reset. ([\#11955](https://github.com/matrix-org/synapse/issues/11955)) + + +Deprecations and Removals +------------------------- + +- Drop support for `webclient` listeners and configuring `web_client_location` to a non-HTTP(S) URL. Deprecated configurations are a configuration error. ([\#11895](https://github.com/matrix-org/synapse/issues/11895)) +- Remove deprecated `user_may_create_room_with_invites` spam checker callback. See the [upgrade notes](https://matrix-org.github.io/synapse/latest/upgrade.html#removal-of-user_may_create_room_with_invites) for more information. ([\#11950](https://github.com/matrix-org/synapse/issues/11950)) +- No longer build `.deb` packages for Ubuntu 21.04 Hirsute Hippo, which has now EOLed. ([\#11961](https://github.com/matrix-org/synapse/issues/11961)) + + +Internal Changes +---------------- + +- Enhance user registration test helpers to make them more useful for tests involving application services and devices. ([\#11615](https://github.com/matrix-org/synapse/issues/11615), [\#11616](https://github.com/matrix-org/synapse/issues/11616)) +- Improve performance when fetching bundled aggregations for multiple events. ([\#11660](https://github.com/matrix-org/synapse/issues/11660), [\#11752](https://github.com/matrix-org/synapse/issues/11752)) +- Fix type errors introduced by new annotations in the Prometheus Client library. ([\#11832](https://github.com/matrix-org/synapse/issues/11832)) +- Add missing type hints to replication code. ([\#11856](https://github.com/matrix-org/synapse/issues/11856), [\#11938](https://github.com/matrix-org/synapse/issues/11938)) +- Ensure that `opentracing` scopes are activated and closed at the right time. ([\#11869](https://github.com/matrix-org/synapse/issues/11869)) +- Improve opentracing for incoming federation requests. ([\#11870](https://github.com/matrix-org/synapse/issues/11870)) +- Improve internal docstrings in `synapse.util.caches`. 
([\#11876](https://github.com/matrix-org/synapse/issues/11876)) +- Do not needlessly clear the `get_users_in_room` and `get_users_in_room_with_profiles` caches when any room state changes. ([\#11878](https://github.com/matrix-org/synapse/issues/11878)) +- Convert `ApplicationServiceTestCase` to use `simple_async_mock`. ([\#11880](https://github.com/matrix-org/synapse/issues/11880)) +- Remove experimental changes to the default push rules which were introduced in Synapse 1.19.0 but never enabled. ([\#11884](https://github.com/matrix-org/synapse/issues/11884)) +- Disable coverage calculation for olddeps build. ([\#11888](https://github.com/matrix-org/synapse/issues/11888)) +- Preparation to support sending device list updates to application services. ([\#11905](https://github.com/matrix-org/synapse/issues/11905)) +- Add a test that checks users receive their own device list updates down `/sync`. ([\#11909](https://github.com/matrix-org/synapse/issues/11909)) +- Run Complement tests sequentially. ([\#11910](https://github.com/matrix-org/synapse/issues/11910)) +- Various refactors to the application service notifier code. ([\#11911](https://github.com/matrix-org/synapse/issues/11911), [\#11912](https://github.com/matrix-org/synapse/issues/11912)) +- Tests: replace mocked `Authenticator` with the real thing. ([\#11913](https://github.com/matrix-org/synapse/issues/11913)) +- Various refactors to the typing notifications code. ([\#11914](https://github.com/matrix-org/synapse/issues/11914)) +- Use the proper type for the `Content-Length` header in the `UploadResource`. ([\#11927](https://github.com/matrix-org/synapse/issues/11927)) +- Remove an unnecessary ignoring of type hints due to fixes in upstream packages. ([\#11939](https://github.com/matrix-org/synapse/issues/11939)) +- Add missing type hints. ([\#11953](https://github.com/matrix-org/synapse/issues/11953)) +- Fix an import cycle in `synapse.event_auth`. ([\#11965](https://github.com/matrix-org/synapse/issues/11965)) +- Unpin `frozendict` but exclude the known bad version 2.1.2. ([\#11969](https://github.com/matrix-org/synapse/issues/11969)) +- Prepare for rename of default Complement branch. ([\#11971](https://github.com/matrix-org/synapse/issues/11971)) +- Fetch Synapse's version using a helper from `matrix-common`. ([\#11979](https://github.com/matrix-org/synapse/issues/11979)) + + +Synapse 1.52.0 (2022-02-08) +=========================== + +No significant changes since 1.52.0rc1. + +Note that [Twisted 22.1.0](https://github.com/twisted/twisted/releases/tag/twisted-22.1.0) +has recently been released, which fixes a [security issue](https://github.com/twisted/twisted/security/advisories/GHSA-92x2-jw7w-xvvx) +within the Twisted library. We do not believe Synapse is affected by this vulnerability, +though we advise server administrators who installed Synapse via pip to upgrade Twisted +with `pip install --upgrade Twisted treq` as a matter of good practice. The Docker image +`matrixdotorg/synapse` and the Debian packages from `packages.matrix.org` are using the +updated library. + + +Synapse 1.52.0rc1 (2022-02-01) +============================== + +Features +-------- + +- Remove account data (including client config, push rules and ignored users) upon user deactivation. ([\#11621](https://github.com/matrix-org/synapse/issues/11621), [\#11788](https://github.com/matrix-org/synapse/issues/11788), [\#11789](https://github.com/matrix-org/synapse/issues/11789)) +- Add an admin API to reset connection timeouts for remote server. 
([\#11639](https://github.com/matrix-org/synapse/issues/11639)) +- Add an admin API to get a list of rooms that federate with a given remote homeserver. ([\#11658](https://github.com/matrix-org/synapse/issues/11658)) +- Add a config flag to inhibit `M_USER_IN_USE` during registration. ([\#11743](https://github.com/matrix-org/synapse/issues/11743)) +- Add a module callback to set username at registration. ([\#11790](https://github.com/matrix-org/synapse/issues/11790)) +- Allow configuring a maximum file size as well as a list of allowed content types for avatars. ([\#11846](https://github.com/matrix-org/synapse/issues/11846)) + + +Bugfixes +-------- + +- Include the bundled aggregations in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#11612](https://github.com/matrix-org/synapse/issues/11612)) +- Fix a long-standing bug when previewing Reddit URLs which do not contain an image. ([\#11767](https://github.com/matrix-org/synapse/issues/11767)) +- Fix a long-standing bug that media streams could cause long-lived connections when generating URL previews. ([\#11784](https://github.com/matrix-org/synapse/issues/11784)) +- Include a `prev_content` field in state events sent to Application Services. Contributed by @totallynotvaishnav. ([\#11798](https://github.com/matrix-org/synapse/issues/11798)) +- Fix a bug introduced in Synapse 0.33.3 causing requests to sometimes log strings such as `HTTPStatus.OK` instead of integer status codes. ([\#11827](https://github.com/matrix-org/synapse/issues/11827)) + + +Improved Documentation +---------------------- + +- Update pypi installation docs to indicate that we now support Python 3.10. ([\#11820](https://github.com/matrix-org/synapse/issues/11820)) +- Add missing steps to the contribution submission process in the documentation. Contributed by @sequentialread. ([\#11821](https://github.com/matrix-org/synapse/issues/11821)) +- Remove not needed old table of contents in documentation. ([\#11860](https://github.com/matrix-org/synapse/issues/11860)) +- Consolidate the `access_token` information at the top of each relevant page in the Admin API documentation. ([\#11861](https://github.com/matrix-org/synapse/issues/11861)) + + +Deprecations and Removals +------------------------- + +- Drop support for Python 3.6, which is EOL. ([\#11683](https://github.com/matrix-org/synapse/issues/11683)) +- Remove the `experimental_msc1849_support_enabled` flag as the features are now stable. ([\#11843](https://github.com/matrix-org/synapse/issues/11843)) + + +Internal Changes +---------------- + +- Preparation for database schema simplifications: add `state_key` and `rejection_reason` columns to `events` table. ([\#11792](https://github.com/matrix-org/synapse/issues/11792)) +- Add `FrozenEvent.get_state_key` and use it in a couple of places. ([\#11793](https://github.com/matrix-org/synapse/issues/11793)) +- Preparation for database schema simplifications: stop reading from `event_reference_hashes`. ([\#11794](https://github.com/matrix-org/synapse/issues/11794)) +- Drop unused table `public_room_list_stream`. ([\#11795](https://github.com/matrix-org/synapse/issues/11795)) +- Preparation for reducing Postgres serialization errors: allow setting transaction isolation level. Contributed by Nick @ Beeper. ([\#11799](https://github.com/matrix-org/synapse/issues/11799), [\#11847](https://github.com/matrix-org/synapse/issues/11847)) +- Docker: skip the initial amd64-only build and go straight to multiarch. 
([\#11810](https://github.com/matrix-org/synapse/issues/11810)) +- Run Complement on the Github Actions VM and not inside a Docker container. ([\#11811](https://github.com/matrix-org/synapse/issues/11811)) +- Log module names at startup. ([\#11813](https://github.com/matrix-org/synapse/issues/11813)) +- Improve type safety of bundled aggregations code. ([\#11815](https://github.com/matrix-org/synapse/issues/11815)) +- Correct a type annotation in the event validation logic. ([\#11817](https://github.com/matrix-org/synapse/issues/11817), [\#11830](https://github.com/matrix-org/synapse/issues/11830)) +- Minor updates and documentation for database schema delta files. ([\#11823](https://github.com/matrix-org/synapse/issues/11823)) +- Workaround a type annotation problem in `prometheus_client` 0.13.0. ([\#11834](https://github.com/matrix-org/synapse/issues/11834)) +- Minor performance improvement in room state lookup. ([\#11836](https://github.com/matrix-org/synapse/issues/11836)) +- Fix some indentation inconsistencies in the sample config. ([\#11838](https://github.com/matrix-org/synapse/issues/11838)) +- Add type hints to `tests/rest/admin`. ([\#11851](https://github.com/matrix-org/synapse/issues/11851)) + + +Synapse 1.51.0 (2022-01-25) +=========================== + +No significant changes since 1.51.0rc2. + +Synapse 1.51.0 deprecates `webclient` listeners and non-HTTP(S) `web_client_location`s. Support for these will be removed in Synapse 1.53.0, at which point Synapse will not be capable of directly serving a web client for Matrix. See the [upgrade notes](https://matrix-org.github.io/synapse/develop/upgrade#upgrading-to-v1510). + +Synapse 1.51.0rc2 (2022-01-24) +============================== + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.40.0 that caused Synapse to fail to process incoming federation traffic after handling a large amount of events in a v1 room. ([\#11806](https://github.com/matrix-org/synapse/issues/11806)) + + +Synapse 1.50.2 (2022-01-24) +=========================== + +This release includes the same bugfix as Synapse 1.51.0rc2. + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.40.0 that caused Synapse to fail to process incoming federation traffic after handling a large amount of events in a v1 room. ([\#11806](https://github.com/matrix-org/synapse/issues/11806)) + + +Synapse 1.51.0rc1 (2022-01-21) +============================== + +Features +-------- + +- Add `track_puppeted_user_ips` config flag to record client IP addresses against puppeted users, and include the puppeted users in monthly active user counts. ([\#11561](https://github.com/matrix-org/synapse/issues/11561), [\#11749](https://github.com/matrix-org/synapse/issues/11749), [\#11757](https://github.com/matrix-org/synapse/issues/11757)) +- Include whether the requesting user has participated in a thread when generating a summary for [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440). ([\#11577](https://github.com/matrix-org/synapse/issues/11577)) +- Return an `M_FORBIDDEN` error code instead of `M_UNKNOWN` when a spam checker module prevents a user from creating a room. ([\#11672](https://github.com/matrix-org/synapse/issues/11672)) +- Add a flag to the `synapse_review_recent_signups` script to ignore and filter appservice users. 
([\#11675](https://github.com/matrix-org/synapse/issues/11675), [\#11770](https://github.com/matrix-org/synapse/issues/11770)) + + +Bugfixes +-------- + +- Fix a long-standing issue which could cause Synapse to incorrectly accept data in the unsigned field of events + received over federation. ([\#11530](https://github.com/matrix-org/synapse/issues/11530)) +- Fix a long-standing bug where Synapse wouldn't cache a response indicating that a remote user has no devices. ([\#11587](https://github.com/matrix-org/synapse/issues/11587)) +- Fix an error that occurs whilst trying to get the federation status of a destination server that was working normally. This admin API was newly introduced in Synapse 1.49.0. ([\#11593](https://github.com/matrix-org/synapse/issues/11593)) +- Fix bundled aggregations not being included in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#11612](https://github.com/matrix-org/synapse/issues/11612), [\#11659](https://github.com/matrix-org/synapse/issues/11659), [\#11791](https://github.com/matrix-org/synapse/issues/11791)) +- Fix the `/_matrix/client/v1/room/{roomId}/hierarchy` endpoint returning incorrect fields which have been present since Synapse 1.49.0. ([\#11667](https://github.com/matrix-org/synapse/issues/11667)) +- Fix preview of some GIF URLs (like tenor.com). Contributed by Philippe Daouadi. ([\#11669](https://github.com/matrix-org/synapse/issues/11669)) +- Fix a bug where only the first 50 rooms from a space were returned from the `/hierarchy` API. This has existed since the introduction of the API in Synapse 1.41.0. ([\#11695](https://github.com/matrix-org/synapse/issues/11695)) +- Fix a bug introduced in Synapse 1.18.0 where password reset and address validation emails would not be sent if their subject was configured to use the 'app' template variable. Contributed by @br4nnigan. ([\#11710](https://github.com/matrix-org/synapse/issues/11710), [\#11745](https://github.com/matrix-org/synapse/issues/11745)) +- Make the 'List Rooms' Admin API sort stable. Contributed by Daniël Sonck. ([\#11737](https://github.com/matrix-org/synapse/issues/11737)) +- Fix a long-standing bug where space hierarchy over federation would only work correctly some of the time. ([\#11775](https://github.com/matrix-org/synapse/issues/11775)) +- Fix a bug introduced in Synapse 1.46.0 that prevented `on_logged_out` module callbacks from being correctly awaited by Synapse. ([\#11786](https://github.com/matrix-org/synapse/issues/11786)) + + +Improved Documentation +---------------------- + +- Warn against using a Let's Encrypt certificate for TLS/DTLS TURN server client connections, and suggest using a ZeroSSL certificate instead. This works around client-side connectivity errors caused by WebRTC libraries that reject Let's Encrypt certificates. Contributed by @AndrewFerr. ([\#11686](https://github.com/matrix-org/synapse/issues/11686)) +- Document the new `SYNAPSE_TEST_PERSIST_SQLITE_DB` environment variable in the contributing guide. ([\#11715](https://github.com/matrix-org/synapse/issues/11715)) +- Document that the minimum supported PostgreSQL version is now 10. ([\#11725](https://github.com/matrix-org/synapse/issues/11725)) +- Fix typo in demo docs: differnt. ([\#11735](https://github.com/matrix-org/synapse/issues/11735)) +- Update room spec URL in config files. ([\#11739](https://github.com/matrix-org/synapse/issues/11739)) +- Mention `python3-venv` and `libpq-dev` dependencies in the contribution guide.
([\#11740](https://github.com/matrix-org/synapse/issues/11740)) +- Update documentation for configuring login with Facebook. ([\#11755](https://github.com/matrix-org/synapse/issues/11755)) +- Update installation instructions to note that Python 3.6 is no longer supported. ([\#11781](https://github.com/matrix-org/synapse/issues/11781)) + + +Deprecations and Removals +------------------------- + +- Remove the unstable `/send_relation` endpoint. ([\#11682](https://github.com/matrix-org/synapse/issues/11682)) +- Remove `python_twisted_reactor_pending_calls` Prometheus metric. ([\#11724](https://github.com/matrix-org/synapse/issues/11724)) +- Remove the `password_hash` field from the response dictionaries of the [Users Admin API](https://matrix-org.github.io/synapse/latest/admin_api/user_admin_api.html). ([\#11576](https://github.com/matrix-org/synapse/issues/11576)) +- **Deprecate support for `webclient` listeners and non-HTTP(S) `web_client_location` configuration. ([\#11774](https://github.com/matrix-org/synapse/issues/11774), [\#11783](https://github.com/matrix-org/synapse/issues/11783))** + + +Internal Changes +---------------- + +- Run `pyupgrade --py37-plus --keep-percent-format` on Synapse. ([\#11685](https://github.com/matrix-org/synapse/issues/11685)) +- Use buildkit's cache feature to speed up docker builds. ([\#11691](https://github.com/matrix-org/synapse/issues/11691)) +- Use `auto_attribs` and native type hints for attrs classes. ([\#11692](https://github.com/matrix-org/synapse/issues/11692), [\#11768](https://github.com/matrix-org/synapse/issues/11768)) +- Remove debug logging for #4422, which has been closed since Synapse 0.99. ([\#11693](https://github.com/matrix-org/synapse/issues/11693)) +- Remove fallback code for Python 2. ([\#11699](https://github.com/matrix-org/synapse/issues/11699)) +- Add a test for [an edge case](https://github.com/matrix-org/synapse/pull/11532#discussion_r769104461) in the `/sync` logic. ([\#11701](https://github.com/matrix-org/synapse/issues/11701)) +- Add the option to write SQLite test dbs to disk when running tests. ([\#11702](https://github.com/matrix-org/synapse/issues/11702)) +- Improve Complement test output for GitHub Actions. ([\#11707](https://github.com/matrix-org/synapse/issues/11707)) +- Fix docstring on `add_account_data_for_user`. ([\#11716](https://github.com/matrix-org/synapse/issues/11716)) +- Complement environment variable name change and update `.gitignore`. ([\#11718](https://github.com/matrix-org/synapse/issues/11718)) +- Simplify calculation of Prometheus metrics for garbage collection. ([\#11723](https://github.com/matrix-org/synapse/issues/11723)) +- Improve accuracy of `python_twisted_reactor_tick_time` Prometheus metric. ([\#11724](https://github.com/matrix-org/synapse/issues/11724), [\#11771](https://github.com/matrix-org/synapse/issues/11771)) +- Minor efficiency improvements when inserting many values into the database. ([\#11742](https://github.com/matrix-org/synapse/issues/11742)) +- Invite PR authors to give themselves credit in the changelog. ([\#11744](https://github.com/matrix-org/synapse/issues/11744)) +- Add optional debugging to investigate [issue 8631](https://github.com/matrix-org/synapse/issues/8631). ([\#11760](https://github.com/matrix-org/synapse/issues/11760)) +- Remove `log_function` utility function and its uses. ([\#11761](https://github.com/matrix-org/synapse/issues/11761)) +- Add a unit test that checks both `client` and `webclient` resources will function when simultaneously enabled. 
([\#11765](https://github.com/matrix-org/synapse/issues/11765)) +- Allow overriding complement commit using `COMPLEMENT_REF`. ([\#11766](https://github.com/matrix-org/synapse/issues/11766)) +- Add some comments and type annotations for `_update_outliers_txn`. ([\#11776](https://github.com/matrix-org/synapse/issues/11776)) + + +Synapse 1.50.1 (2022-01-18) +=========================== + +This release fixes a bug in Synapse 1.50.0 that could prevent clients from being able to connect to Synapse if the `webclient` resource was enabled. Further details are available in [this issue](https://github.com/matrix-org/synapse/issues/11763). + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.50.0rc1 that could cause Matrix clients to be unable to connect to Synapse instances with the `webclient` resource enabled. ([\#11764](https://github.com/matrix-org/synapse/issues/11764)) + + +Synapse 1.50.0 (2022-01-18) +=========================== + +**This release contains a critical bug that may prevent clients from being able to connect. +As such, it is not recommended to upgrade to 1.50.0. Instead, please upgrade straight +to 1.50.1. Further details are available in [this issue](https://github.com/matrix-org/synapse/issues/11763).** + +Please note that we now only support Python 3.7+ and PostgreSQL 10+ (if applicable), because Python 3.6 and PostgreSQL 9.6 have reached end-of-life. + +No significant changes since 1.50.0rc2. + + +Synapse 1.50.0rc2 (2022-01-14) +============================== + +This release candidate fixes a federation-breaking regression introduced in Synapse 1.50.0rc1. + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.0.0 whereby some device list updates would not be sent to remote homeservers if there were too many to send at once. ([\#11729](https://github.com/matrix-org/synapse/issues/11729)) +- Fix a bug introduced in Synapse 1.50.0rc1 whereby outbound federation could fail because too many EDUs were produced for device updates. ([\#11730](https://github.com/matrix-org/synapse/issues/11730)) + + +Improved Documentation +---------------------- + +- Document that the minimum supported PostgreSQL version is now 10. ([\#11725](https://github.com/matrix-org/synapse/issues/11725)) + + +Internal Changes +---------------- + +- Fix a typechecker problem related to our (ab)use of `nacl.signing.SigningKey`s. ([\#11714](https://github.com/matrix-org/synapse/issues/11714)) + + +Synapse 1.50.0rc1 (2022-01-05) +============================== + + +Features +-------- + +- Allow guests to send state events per [MSC3419](https://github.com/matrix-org/matrix-doc/pull/3419). ([\#11378](https://github.com/matrix-org/synapse/issues/11378)) +- Add experimental support for part of [MSC3202](https://github.com/matrix-org/matrix-doc/pull/3202): allowing application services to masquerade as specific devices. ([\#11538](https://github.com/matrix-org/synapse/issues/11538)) +- Add admin API to get users' account data. ([\#11664](https://github.com/matrix-org/synapse/issues/11664)) +- Include the room topic in the stripped state included with invites and knocking. ([\#11666](https://github.com/matrix-org/synapse/issues/11666)) +- Send and handle cross-signing messages using the stable prefix. ([\#10520](https://github.com/matrix-org/synapse/issues/10520)) +- Support unprefixed versions of fallback key property names. 
([\#11541](https://github.com/matrix-org/synapse/issues/11541)) + + +Bugfixes +-------- + +- Fix a long-standing bug where relations from other rooms could be included in the bundled aggregations of an event. ([\#11516](https://github.com/matrix-org/synapse/issues/11516)) +- Fix a long-standing bug which could cause `AssertionError`s to be written to the log when Synapse was restarted after purging events from the database. ([\#11536](https://github.com/matrix-org/synapse/issues/11536), [\#11642](https://github.com/matrix-org/synapse/issues/11642)) +- Fix a bug introduced in Synapse 1.17.0 where a pusher created for an email with capital letters would fail to be created. ([\#11547](https://github.com/matrix-org/synapse/issues/11547)) +- Fix a long-standing bug where responses included bundled aggregations when they should not, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#11592](https://github.com/matrix-org/synapse/issues/11592), [\#11623](https://github.com/matrix-org/synapse/issues/11623)) +- Fix a long-standing bug that some unknown endpoints would return HTML error pages instead of JSON `M_UNRECOGNIZED` errors. ([\#11602](https://github.com/matrix-org/synapse/issues/11602)) +- Fix a bug introduced in Synapse 1.19.3 which could sometimes cause `AssertionError`s when backfilling rooms over federation. ([\#11632](https://github.com/matrix-org/synapse/issues/11632)) + + +Improved Documentation +---------------------- + +- Update Synapse install command for FreeBSD as the package is now prefixed with `py38`. Contributed by @itchychips. ([\#11267](https://github.com/matrix-org/synapse/issues/11267)) +- Document the usage of refresh tokens. ([\#11427](https://github.com/matrix-org/synapse/issues/11427)) +- Add details for how to configure a TURN server when behind a NAT. Contributed by @AndrewFerr. ([\#11553](https://github.com/matrix-org/synapse/issues/11553)) +- Add references for using Postgres to the Docker documentation. ([\#11640](https://github.com/matrix-org/synapse/issues/11640)) +- Fix the documentation link in newly-generated configuration files. ([\#11678](https://github.com/matrix-org/synapse/issues/11678)) +- Correct the documentation for `nginx` to use a case-sensitive url pattern. Fixes an error introduced in v1.21.0. ([\#11680](https://github.com/matrix-org/synapse/issues/11680)) +- Clarify SSO mapping provider documentation by writing `def` or `async def` before the names of methods, as appropriate. ([\#11681](https://github.com/matrix-org/synapse/issues/11681)) + + +Deprecations and Removals +------------------------- + +- Replace `mock` package by its standard library version. ([\#11588](https://github.com/matrix-org/synapse/issues/11588)) +- Drop support for Python 3.6 and Ubuntu 18.04. ([\#11633](https://github.com/matrix-org/synapse/issues/11633)) + + +Internal Changes +---------------- + +- Allow specific, experimental events to be created without `prev_events`. Used by [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716). ([\#11243](https://github.com/matrix-org/synapse/issues/11243)) +- A test helper (`wait_for_background_updates`) no longer depends on classes defining a `store` property. ([\#11331](https://github.com/matrix-org/synapse/issues/11331)) +- Add type hints to `synapse.appservice`. ([\#11360](https://github.com/matrix-org/synapse/issues/11360)) +- Add missing type hints to `synapse.config` module. 
([\#11480](https://github.com/matrix-org/synapse/issues/11480)) +- Add test to ensure we share the same `state_group` across the whole historical batch when using the [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) `/batch_send` endpoint. ([\#11487](https://github.com/matrix-org/synapse/issues/11487)) +- Refactor `tests.util.setup_test_homeserver` and `tests.server.setup_test_homeserver`. ([\#11503](https://github.com/matrix-org/synapse/issues/11503)) +- Move `glob_to_regex` and `re_word_boundary` to `matrix-python-common`. ([\#11505](https://github.com/matrix-org/synapse/issues/11505), [\#11687](https://github.com/matrix-org/synapse/issues/11687)) +- Use `HTTPStatus` constants in place of literals in `tests.rest.client.test_auth`. ([\#11520](https://github.com/matrix-org/synapse/issues/11520)) +- Add a receipt types constant for `m.read`. ([\#11531](https://github.com/matrix-org/synapse/issues/11531)) +- Clean up `synapse.rest.admin`. ([\#11535](https://github.com/matrix-org/synapse/issues/11535)) +- Add missing `errcode` to `parse_string` and `parse_boolean`. ([\#11542](https://github.com/matrix-org/synapse/issues/11542)) +- Use `HTTPStatus` constants in place of literals in `synapse.http`. ([\#11543](https://github.com/matrix-org/synapse/issues/11543)) +- Add missing type hints to storage classes. ([\#11546](https://github.com/matrix-org/synapse/issues/11546), [\#11549](https://github.com/matrix-org/synapse/issues/11549), [\#11551](https://github.com/matrix-org/synapse/issues/11551), [\#11555](https://github.com/matrix-org/synapse/issues/11555), [\#11575](https://github.com/matrix-org/synapse/issues/11575), [\#11589](https://github.com/matrix-org/synapse/issues/11589), [\#11594](https://github.com/matrix-org/synapse/issues/11594), [\#11652](https://github.com/matrix-org/synapse/issues/11652), [\#11653](https://github.com/matrix-org/synapse/issues/11653), [\#11654](https://github.com/matrix-org/synapse/issues/11654), [\#11657](https://github.com/matrix-org/synapse/issues/11657)) +- Fix an inaccurate and misleading comment in the `/sync` code. ([\#11550](https://github.com/matrix-org/synapse/issues/11550)) +- Add missing type hints to `synapse.logging.context`. ([\#11556](https://github.com/matrix-org/synapse/issues/11556)) +- Stop populating unused database column `state_events.prev_state`. ([\#11558](https://github.com/matrix-org/synapse/issues/11558)) +- Minor efficiency improvements in event persistence. ([\#11560](https://github.com/matrix-org/synapse/issues/11560)) +- Add some safety checks that storage functions are used correctly. ([\#11564](https://github.com/matrix-org/synapse/issues/11564), [\#11580](https://github.com/matrix-org/synapse/issues/11580)) +- Make `get_device` return `None` if the device doesn't exist rather than raising an exception. ([\#11565](https://github.com/matrix-org/synapse/issues/11565)) +- Split the HTML parsing code from the URL preview resource code. ([\#11566](https://github.com/matrix-org/synapse/issues/11566)) +- Remove redundant `COALESCE()`s around `COUNT()`s in database queries. ([\#11570](https://github.com/matrix-org/synapse/issues/11570)) +- Add missing type hints to `synapse.http`. ([\#11571](https://github.com/matrix-org/synapse/issues/11571)) +- Add [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) and [MSC3030](https://github.com/matrix-org/matrix-doc/pull/3030) to `/versions` -> `unstable_features` to detect server support. 
([\#11582](https://github.com/matrix-org/synapse/issues/11582)) +- Add type hints to `synapse/tests/rest/admin`. ([\#11590](https://github.com/matrix-org/synapse/issues/11590)) +- Drop end-of-life Python 3.6 and Postgres 9.6 from CI. ([\#11595](https://github.com/matrix-org/synapse/issues/11595)) +- Update black version and run it on all the files. ([\#11596](https://github.com/matrix-org/synapse/issues/11596)) +- Add opentracing type stubs and fix associated mypy errors. ([\#11603](https://github.com/matrix-org/synapse/issues/11603), [\#11622](https://github.com/matrix-org/synapse/issues/11622)) +- Improve OpenTracing support for requests which use a `ResponseCache`. ([\#11607](https://github.com/matrix-org/synapse/issues/11607)) +- Improve OpenTracing support for incoming HTTP requests. ([\#11618](https://github.com/matrix-org/synapse/issues/11618)) +- A number of improvements to opentracing support. ([\#11619](https://github.com/matrix-org/synapse/issues/11619)) +- Refactor the way that the `outlier` flag is set on events received over federation. ([\#11634](https://github.com/matrix-org/synapse/issues/11634)) +- Improve the error messages from `get_create_event_for_room`. ([\#11638](https://github.com/matrix-org/synapse/issues/11638)) +- Remove redundant `get_current_events_token` method. ([\#11643](https://github.com/matrix-org/synapse/issues/11643)) +- Convert `namedtuples` to `attrs`. ([\#11665](https://github.com/matrix-org/synapse/issues/11665), [\#11574](https://github.com/matrix-org/synapse/issues/11574)) +- Update the `/capabilities` response to include whether support for [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440) is available. ([\#11690](https://github.com/matrix-org/synapse/issues/11690)) +- Send the `Accept` header in HTTP requests made using `SimpleHttpClient.get_json`. ([\#11677](https://github.com/matrix-org/synapse/issues/11677)) +- Work around Mjolnir compatibility issue by adding an import for `glob_to_regex` in `synapse.util`, where it moved from. ([\#11696](https://github.com/matrix-org/synapse/issues/11696)) + + +**Changelogs for older versions can be found [here](CHANGES-2021.md).** From 9345361c6beacff8cf68d3c9c5c55dd19b3f930c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jul 2023 12:33:27 +0200 Subject: [PATCH 111/153] Bump authlib from 1.2.0 to 1.2.1 (#15864) Bumps [authlib](https://github.com/lepture/authlib) from 1.2.0 to 1.2.1. - [Release notes](https://github.com/lepture/authlib/releases) - [Changelog](https://github.com/lepture/authlib/blob/master/docs/changelog.rst) - [Commits](https://github.com/lepture/authlib/compare/v1.2.0...v1.2.1) --- updated-dependencies: - dependency-name: authlib dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index ee19c246f375..6fa15dfaaa7a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -53,13 +53,13 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte [[package]] name = "authlib" -version = "1.2.0" +version = "1.2.1" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." 
optional = true python-versions = "*" files = [ - {file = "Authlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:4ddf4fd6cfa75c9a460b361d4bd9dac71ffda0be879dbe4292a02e92349ad55a"}, - {file = "Authlib-1.2.0.tar.gz", hash = "sha256:4fa3e80883a5915ef9f5bc28630564bc4ed5b5af39812a3ff130ec76bd631e9d"}, + {file = "Authlib-1.2.1-py2.py3-none-any.whl", hash = "sha256:c88984ea00149a90e3537c964327da930779afa4564e354edfd98410bea01911"}, + {file = "Authlib-1.2.1.tar.gz", hash = "sha256:421f7c6b468d907ca2d9afede256f068f87e34d23dd221c07d13d4c234726afb"}, ] [package.dependencies] From aea94ca8cd2ec04c115357e7395704a8cee195ae Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jul 2023 12:33:47 +0200 Subject: [PATCH 112/153] Bump importlib-metadata from 6.6.0 to 6.7.0 (#15865) Bumps [importlib-metadata](https://github.com/python/importlib_metadata) from 6.6.0 to 6.7.0. - [Release notes](https://github.com/python/importlib_metadata/releases) - [Changelog](https://github.com/python/importlib_metadata/blob/main/NEWS.rst) - [Commits](https://github.com/python/importlib_metadata/compare/v6.6.0...v6.7.0) --- updated-dependencies: - dependency-name: importlib-metadata dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6fa15dfaaa7a..38913bad3612 100644 --- a/poetry.lock +++ b/poetry.lock @@ -837,13 +837,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.6.0" +version = "6.7.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, - {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, + {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, ] [package.dependencies] @@ -853,7 +853,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "importlib-resources" From 411ba44790d98b2244dadd17281db2daa3ccfb7a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jul 2023 12:34:20 +0200 Subject: [PATCH 113/153] Bump types-pyopenssl from 23.2.0.0 to 23.2.0.1 (#15866) Bumps [types-pyopenssl](https://github.com/python/typeshed) from 23.2.0.0 to 23.2.0.1. 
- [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pyopenssl dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 38913bad3612..87ed919e43a2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2964,13 +2964,13 @@ files = [ [[package]] name = "types-pyopenssl" -version = "23.2.0.0" +version = "23.2.0.1" description = "Typing stubs for pyOpenSSL" optional = false python-versions = "*" files = [ - {file = "types-pyOpenSSL-23.2.0.0.tar.gz", hash = "sha256:43e307e8dfb3a7a8208a19874ca060305f460c529d4eaca8a2669ea89499f244"}, - {file = "types_pyOpenSSL-23.2.0.0-py3-none-any.whl", hash = "sha256:ba803a99440b0c2e9ab4e197084aeefc55bdfe8a580d367b2aa4210810a21240"}, + {file = "types-pyOpenSSL-23.2.0.1.tar.gz", hash = "sha256:beeb5d22704c625a1e4b6dc756355c5b4af0b980138b702a9d9f932acf020903"}, + {file = "types_pyOpenSSL-23.2.0.1-py3-none-any.whl", hash = "sha256:0568553f104466f1b8e0db3360fbe6770137d02e21a1a45c209bf2b1b03d90d4"}, ] [package.dependencies] From a587de96b81e945677622ff0e60549b00f2b1689 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jul 2023 12:34:57 +0200 Subject: [PATCH 114/153] Bump sentry-sdk from 1.25.1 to 1.26.0 (#15867) Bumps [sentry-sdk](https://github.com/getsentry/sentry-python) from 1.25.1 to 1.26.0. - [Release notes](https://github.com/getsentry/sentry-python/releases) - [Changelog](https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md) - [Commits](https://github.com/getsentry/sentry-python/compare/1.25.1...1.26.0) --- updated-dependencies: - dependency-name: sentry-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 87ed919e43a2..2802e42cf4b8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2301,13 +2301,13 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "sentry-sdk" -version = "1.25.1" +version = "1.26.0" description = "Python client for Sentry (https://sentry.io)" optional = true python-versions = "*" files = [ - {file = "sentry-sdk-1.25.1.tar.gz", hash = "sha256:aa796423eb6a2f4a8cd7a5b02ba6558cb10aab4ccdc0537f63a47b038c520c38"}, - {file = "sentry_sdk-1.25.1-py2.py3-none-any.whl", hash = "sha256:79afb7c896014038e358401ad1d36889f97a129dfa8031c49b3f238cd1aa3935"}, + {file = "sentry-sdk-1.26.0.tar.gz", hash = "sha256:760e4fb6d01c994110507133e08ecd4bdf4d75ee4be77f296a3579796cf73134"}, + {file = "sentry_sdk-1.26.0-py2.py3-none-any.whl", hash = "sha256:0c9f858337ec3781cf4851972ef42bba8c9828aea116b0dbed8f38c5f9a1896c"}, ] [package.dependencies] From 53aa26eddc772a6719bf0da64b0684c333294d05 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Mon, 3 Jul 2023 10:38:57 +0000 Subject: [PATCH 115/153] Add a timeout that aborts any Postgres statement taking more than 1 hour. 
(#15853) * Add a timeout to Postgres statements * Newsfile Signed-off-by: Olivier Wilkinson (reivilibre) --------- Signed-off-by: Olivier Wilkinson (reivilibre) --- changelog.d/15853.misc | 1 + synapse/storage/engines/postgres.py | 13 +++++++++++++ 2 files changed, 14 insertions(+) create mode 100644 changelog.d/15853.misc diff --git a/changelog.d/15853.misc b/changelog.d/15853.misc new file mode 100644 index 000000000000..3e9516b1ad82 --- /dev/null +++ b/changelog.d/15853.misc @@ -0,0 +1 @@ +Add a timeout that aborts any Postgres statement taking more than 1 hour. \ No newline at end of file diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py index b350f57ccb4a..05a72dc55435 100644 --- a/synapse/storage/engines/postgres.py +++ b/synapse/storage/engines/postgres.py @@ -45,6 +45,15 @@ def _disable_bytes_adapter(_: bytes) -> NoReturn: psycopg2.extensions.register_adapter(bytes, _disable_bytes_adapter) self.synchronous_commit: bool = database_config.get("synchronous_commit", True) + # Set the statement timeout to 1 hour by default. + # Any query taking more than 1 hour should probably be considered a bug; + # most of the time this is a sign that work needs to be split up or that + # some degenerate query plan has been created and the client has probably + # timed out/walked off anyway. + # This is in milliseconds. + self.statement_timeout: Optional[int] = database_config.get( + "statement_timeout", 60 * 60 * 1000 + ) self._version: Optional[int] = None # unknown as yet self.isolation_level_map: Mapping[int, int] = { @@ -157,6 +166,10 @@ def on_new_connection(self, db_conn: "LoggingDatabaseConnection") -> None: if not self.synchronous_commit: cursor.execute("SET synchronous_commit TO OFF") + # Abort really long-running statements and turn them into errors. + if self.statement_timeout is not None: + cursor.execute("SET statement_timeout TO ?", (self.statement_timeout,)) + cursor.close() db_conn.commit() From cd8b73aa97cb6d06079f67aa73ff24679038d7c6 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Mon, 3 Jul 2023 10:39:52 +0000 Subject: [PATCH 116/153] Fix the `devenv up` configuration which was ignoring the config overrides. (#15854) * Fix use of config override directory in `devenv up` `--config-directory` is for the generate config script; `-c` is for usage * Add homeserver config override directory to gitignore * Newsfile Signed-off-by: Olivier Wilkinson (reivilibre) --------- Signed-off-by: Olivier Wilkinson (reivilibre) --- .gitignore | 1 + changelog.d/15854.misc | 1 + flake.nix | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15854.misc diff --git a/.gitignore b/.gitignore index 8cf504324b9e..a89f149ec158 100644 --- a/.gitignore +++ b/.gitignore @@ -34,6 +34,7 @@ __pycache__/ /logs /media_store/ /uploads +/homeserver-config-overrides.d # For direnv users /.envrc diff --git a/changelog.d/15854.misc b/changelog.d/15854.misc new file mode 100644 index 000000000000..8c940dd9c5aa --- /dev/null +++ b/changelog.d/15854.misc @@ -0,0 +1 @@ +Fix the `devenv up` configuration which was ignoring the config overrides. \ No newline at end of file diff --git a/flake.nix b/flake.nix index 8b7c0e6a5b2b..bb42c9ff9b1a 100644 --- a/flake.nix +++ b/flake.nix @@ -178,7 +178,7 @@ EOF ''; # Start synapse when `devenv up` is run. 
- processes.synapse.exec = "poetry run python -m synapse.app.homeserver -c homeserver.yaml --config-directory homeserver-config-overrides.d"; + processes.synapse.exec = "poetry run python -m synapse.app.homeserver -c homeserver.yaml -c homeserver-config-overrides.d"; # Define the perl modules we require to run SyTest. # From 07d7cbfe69c239a7ffe5668c1166799370eef0d6 Mon Sep 17 00:00:00 2001 From: pacien Date: Mon, 3 Jul 2023 16:39:38 +0200 Subject: [PATCH 117/153] devices: use combined ANY clause for faster cleanup (#15861) Old device entries for the same user were being removed in individual SQL commands, making the batch take way longer than necessary. This combines the commands into a single one with a IN/ANY clause. Example of log entry before the change, regularly observed with "log_min_duration_statement = 10000" in PostgreSQL's config: LOG: duration: 42538.282 ms statement: DELETE FROM device_lists_stream WHERE user_id = '@someone' AND device_id = 'someid1' AND stream_id < 123456789 ; DELETE FROM device_lists_stream WHERE user_id = '@someone' AND device_id = 'someid2' AND stream_id < 123456789 ; [repeated for each device ID of that user, potentially a lot...] With the patch applied on my instance for the past couple of days, I no longer notice overly long statements of that particular kind. Signed-off-by: pacien --- changelog.d/15861.misc | 1 + synapse/storage/databases/main/devices.py | 14 +++++++++----- 2 files changed, 10 insertions(+), 5 deletions(-) create mode 100644 changelog.d/15861.misc diff --git a/changelog.d/15861.misc b/changelog.d/15861.misc new file mode 100644 index 000000000000..6f320eab81c9 --- /dev/null +++ b/changelog.d/15861.misc @@ -0,0 +1 @@ +Optimised cleanup of old entries in device_lists_stream. diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index f677d048aafb..d9df437e518a 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -1950,12 +1950,16 @@ def _add_device_change_to_stream_txn( # Delete older entries in the table, as we really only care about # when the latest change happened. - txn.execute_batch( - """ + cleanup_obsolete_stmt = """ DELETE FROM device_lists_stream - WHERE user_id = ? AND device_id = ? AND stream_id < ? - """, - [(user_id, device_id, min_stream_id) for device_id in device_ids], + WHERE user_id = ? AND stream_id < ? AND %s + """ + device_ids_clause, device_ids_args = make_in_list_sql_clause( + txn.database_engine, "device_id", device_ids + ) + txn.execute( + cleanup_obsolete_stmt % (device_ids_clause,), + [user_id, min_stream_id] + device_ids_args, ) self.db_pool.simple_insert_many_txn( From 670d590f8a167d909f8d76a5ecbd240d9609d2b0 Mon Sep 17 00:00:00 2001 From: Paarth Shah Date: Tue, 4 Jul 2023 00:33:24 -0700 Subject: [PATCH 118/153] Pin `pydantic` to <2.0.0 (#15862) Signed-off-by: Paarth Shah --- changelog.d/15862.bugfix | 3 ++ poetry.lock | 74 ++++++++++++++++++++-------------------- pyproject.toml | 3 +- 3 files changed, 42 insertions(+), 38 deletions(-) create mode 100644 changelog.d/15862.bugfix diff --git a/changelog.d/15862.bugfix b/changelog.d/15862.bugfix new file mode 100644 index 000000000000..8eb6aa9a7f8c --- /dev/null +++ b/changelog.d/15862.bugfix @@ -0,0 +1,3 @@ +Pin `pydantic` to ^=1.7.4 to avoid backwards-incompatible API changes from the 2.0.0 release. +Resolves https://github.com/matrix-org/synapse/issues/15858. +Contributed by @PaarthShah. 
diff --git a/poetry.lock b/poetry.lock index 2802e42cf4b8..9aaf5c7de724 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1829,47 +1829,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.9" +version = "1.10.10" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e692dec4a40bfb40ca530e07805b1208c1de071a18d26af4a2a0d79015b352ca"}, - {file = "pydantic-1.10.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c52eb595db83e189419bf337b59154bdcca642ee4b2a09e5d7797e41ace783f"}, - {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939328fd539b8d0edf244327398a667b6b140afd3bf7e347cf9813c736211896"}, - {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b48d3d634bca23b172f47f2335c617d3fcb4b3ba18481c96b7943a4c634f5c8d"}, - {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f0b7628fb8efe60fe66fd4adadd7ad2304014770cdc1f4934db41fe46cc8825f"}, - {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e1aa5c2410769ca28aa9a7841b80d9d9a1c5f223928ca8bec7e7c9a34d26b1d4"}, - {file = "pydantic-1.10.9-cp310-cp310-win_amd64.whl", hash = "sha256:eec39224b2b2e861259d6f3c8b6290d4e0fbdce147adb797484a42278a1a486f"}, - {file = "pydantic-1.10.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d111a21bbbfd85c17248130deac02bbd9b5e20b303338e0dbe0faa78330e37e0"}, - {file = "pydantic-1.10.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e9aec8627a1a6823fc62fb96480abe3eb10168fd0d859ee3d3b395105ae19a7"}, - {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07293ab08e7b4d3c9d7de4949a0ea571f11e4557d19ea24dd3ae0c524c0c334d"}, - {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee829b86ce984261d99ff2fd6e88f2230068d96c2a582f29583ed602ef3fc2c"}, - {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b466a23009ff5cdd7076eb56aca537c745ca491293cc38e72bf1e0e00de5b91"}, - {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7847ca62e581e6088d9000f3c497267868ca2fa89432714e21a4fb33a04d52e8"}, - {file = "pydantic-1.10.9-cp311-cp311-win_amd64.whl", hash = "sha256:7845b31959468bc5b78d7b95ec52fe5be32b55d0d09983a877cca6aedc51068f"}, - {file = "pydantic-1.10.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:517a681919bf880ce1dac7e5bc0c3af1e58ba118fd774da2ffcd93c5f96eaece"}, - {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67195274fd27780f15c4c372f4ba9a5c02dad6d50647b917b6a92bf00b3d301a"}, - {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2196c06484da2b3fded1ab6dbe182bdabeb09f6318b7fdc412609ee2b564c49a"}, - {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6257bb45ad78abacda13f15bde5886efd6bf549dd71085e64b8dcf9919c38b60"}, - {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3283b574b01e8dbc982080d8287c968489d25329a463b29a90d4157de4f2baaf"}, - {file = "pydantic-1.10.9-cp37-cp37m-win_amd64.whl", hash = "sha256:5f8bbaf4013b9a50e8100333cc4e3fa2f81214033e05ac5aa44fa24a98670a29"}, - {file = 
"pydantic-1.10.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9cd67fb763248cbe38f0593cd8611bfe4b8ad82acb3bdf2b0898c23415a1f82"}, - {file = "pydantic-1.10.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f50e1764ce9353be67267e7fd0da08349397c7db17a562ad036aa7c8f4adfdb6"}, - {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73ef93e5e1d3c8e83f1ff2e7fdd026d9e063c7e089394869a6e2985696693766"}, - {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:128d9453d92e6e81e881dd7e2484e08d8b164da5507f62d06ceecf84bf2e21d3"}, - {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ad428e92ab68798d9326bb3e5515bc927444a3d71a93b4a2ca02a8a5d795c572"}, - {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fab81a92f42d6d525dd47ced310b0c3e10c416bbfae5d59523e63ea22f82b31e"}, - {file = "pydantic-1.10.9-cp38-cp38-win_amd64.whl", hash = "sha256:963671eda0b6ba6926d8fc759e3e10335e1dc1b71ff2a43ed2efd6996634dafb"}, - {file = "pydantic-1.10.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:970b1bdc6243ef663ba5c7e36ac9ab1f2bfecb8ad297c9824b542d41a750b298"}, - {file = "pydantic-1.10.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7e1d5290044f620f80cf1c969c542a5468f3656de47b41aa78100c5baa2b8276"}, - {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fcff3c7df7adff880622a98022626f4f6dbce6639a88a15a3ce0f96466cb60"}, - {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0da48717dc9495d3a8f215e0d012599db6b8092db02acac5e0d58a65248ec5bc"}, - {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0a2aabdc73c2a5960e87c3ffebca6ccde88665616d1fd6d3db3178ef427b267a"}, - {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9863b9420d99dfa9c064042304868e8ba08e89081428a1c471858aa2af6f57c4"}, - {file = "pydantic-1.10.9-cp39-cp39-win_amd64.whl", hash = "sha256:e7c9900b43ac14110efa977be3da28931ffc74c27e96ee89fbcaaf0b0fe338e1"}, - {file = "pydantic-1.10.9-py3-none-any.whl", hash = "sha256:6cafde02f6699ce4ff643417d1a9223716ec25e228ddc3b436fe7e2d25a1f305"}, - {file = "pydantic-1.10.9.tar.gz", hash = "sha256:95c70da2cd3b6ddf3b9645ecaa8d98f3d80c606624b6d245558d202cd23ea3be"}, + {file = "pydantic-1.10.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:adad1ee4ab9888f12dac2529276704e719efcf472e38df7813f5284db699b4ec"}, + {file = "pydantic-1.10.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a7db03339893feef2092ff7b1afc9497beed15ebd4af84c3042a74abce02d48"}, + {file = "pydantic-1.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b3714b97ff84b2689654851c2426389bcabfac9080617bcf4306c69db606f6"}, + {file = "pydantic-1.10.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edfdf0a5abc5c9bf2052ebaec20e67abd52e92d257e4f2d30e02c354ed3e6030"}, + {file = "pydantic-1.10.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20a3b30fd255eeeb63caa9483502ba96b7795ce5bf895c6a179b3d909d9f53a6"}, + {file = "pydantic-1.10.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db4c7f7e60ca6f7d6c1785070f3e5771fcb9b2d88546e334d2f2c3934d949028"}, + {file = "pydantic-1.10.10-cp310-cp310-win_amd64.whl", hash = "sha256:a2d5be50ac4a0976817144c7d653e34df2f9436d15555189f5b6f61161d64183"}, + {file = 
"pydantic-1.10.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:566a04ba755e8f701b074ffb134ddb4d429f75d5dced3fbd829a527aafe74c71"}, + {file = "pydantic-1.10.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f79db3652ed743309f116ba863dae0c974a41b688242482638b892246b7db21d"}, + {file = "pydantic-1.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c62376890b819bebe3c717a9ac841a532988372b7e600e76f75c9f7c128219d5"}, + {file = "pydantic-1.10.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4870f13a4fafd5bc3e93cff3169222534fad867918b188e83ee0496452978437"}, + {file = "pydantic-1.10.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:990027e77cda6072a566e433b6962ca3b96b4f3ae8bd54748e9d62a58284d9d7"}, + {file = "pydantic-1.10.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8c40964596809eb616d94f9c7944511f620a1103d63d5510440ed2908fc410af"}, + {file = "pydantic-1.10.10-cp311-cp311-win_amd64.whl", hash = "sha256:ea9eebc2ebcba3717e77cdeee3f6203ffc0e78db5f7482c68b1293e8cc156e5e"}, + {file = "pydantic-1.10.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:762aa598f79b4cac2f275d13336b2dd8662febee2a9c450a49a2ab3bec4b385f"}, + {file = "pydantic-1.10.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dab5219659f95e357d98d70577b361383057fb4414cfdb587014a5f5c595f7b"}, + {file = "pydantic-1.10.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3d4ee957a727ccb5a36f1b0a6dbd9fad5dedd2a41eada99a8df55c12896e18d"}, + {file = "pydantic-1.10.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b69f9138dec566962ec65623c9d57bee44412d2fc71065a5f3ebb3820bdeee96"}, + {file = "pydantic-1.10.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7aa75d1bd9cc275cf9782f50f60cddaf74cbaae19b6ada2a28e737edac420312"}, + {file = "pydantic-1.10.10-cp37-cp37m-win_amd64.whl", hash = "sha256:9f62a727f5c590c78c2d12fda302d1895141b767c6488fe623098f8792255fe5"}, + {file = "pydantic-1.10.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aac218feb4af73db8417ca7518fb3bade4534fcca6e3fb00f84966811dd94450"}, + {file = "pydantic-1.10.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88546dc10a40b5b52cae87d64666787aeb2878f9a9b37825aedc2f362e7ae1da"}, + {file = "pydantic-1.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c41bbaae89e32fc582448e71974de738c055aef5ab474fb25692981a08df808a"}, + {file = "pydantic-1.10.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b71bd504d1573b0b722ae536e8ffb796bedeef978979d076bf206e77dcc55a5"}, + {file = "pydantic-1.10.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e088e3865a2270ecbc369924cd7d9fbc565667d9158e7f304e4097ebb9cf98dd"}, + {file = "pydantic-1.10.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3403a090db45d4027d2344859d86eb797484dfda0706cf87af79ace6a35274ef"}, + {file = "pydantic-1.10.10-cp38-cp38-win_amd64.whl", hash = "sha256:e0014e29637125f4997c174dd6167407162d7af0da73414a9340461ea8573252"}, + {file = "pydantic-1.10.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9965e49c6905840e526e5429b09e4c154355b6ecc0a2f05492eda2928190311d"}, + {file = "pydantic-1.10.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:748d10ab6089c5d196e1c8be9de48274f71457b01e59736f7a09c9dc34f51887"}, + {file = "pydantic-1.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:86936c383f7c38fd26d35107eb669c85d8f46dfceae873264d9bab46fe1c7dde"}, + {file = "pydantic-1.10.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a26841be620309a9697f5b1ffc47dce74909e350c5315ccdac7a853484d468a"}, + {file = "pydantic-1.10.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:409b810f387610cc7405ab2fa6f62bdf7ea485311845a242ebc0bd0496e7e5ac"}, + {file = "pydantic-1.10.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ce937a2a2c020bcad1c9fde02892392a1123de6dda906ddba62bfe8f3e5989a2"}, + {file = "pydantic-1.10.10-cp39-cp39-win_amd64.whl", hash = "sha256:37ebddef68370e6f26243acc94de56d291e01227a67b2ace26ea3543cf53dd5f"}, + {file = "pydantic-1.10.10-py3-none-any.whl", hash = "sha256:a5939ec826f7faec434e2d406ff5e4eaf1716eb1f247d68cd3d0b3612f7b4c8a"}, + {file = "pydantic-1.10.10.tar.gz", hash = "sha256:3b8d5bd97886f9eb59260594207c9f57dce14a6f869c6ceea90188715d29921a"}, ] [package.dependencies] diff --git a/pyproject.toml b/pyproject.toml index fc47b1ef7101..67ee3f1738a4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -207,7 +207,8 @@ packaging = ">=16.1" # which shipped in Python 3.8. This corresponds to version 1.4 of the backport. importlib_metadata = { version = ">=1.4", python = "<3.8" } # This is the most recent version of Pydantic with available on common distros. -pydantic = ">=1.7.4" +# We are currently incompatible with >=2.0.0: (https://github.com/matrix-org/synapse/issues/15858) +pydantic = "^1.7.4" # This is for building the rust components during "poetry install", which # currently ignores the `build-system.requires` directive (c.f. From 649848627c50777f8c93e8b980e903ab39ea019a Mon Sep 17 00:00:00 2001 From: Paarth Shah Date: Tue, 4 Jul 2023 00:33:24 -0700 Subject: [PATCH 119/153] Pin `pydantic` to <2.0.0 (#15862) Signed-off-by: Paarth Shah --- changelog.d/15862.bugfix | 3 ++ poetry.lock | 74 ++++++++++++++++++++-------------------- pyproject.toml | 3 +- 3 files changed, 42 insertions(+), 38 deletions(-) create mode 100644 changelog.d/15862.bugfix diff --git a/changelog.d/15862.bugfix b/changelog.d/15862.bugfix new file mode 100644 index 000000000000..8eb6aa9a7f8c --- /dev/null +++ b/changelog.d/15862.bugfix @@ -0,0 +1,3 @@ +Pin `pydantic` to ^=1.7.4 to avoid backwards-incompatible API changes from the 2.0.0 release. +Resolves https://github.com/matrix-org/synapse/issues/15858. +Contributed by @PaarthShah. 
diff --git a/poetry.lock b/poetry.lock index ee19c246f375..5e0d192c96af 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1829,47 +1829,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.9" +version = "1.10.10" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e692dec4a40bfb40ca530e07805b1208c1de071a18d26af4a2a0d79015b352ca"}, - {file = "pydantic-1.10.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c52eb595db83e189419bf337b59154bdcca642ee4b2a09e5d7797e41ace783f"}, - {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939328fd539b8d0edf244327398a667b6b140afd3bf7e347cf9813c736211896"}, - {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b48d3d634bca23b172f47f2335c617d3fcb4b3ba18481c96b7943a4c634f5c8d"}, - {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f0b7628fb8efe60fe66fd4adadd7ad2304014770cdc1f4934db41fe46cc8825f"}, - {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e1aa5c2410769ca28aa9a7841b80d9d9a1c5f223928ca8bec7e7c9a34d26b1d4"}, - {file = "pydantic-1.10.9-cp310-cp310-win_amd64.whl", hash = "sha256:eec39224b2b2e861259d6f3c8b6290d4e0fbdce147adb797484a42278a1a486f"}, - {file = "pydantic-1.10.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d111a21bbbfd85c17248130deac02bbd9b5e20b303338e0dbe0faa78330e37e0"}, - {file = "pydantic-1.10.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e9aec8627a1a6823fc62fb96480abe3eb10168fd0d859ee3d3b395105ae19a7"}, - {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07293ab08e7b4d3c9d7de4949a0ea571f11e4557d19ea24dd3ae0c524c0c334d"}, - {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee829b86ce984261d99ff2fd6e88f2230068d96c2a582f29583ed602ef3fc2c"}, - {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b466a23009ff5cdd7076eb56aca537c745ca491293cc38e72bf1e0e00de5b91"}, - {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7847ca62e581e6088d9000f3c497267868ca2fa89432714e21a4fb33a04d52e8"}, - {file = "pydantic-1.10.9-cp311-cp311-win_amd64.whl", hash = "sha256:7845b31959468bc5b78d7b95ec52fe5be32b55d0d09983a877cca6aedc51068f"}, - {file = "pydantic-1.10.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:517a681919bf880ce1dac7e5bc0c3af1e58ba118fd774da2ffcd93c5f96eaece"}, - {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67195274fd27780f15c4c372f4ba9a5c02dad6d50647b917b6a92bf00b3d301a"}, - {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2196c06484da2b3fded1ab6dbe182bdabeb09f6318b7fdc412609ee2b564c49a"}, - {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6257bb45ad78abacda13f15bde5886efd6bf549dd71085e64b8dcf9919c38b60"}, - {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3283b574b01e8dbc982080d8287c968489d25329a463b29a90d4157de4f2baaf"}, - {file = "pydantic-1.10.9-cp37-cp37m-win_amd64.whl", hash = "sha256:5f8bbaf4013b9a50e8100333cc4e3fa2f81214033e05ac5aa44fa24a98670a29"}, - {file = 
"pydantic-1.10.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9cd67fb763248cbe38f0593cd8611bfe4b8ad82acb3bdf2b0898c23415a1f82"}, - {file = "pydantic-1.10.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f50e1764ce9353be67267e7fd0da08349397c7db17a562ad036aa7c8f4adfdb6"}, - {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73ef93e5e1d3c8e83f1ff2e7fdd026d9e063c7e089394869a6e2985696693766"}, - {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:128d9453d92e6e81e881dd7e2484e08d8b164da5507f62d06ceecf84bf2e21d3"}, - {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ad428e92ab68798d9326bb3e5515bc927444a3d71a93b4a2ca02a8a5d795c572"}, - {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fab81a92f42d6d525dd47ced310b0c3e10c416bbfae5d59523e63ea22f82b31e"}, - {file = "pydantic-1.10.9-cp38-cp38-win_amd64.whl", hash = "sha256:963671eda0b6ba6926d8fc759e3e10335e1dc1b71ff2a43ed2efd6996634dafb"}, - {file = "pydantic-1.10.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:970b1bdc6243ef663ba5c7e36ac9ab1f2bfecb8ad297c9824b542d41a750b298"}, - {file = "pydantic-1.10.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7e1d5290044f620f80cf1c969c542a5468f3656de47b41aa78100c5baa2b8276"}, - {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fcff3c7df7adff880622a98022626f4f6dbce6639a88a15a3ce0f96466cb60"}, - {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0da48717dc9495d3a8f215e0d012599db6b8092db02acac5e0d58a65248ec5bc"}, - {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0a2aabdc73c2a5960e87c3ffebca6ccde88665616d1fd6d3db3178ef427b267a"}, - {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9863b9420d99dfa9c064042304868e8ba08e89081428a1c471858aa2af6f57c4"}, - {file = "pydantic-1.10.9-cp39-cp39-win_amd64.whl", hash = "sha256:e7c9900b43ac14110efa977be3da28931ffc74c27e96ee89fbcaaf0b0fe338e1"}, - {file = "pydantic-1.10.9-py3-none-any.whl", hash = "sha256:6cafde02f6699ce4ff643417d1a9223716ec25e228ddc3b436fe7e2d25a1f305"}, - {file = "pydantic-1.10.9.tar.gz", hash = "sha256:95c70da2cd3b6ddf3b9645ecaa8d98f3d80c606624b6d245558d202cd23ea3be"}, + {file = "pydantic-1.10.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:adad1ee4ab9888f12dac2529276704e719efcf472e38df7813f5284db699b4ec"}, + {file = "pydantic-1.10.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a7db03339893feef2092ff7b1afc9497beed15ebd4af84c3042a74abce02d48"}, + {file = "pydantic-1.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b3714b97ff84b2689654851c2426389bcabfac9080617bcf4306c69db606f6"}, + {file = "pydantic-1.10.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edfdf0a5abc5c9bf2052ebaec20e67abd52e92d257e4f2d30e02c354ed3e6030"}, + {file = "pydantic-1.10.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20a3b30fd255eeeb63caa9483502ba96b7795ce5bf895c6a179b3d909d9f53a6"}, + {file = "pydantic-1.10.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db4c7f7e60ca6f7d6c1785070f3e5771fcb9b2d88546e334d2f2c3934d949028"}, + {file = "pydantic-1.10.10-cp310-cp310-win_amd64.whl", hash = "sha256:a2d5be50ac4a0976817144c7d653e34df2f9436d15555189f5b6f61161d64183"}, + {file = 
"pydantic-1.10.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:566a04ba755e8f701b074ffb134ddb4d429f75d5dced3fbd829a527aafe74c71"}, + {file = "pydantic-1.10.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f79db3652ed743309f116ba863dae0c974a41b688242482638b892246b7db21d"}, + {file = "pydantic-1.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c62376890b819bebe3c717a9ac841a532988372b7e600e76f75c9f7c128219d5"}, + {file = "pydantic-1.10.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4870f13a4fafd5bc3e93cff3169222534fad867918b188e83ee0496452978437"}, + {file = "pydantic-1.10.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:990027e77cda6072a566e433b6962ca3b96b4f3ae8bd54748e9d62a58284d9d7"}, + {file = "pydantic-1.10.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8c40964596809eb616d94f9c7944511f620a1103d63d5510440ed2908fc410af"}, + {file = "pydantic-1.10.10-cp311-cp311-win_amd64.whl", hash = "sha256:ea9eebc2ebcba3717e77cdeee3f6203ffc0e78db5f7482c68b1293e8cc156e5e"}, + {file = "pydantic-1.10.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:762aa598f79b4cac2f275d13336b2dd8662febee2a9c450a49a2ab3bec4b385f"}, + {file = "pydantic-1.10.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dab5219659f95e357d98d70577b361383057fb4414cfdb587014a5f5c595f7b"}, + {file = "pydantic-1.10.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3d4ee957a727ccb5a36f1b0a6dbd9fad5dedd2a41eada99a8df55c12896e18d"}, + {file = "pydantic-1.10.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b69f9138dec566962ec65623c9d57bee44412d2fc71065a5f3ebb3820bdeee96"}, + {file = "pydantic-1.10.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7aa75d1bd9cc275cf9782f50f60cddaf74cbaae19b6ada2a28e737edac420312"}, + {file = "pydantic-1.10.10-cp37-cp37m-win_amd64.whl", hash = "sha256:9f62a727f5c590c78c2d12fda302d1895141b767c6488fe623098f8792255fe5"}, + {file = "pydantic-1.10.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aac218feb4af73db8417ca7518fb3bade4534fcca6e3fb00f84966811dd94450"}, + {file = "pydantic-1.10.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88546dc10a40b5b52cae87d64666787aeb2878f9a9b37825aedc2f362e7ae1da"}, + {file = "pydantic-1.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c41bbaae89e32fc582448e71974de738c055aef5ab474fb25692981a08df808a"}, + {file = "pydantic-1.10.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b71bd504d1573b0b722ae536e8ffb796bedeef978979d076bf206e77dcc55a5"}, + {file = "pydantic-1.10.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e088e3865a2270ecbc369924cd7d9fbc565667d9158e7f304e4097ebb9cf98dd"}, + {file = "pydantic-1.10.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3403a090db45d4027d2344859d86eb797484dfda0706cf87af79ace6a35274ef"}, + {file = "pydantic-1.10.10-cp38-cp38-win_amd64.whl", hash = "sha256:e0014e29637125f4997c174dd6167407162d7af0da73414a9340461ea8573252"}, + {file = "pydantic-1.10.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9965e49c6905840e526e5429b09e4c154355b6ecc0a2f05492eda2928190311d"}, + {file = "pydantic-1.10.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:748d10ab6089c5d196e1c8be9de48274f71457b01e59736f7a09c9dc34f51887"}, + {file = "pydantic-1.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:86936c383f7c38fd26d35107eb669c85d8f46dfceae873264d9bab46fe1c7dde"}, + {file = "pydantic-1.10.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a26841be620309a9697f5b1ffc47dce74909e350c5315ccdac7a853484d468a"}, + {file = "pydantic-1.10.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:409b810f387610cc7405ab2fa6f62bdf7ea485311845a242ebc0bd0496e7e5ac"}, + {file = "pydantic-1.10.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ce937a2a2c020bcad1c9fde02892392a1123de6dda906ddba62bfe8f3e5989a2"}, + {file = "pydantic-1.10.10-cp39-cp39-win_amd64.whl", hash = "sha256:37ebddef68370e6f26243acc94de56d291e01227a67b2ace26ea3543cf53dd5f"}, + {file = "pydantic-1.10.10-py3-none-any.whl", hash = "sha256:a5939ec826f7faec434e2d406ff5e4eaf1716eb1f247d68cd3d0b3612f7b4c8a"}, + {file = "pydantic-1.10.10.tar.gz", hash = "sha256:3b8d5bd97886f9eb59260594207c9f57dce14a6f869c6ceea90188715d29921a"}, ] [package.dependencies] diff --git a/pyproject.toml b/pyproject.toml index fc47b1ef7101..67ee3f1738a4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -207,7 +207,8 @@ packaging = ">=16.1" # which shipped in Python 3.8. This corresponds to version 1.4 of the backport. importlib_metadata = { version = ">=1.4", python = "<3.8" } # This is the most recent version of Pydantic with available on common distros. -pydantic = ">=1.7.4" +# We are currently incompatible with >=2.0.0: (https://github.com/matrix-org/synapse/issues/15858) +pydantic = "^1.7.4" # This is for building the rust components during "poetry install", which # currently ignores the `build-system.requires` directive (c.f. From 664ba140807750137e2e383eceaa98471f47b575 Mon Sep 17 00:00:00 2001 From: "Olivier Wilkinson (reivilibre)" Date: Tue, 4 Jul 2023 16:25:33 +0100 Subject: [PATCH 120/153] 1.87.0 --- CHANGES.md | 13 +++++++++++++ changelog.d/15846.misc | 1 - changelog.d/15862.bugfix | 3 --- debian/changelog | 6 ++++++ pyproject.toml | 2 +- 5 files changed, 20 insertions(+), 5 deletions(-) delete mode 100644 changelog.d/15846.misc delete mode 100644 changelog.d/15862.bugfix diff --git a/CHANGES.md b/CHANGES.md index 2765045a139a..134537f6ef73 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,16 @@ +# Synapse 1.87.0 (2023-07-04) + +### Bugfixes + +- Pin `pydantic` to `^1.7.4` to avoid backwards-incompatible API changes from the 2.0.0 release. + Resolves https://github.com/matrix-org/synapse/issues/15858. + Contributed by @PaarthShah. ([\#15862](https://github.com/matrix-org/synapse/issues/15862)) + +### Internal Changes + +- Split out 2022 changes from the changelog so the rendered version in GitHub doesn't timeout as much. ([\#15846](https://github.com/matrix-org/synapse/issues/15846)) + + # Synapse 1.87.0rc1 (2023-06-27) Please note that this will be the last release of Synapse that is compatible with diff --git a/changelog.d/15846.misc b/changelog.d/15846.misc deleted file mode 100644 index f1c31d6663ab..000000000000 --- a/changelog.d/15846.misc +++ /dev/null @@ -1 +0,0 @@ -Split out 2022 changes from the changelog so the rendered version in GitHub doesn't timeout as much. diff --git a/changelog.d/15862.bugfix b/changelog.d/15862.bugfix deleted file mode 100644 index 8eb6aa9a7f8c..000000000000 --- a/changelog.d/15862.bugfix +++ /dev/null @@ -1,3 +0,0 @@ -Pin `pydantic` to ^=1.7.4 to avoid backwards-incompatible API changes from the 2.0.0 release. -Resolves https://github.com/matrix-org/synapse/issues/15858. -Contributed by @PaarthShah. 
diff --git a/debian/changelog b/debian/changelog index 2fa8d30fe1ed..0d9216bee8ad 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.87.0) stable; urgency=medium + + * New Synapse release 1.87.0. + + -- Synapse Packaging team Tue, 04 Jul 2023 16:24:00 +0100 + matrix-synapse-py3 (1.87.0~rc1) stable; urgency=medium * New synapse release 1.87.0rc1. diff --git a/pyproject.toml b/pyproject.toml index 67ee3f1738a4..192a07756b6e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.87.0rc1" +version = "1.87.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0" From 718d7dfef2f8668c4708536d3701030eea3c207d Mon Sep 17 00:00:00 2001 From: "Olivier Wilkinson (reivilibre)" Date: Tue, 4 Jul 2023 16:26:50 +0100 Subject: [PATCH 121/153] Move warning up to the top --- CHANGES.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 134537f6ef73..4c8ecbb352ed 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,5 +1,8 @@ # Synapse 1.87.0 (2023-07-04) +Please note that this will be the last release of Synapse that is compatible with +Python 3.7 and earlier. + ### Bugfixes - Pin `pydantic` to `^1.7.4` to avoid backwards-incompatible API changes from the 2.0.0 release. @@ -13,9 +16,6 @@ # Synapse 1.87.0rc1 (2023-06-27) -Please note that this will be the last release of Synapse that is compatible with -Python 3.7 and earlier. - ### Features - Improve `/messages` response time by avoiding backfill when we already have messages to return. ([\#15737](https://github.com/matrix-org/synapse/issues/15737)) From 1294d10c704a891392caf8c358fa8e00b1492874 Mon Sep 17 00:00:00 2001 From: "Olivier Wilkinson (reivilibre)" Date: Tue, 4 Jul 2023 16:34:41 +0100 Subject: [PATCH 122/153] Add notes about Python 3.7 EOL --- CHANGES.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 4c8ecbb352ed..860e89ed99ff 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -2,6 +2,8 @@ Please note that this will be the last release of Synapse that is compatible with Python 3.7 and earlier. +This is due to Python 3.7 now having reached End of Life; see our [deprecation policy](https://matrix-org.github.io/synapse/v1.87/deprecation_policy.html) +for more details. ### Bugfixes From c8e81898b66086ee8bdfd18bd24452c26033e480 Mon Sep 17 00:00:00 2001 From: Michael Weimann Date: Wed, 5 Jul 2023 00:03:20 +0200 Subject: [PATCH 123/153] Add not_user_type param to the list accounts admin API (#15844) Signed-off-by: Michael Weimann --- changelog.d/15844.feature | 1 + docs/admin_api/user_admin_api.md | 3 + synapse/rest/admin/users.py | 9 +++ synapse/storage/databases/main/__init__.py | 37 ++++++++++ tests/rest/admin/test_user.py | 78 ++++++++++++++++++++++ 5 files changed, 128 insertions(+) create mode 100644 changelog.d/15844.feature diff --git a/changelog.d/15844.feature b/changelog.d/15844.feature new file mode 100644 index 000000000000..c220055d41c9 --- /dev/null +++ b/changelog.d/15844.feature @@ -0,0 +1 @@ +Add `not_user_type` param to the list accounts admin API. 
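As the documentation and tests below show, the new `not_user_type` parameter is given once per excluded type. A usage sketch, assuming a homeserver at a placeholder URL, a placeholder admin token, and `requests` purely for illustration:

    import requests

    ADMIN_TOKEN = "syt_..."  # placeholder admin access token

    resp = requests.get(
        "https://synapse.example.com/_synapse/admin/v2/users",
        # Repeating the key produces ?not_user_type=bot&not_user_type=support
        params=[("not_user_type", "bot"), ("not_user_type", "support")],
        headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
        timeout=10,
    )
    resp.raise_for_status()
    for user in resp.json()["users"]:
        print(user["name"], user.get("user_type"))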
diff --git a/docs/admin_api/user_admin_api.md b/docs/admin_api/user_admin_api.md index 229942b3112f..f17e60b1cb60 100644 --- a/docs/admin_api/user_admin_api.md +++ b/docs/admin_api/user_admin_api.md @@ -242,6 +242,9 @@ The following parameters should be set in the URL: - `dir` - Direction of media order. Either `f` for forwards or `b` for backwards. Setting this value to `b` will reverse the above sort order. Defaults to `f`. +- `not_user_type` - Exclude certain user types, such as bot users, from the request. + Can be provided multiple times. Possible values are `bot`, `support` or "empty string". + "empty string" here means to exclude users without a type. Caution. The database only has indexes on the columns `name` and `creation_ts`. This means that if a different sort order is used (`is_guest`, `admin`, diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index 407fe9c8043a..e0257daa751d 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -28,6 +28,7 @@ parse_integer, parse_json_object_from_request, parse_string, + parse_strings_from_args, ) from synapse.http.site import SynapseRequest from synapse.rest.admin._base import ( @@ -64,6 +65,9 @@ class UsersRestServletV2(RestServlet): The parameter `guests` can be used to exclude guest users. The parameter `deactivated` can be used to include deactivated users. The parameter `order_by` can be used to order the result. + The parameter `not_user_type` can be used to exclude certain user types. + Possible values are `bot`, `support` or "empty string". + "empty string" here means to exclude users without a type. """ def __init__(self, hs: "HomeServer"): @@ -131,6 +135,10 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: direction = parse_enum(request, "dir", Direction, default=Direction.FORWARDS) + # twisted.web.server.Request.args is incorrectly defined as Optional[Any] + args: Dict[bytes, List[bytes]] = request.args # type: ignore + not_user_types = parse_strings_from_args(args, "not_user_type") + users, total = await self.store.get_users_paginate( start, limit, @@ -141,6 +149,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: order_by, direction, approved, + not_user_types, ) # If support for MSC3866 is not enabled, don't show the approval flag. diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py index 3a10c265c9ea..80c0304b1917 100644 --- a/synapse/storage/databases/main/__init__.py +++ b/synapse/storage/databases/main/__init__.py @@ -19,6 +19,7 @@ from synapse.api.constants import Direction from synapse.config.homeserver import HomeServerConfig +from synapse.storage._base import make_in_list_sql_clause from synapse.storage.database import ( DatabasePool, LoggingDatabaseConnection, @@ -170,6 +171,7 @@ async def get_users_paginate( order_by: str = UserSortOrder.NAME.value, direction: Direction = Direction.FORWARDS, approved: bool = True, + not_user_types: Optional[List[str]] = None, ) -> Tuple[List[JsonDict], int]: """Function to retrieve a paginated list of users from users list. This will return a json list of users and the @@ -185,6 +187,7 @@ async def get_users_paginate( order_by: the sort order of the returned list direction: sort ascending or descending approved: whether to include approved users + not_user_types: list of user types to exclude Returns: A tuple of a list of mappings from user to information and a count of total users. 
""" @@ -222,6 +225,40 @@ def get_users_paginate_txn( # be already existing users that we consider as already approved. filters.append("approved IS FALSE") + if not_user_types: + if len(not_user_types) == 1 and not_user_types[0] == "": + # Only exclude NULL type users + filters.append("user_type IS NOT NULL") + else: + not_user_types_has_empty = False + not_user_types_without_empty = [] + + for not_user_type in not_user_types: + if not_user_type == "": + not_user_types_has_empty = True + else: + not_user_types_without_empty.append(not_user_type) + + not_user_type_clause, not_user_type_args = make_in_list_sql_clause( + self.database_engine, + "u.user_type", + not_user_types_without_empty, + ) + + if not_user_types_has_empty: + # NULL values should be excluded. + # They evaluate to false > nothing to do here. + filters.append("NOT %s" % (not_user_type_clause)) + else: + # NULL values should *not* be excluded. + # Add a special predicate to the query. + filters.append( + "(NOT %s OR %s IS NULL)" + % (not_user_type_clause, "u.user_type") + ) + + args.extend(not_user_type_args) + where_clause = "WHERE " + " AND ".join(filters) if len(filters) > 0 else "" sql_base = f""" diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py index 434bb56d4451..a17a1bb1d8b1 100644 --- a/tests/rest/admin/test_user.py +++ b/tests/rest/admin/test_user.py @@ -933,6 +933,84 @@ def test_filter_out_approved(self) -> None: self.assertEqual(1, len(non_admin_user_ids), non_admin_user_ids) self.assertEqual(not_approved_user, non_admin_user_ids[0]) + def test_filter_not_user_types(self) -> None: + """Tests that the endpoint handles the not_user_types param""" + + regular_user_id = self.register_user("normalo", "secret") + + bot_user_id = self.register_user("robo", "secret") + self.make_request( + "PUT", + "/_synapse/admin/v2/users/" + urllib.parse.quote(bot_user_id), + {"user_type": UserTypes.BOT}, + access_token=self.admin_user_tok, + ) + + support_user_id = self.register_user("foo", "secret") + self.make_request( + "PUT", + "/_synapse/admin/v2/users/" + urllib.parse.quote(support_user_id), + {"user_type": UserTypes.SUPPORT}, + access_token=self.admin_user_tok, + ) + + def test_user_type( + expected_user_ids: List[str], not_user_types: Optional[List[str]] = None + ) -> None: + """Runs a test for the not_user_types param + Args: + expected_user_ids: Ids of the users that are expected to be returned + not_user_types: List of values for the not_user_types param + """ + + user_type_query = "" + + if not_user_types is not None: + user_type_query = "&".join( + [f"not_user_type={u}" for u in not_user_types] + ) + + test_url = f"{self.url}?{user_type_query}" + channel = self.make_request( + "GET", + test_url, + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, channel.code) + self.assertEqual(channel.json_body["total"], len(expected_user_ids)) + self.assertEqual( + expected_user_ids, + [u["name"] for u in channel.json_body["users"]], + ) + + # Request without user_types → all users expected + test_user_type([self.admin_user, support_user_id, regular_user_id, bot_user_id]) + + # Request and exclude bot users + test_user_type( + [self.admin_user, support_user_id, regular_user_id], + not_user_types=[UserTypes.BOT], + ) + + # Request and exclude bot and support users + test_user_type( + [self.admin_user, regular_user_id], + not_user_types=[UserTypes.BOT, UserTypes.SUPPORT], + ) + + # Request and exclude empty user types → only expected the bot and support user + test_user_type([support_user_id, 
bot_user_id], not_user_types=[""]) + + # Request and exclude empty user types and bots → only expected the support user + test_user_type([support_user_id], not_user_types=["", UserTypes.BOT]) + + # Request and exclude a custom type (neither service nor bot) → expect all users + test_user_type( + [self.admin_user, support_user_id, regular_user_id, bot_user_id], + not_user_types=["custom"], + ) + def test_erasure_status(self) -> None: # Create a new user. user_id = self.register_user("eraseme", "eraseme") From c303eca8cc31e5eb9edb10019f02c3a9e39a47ab Mon Sep 17 00:00:00 2001 From: an0nfunc <40771419+an0nfunc@users.noreply.github.com> Date: Wed, 5 Jul 2023 10:52:12 +0200 Subject: [PATCH 124/153] use Image.LANCZOS instead of Image.ANTIALIAS for thumbnail resize (#15876) Image.ANTIALIAS is not defined in current pillow releases. Since ANTIALIAS was just using LANCZOS anyways, this is just a cosmetic change, but makes synapse work with most recent pillow releases. Signed-off-by: Giovanni Harting <539@idlegandalf.com> --- changelog.d/15876.bugfix | 1 + synapse/media/thumbnailer.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15876.bugfix diff --git a/changelog.d/15876.bugfix b/changelog.d/15876.bugfix new file mode 100644 index 000000000000..9dbae04c4f40 --- /dev/null +++ b/changelog.d/15876.bugfix @@ -0,0 +1 @@ +Correctly resize thumbnails with pillow version >=10. diff --git a/synapse/media/thumbnailer.py b/synapse/media/thumbnailer.py index f909a4fb9a19..73d2272f0583 100644 --- a/synapse/media/thumbnailer.py +++ b/synapse/media/thumbnailer.py @@ -131,7 +131,7 @@ def _resize(self, width: int, height: int) -> Image.Image: else: with self.image: self.image = self.image.convert("RGB") - return self.image.resize((width, height), Image.ANTIALIAS) + return self.image.resize((width, height), Image.LANCZOS) def scale(self, width: int, height: int, output_type: str) -> BytesIO: """Rescales the image to the given dimensions. From 95a96b21eb98c638ae36814ec74ba468226e373c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 5 Jul 2023 10:43:19 +0100 Subject: [PATCH 125/153] Add foreign key constraint to `event_forward_extremities`. (#15751) --- changelog.d/15751.misc | 1 + synapse/_scripts/synapse_port_db.py | 2 + synapse/storage/background_updates.py | 335 +++++++++++++++++- synapse/storage/database.py | 37 ++ .../databases/main/event_federation.py | 10 + synapse/storage/databases/main/events.py | 12 +- .../78/03event_extremities_constraints.py | 51 +++ tests/storage/test_background_update.py | 227 +++++++++++- tests/storage/test_event_federation.py | 35 +- 9 files changed, 699 insertions(+), 11 deletions(-) create mode 100644 changelog.d/15751.misc create mode 100644 synapse/storage/schema/main/delta/78/03event_extremities_constraints.py diff --git a/changelog.d/15751.misc b/changelog.d/15751.misc new file mode 100644 index 000000000000..e0ecea6c2fdb --- /dev/null +++ b/changelog.d/15751.misc @@ -0,0 +1 @@ +Add foreign key constraint to `event_forward_extremities`. 
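The diff that follows adds the `event_forward_extremities(event_id)` to `events(event_id)` foreign key in two ways: on PostgreSQL the constraint is created `NOT VALID` and validated by a background update, while on SQLite the table is rebuilt and only rows passing the check are copied across. A compact, standalone illustration of the SQLite route, using stdlib `sqlite3` and toy data (the real delta also restores indexes and triggers):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript(
        """
        CREATE TABLE events (event_id TEXT PRIMARY KEY);
        CREATE TABLE event_forward_extremities (
            event_id TEXT NOT NULL, room_id TEXT NOT NULL, UNIQUE (event_id, room_id)
        );
        INSERT INTO events VALUES ('$a'), ('$b');
        INSERT INTO event_forward_extremities VALUES ('$a', '!room'), ('$stale', '!room');
        """
    )

    conn.executescript(
        """
        -- 1. Create an empty copy of the table with the new foreign key.
        CREATE TABLE event_forward_extremities2 (
            event_id TEXT NOT NULL,
            room_id TEXT NOT NULL,
            UNIQUE (event_id, room_id),
            FOREIGN KEY (event_id) REFERENCES events (event_id)
        );
        -- 2. Copy across only the rows that satisfy the check.
        INSERT INTO event_forward_extremities2
            SELECT * FROM event_forward_extremities
            WHERE EXISTS (
                SELECT 1 FROM events
                WHERE events.event_id = event_forward_extremities.event_id
            );
        -- 3. Swap the tables.
        DROP TABLE event_forward_extremities;
        ALTER TABLE event_forward_extremities2 RENAME TO event_forward_extremities;
        """
    )

    print(conn.execute("SELECT event_id FROM event_forward_extremities").fetchall())
    # [('$a',)] -- the extremity pointing at a missing event has been dropped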
diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index a803ada8ad06..e126a2e0c573 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -61,6 +61,7 @@ from synapse.storage.databases.main.devices import DeviceBackgroundUpdateStore from synapse.storage.databases.main.e2e_room_keys import EndToEndRoomKeyBackgroundStore from synapse.storage.databases.main.end_to_end_keys import EndToEndKeyBackgroundStore +from synapse.storage.databases.main.event_federation import EventFederationWorkerStore from synapse.storage.databases.main.event_push_actions import EventPushActionsStore from synapse.storage.databases.main.events_bg_updates import ( EventsBackgroundUpdatesStore, @@ -239,6 +240,7 @@ class Store( PresenceBackgroundUpdateStore, ReceiptsBackgroundUpdateStore, RelationsWorkerStore, + EventFederationWorkerStore, ): def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]: return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs) diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index edc97a9d6105..5dce0a01599d 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -11,8 +11,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import abc import logging -from enum import IntEnum +from enum import Enum, IntEnum from types import TracebackType from typing import ( TYPE_CHECKING, @@ -24,12 +25,16 @@ Iterable, List, Optional, + Sequence, + Tuple, Type, ) import attr +from pydantic import BaseModel from synapse.metrics.background_process_metrics import run_as_background_process +from synapse.storage.engines import PostgresEngine from synapse.storage.types import Connection, Cursor from synapse.types import JsonDict from synapse.util import Clock, json_encoder @@ -48,6 +53,78 @@ MIN_BATCH_SIZE_CALLBACK = Callable[[str, str], Awaitable[int]] +class Constraint(metaclass=abc.ABCMeta): + """Base class representing different constraints. + + Used by `register_background_validate_constraint_and_delete_rows`. + """ + + @abc.abstractmethod + def make_check_clause(self, table: str) -> str: + """Returns an SQL expression that checks the row passes the constraint.""" + pass + + @abc.abstractmethod + def make_constraint_clause_postgres(self) -> str: + """Returns an SQL clause for creating the constraint. + + Only used on Postgres DBs + """ + pass + + +@attr.s(auto_attribs=True) +class ForeignKeyConstraint(Constraint): + """A foreign key constraint. + + Attributes: + referenced_table: The "parent" table name. 
+ columns: The list of mappings of columns from table to referenced table + """ + + referenced_table: str + columns: Sequence[Tuple[str, str]] + + def make_check_clause(self, table: str) -> str: + join_clause = " AND ".join( + f"{col1} = {table}.{col2}" for col1, col2 in self.columns + ) + return f"EXISTS (SELECT 1 FROM {self.referenced_table} WHERE {join_clause})" + + def make_constraint_clause_postgres(self) -> str: + column1_list = ", ".join(col1 for col1, col2 in self.columns) + column2_list = ", ".join(col2 for col1, col2 in self.columns) + return f"FOREIGN KEY ({column1_list}) REFERENCES {self.referenced_table} ({column2_list})" + + +@attr.s(auto_attribs=True) +class NotNullConstraint(Constraint): + """A NOT NULL column constraint""" + + column: str + + def make_check_clause(self, table: str) -> str: + return f"{self.column} IS NOT NULL" + + def make_constraint_clause_postgres(self) -> str: + return f"CHECK ({self.column} IS NOT NULL)" + + +class ValidateConstraintProgress(BaseModel): + """The format of the progress JSON for validate constraint background + updates. + + Used by `register_background_validate_constraint_and_delete_rows`. + """ + + class State(str, Enum): + check = "check" + validate = "validate" + + state: State = State.validate + lower_bound: Sequence[Any] = () + + @attr.s(slots=True, frozen=True, auto_attribs=True) class _BackgroundUpdateHandler: """A handler for a given background update. @@ -740,6 +817,179 @@ def create_index_sqlite(conn: Connection) -> None: logger.info("Adding index %s to %s", index_name, table) await self.db_pool.runWithConnection(runner) + def register_background_validate_constraint_and_delete_rows( + self, + update_name: str, + table: str, + constraint_name: str, + constraint: Constraint, + unique_columns: Sequence[str], + ) -> None: + """Helper for store classes to do a background validate constraint, and + delete rows that do not pass the constraint check. + + Note: This deletes rows that don't match the constraint. This may not be + appropriate in all situations, and so the suitability of using this + method should be considered on a case-by-case basis. + + This only applies on PostgreSQL. + + For SQLite the table gets recreated as part of the schema delta and the + data is copied over synchronously (or whatever the correct way to + describe it as). + + Args: + update_name: The name of the background update. + table: The table with the invalid constraint. + constraint_name: The name of the constraint + constraint: A `Constraint` object matching the type of constraint. + unique_columns: A sequence of columns that form a unique constraint + on the table. Used to iterate over the table. 
+ """ + + assert isinstance( + self.db_pool.engine, engines.PostgresEngine + ), "validate constraint background update registered for non-Postres database" + + async def updater(progress: JsonDict, batch_size: int) -> int: + return await self.validate_constraint_and_delete_in_background( + update_name=update_name, + table=table, + constraint_name=constraint_name, + constraint=constraint, + unique_columns=unique_columns, + progress=progress, + batch_size=batch_size, + ) + + self._background_update_handlers[update_name] = _BackgroundUpdateHandler( + updater, oneshot=True + ) + + async def validate_constraint_and_delete_in_background( + self, + update_name: str, + table: str, + constraint_name: str, + constraint: Constraint, + unique_columns: Sequence[str], + progress: JsonDict, + batch_size: int, + ) -> int: + """Validates a table constraint that has been marked as `NOT VALID`, + deleting rows that don't pass the constraint check. + + This will delete rows that do not meet the validation check. + + update_name: str, + table: str, + constraint_name: str, + constraint: Constraint, + unique_columns: Sequence[str], + """ + + # We validate the constraint by: + # 1. Trying to validate the constraint as is. If this succeeds then + # we're done. + # 2. Otherwise, we manually scan the table to remove rows that don't + # match the constraint. + # 3. We try re-validating the constraint. + + parsed_progress = ValidateConstraintProgress.parse_obj(progress) + + if parsed_progress.state == ValidateConstraintProgress.State.check: + return_columns = ", ".join(unique_columns) + order_columns = ", ".join(unique_columns) + + where_clause = "" + args: List[Any] = [] + if parsed_progress.lower_bound: + where_clause = f"""WHERE ({order_columns}) > ({", ".join("?" for _ in unique_columns)})""" + args.extend(parsed_progress.lower_bound) + + args.append(batch_size) + + sql = f""" + SELECT + {return_columns}, + {constraint.make_check_clause(table)} AS check + FROM {table} + {where_clause} + ORDER BY {order_columns} + LIMIT ? + """ + + def validate_constraint_in_background_check( + txn: "LoggingTransaction", + ) -> None: + txn.execute(sql, args) + rows = txn.fetchall() + + new_progress = parsed_progress.copy() + + if not rows: + new_progress.state = ValidateConstraintProgress.State.validate + self._background_update_progress_txn( + txn, update_name, new_progress.dict() + ) + return + + new_progress.lower_bound = rows[-1][:-1] + + to_delete = [row[:-1] for row in rows if not row[-1]] + + if to_delete: + logger.warning( + "Deleting %d rows that do not pass new constraint", + len(to_delete), + ) + + self.db_pool.simple_delete_many_batch_txn( + txn, table=table, keys=unique_columns, values=to_delete + ) + + self._background_update_progress_txn( + txn, update_name, new_progress.dict() + ) + + await self.db_pool.runInteraction( + "validate_constraint_in_background_check", + validate_constraint_in_background_check, + ) + + return batch_size + + elif parsed_progress.state == ValidateConstraintProgress.State.validate: + sql = f"ALTER TABLE {table} VALIDATE CONSTRAINT {constraint_name}" + + def validate_constraint_in_background_validate( + txn: "LoggingTransaction", + ) -> None: + txn.execute(sql) + + try: + await self.db_pool.runInteraction( + "validate_constraint_in_background_validate", + validate_constraint_in_background_validate, + ) + + await self._end_background_update(update_name) + except self.db_pool.engine.module.IntegrityError as e: + # If we get an integrity error here, then we go back and recheck the table. 
+ logger.warning("Integrity error when validating constraint: %s", e) + await self._background_update_progress( + update_name, + ValidateConstraintProgress( + state=ValidateConstraintProgress.State.check + ).dict(), + ) + + return batch_size + else: + raise Exception( + f"Unrecognized state '{parsed_progress.state}' when trying to validate_constraint_and_delete_in_background" + ) + async def _end_background_update(self, update_name: str) -> None: """Removes a completed background update task from the queue. @@ -795,3 +1045,86 @@ def _background_update_progress_txn( keyvalues={"update_name": update_name}, updatevalues={"progress_json": progress_json}, ) + + +def run_validate_constraint_and_delete_rows_schema_delta( + txn: "LoggingTransaction", + ordering: int, + update_name: str, + table: str, + constraint_name: str, + constraint: Constraint, + sqlite_table_name: str, + sqlite_table_schema: str, +) -> None: + """Runs a schema delta to add a constraint to the table. This should be run + in a schema delta file. + + For PostgreSQL the constraint is added and validated in the background. + + For SQLite the table is recreated and data copied across immediately. This + is done by the caller passing in a script to create the new table. Note that + table indexes and triggers are copied over automatically. + + There must be a corresponding call to + `register_background_validate_constraint_and_delete_rows` to register the + background update in one of the data store classes. + + Attributes: + txn ordering, update_name: For adding a row to background_updates table. + table: The table to add constraint to. constraint_name: The name of the + new constraint constraint: A `Constraint` object describing the + constraint sqlite_table_name: For SQLite the name of the empty copy of + table sqlite_table_schema: A SQL script for creating the above table. + """ + + if isinstance(txn.database_engine, PostgresEngine): + # For postgres we can just add the constraint and mark it as NOT VALID, + # and then insert a background update to go and check the validity in + # the background. + txn.execute( + f""" + ALTER TABLE {table} + ADD CONSTRAINT {constraint_name} {constraint.make_constraint_clause_postgres()} + NOT VALID + """ + ) + + txn.execute( + "INSERT INTO background_updates (ordering, update_name, progress_json) VALUES (?, ?, '{}')", + (ordering, update_name), + ) + else: + # For SQLite, we: + # 1. fetch all indexes/triggers/etc related to the table + # 2. create an empty copy of the table + # 3. copy across the rows (that satisfy the check) + # 4. replace the old table with the new able. + # 5. add back all the indexes/triggers/etc + + # Fetch the indexes/triggers/etc. Note that `sql` column being null is + # due to indexes being auto created based on the class definition (e.g. + # PRIMARY KEY), and so don't need to be recreated. + txn.execute( + """ + SELECT sql FROM sqlite_master + WHERE tbl_name = ? 
AND type != 'table' AND sql IS NOT NULL + """, + (table,), + ) + extras = [row[0] for row in txn] + + txn.execute(sqlite_table_schema) + + sql = f""" + INSERT INTO {sqlite_table_name} SELECT * FROM {table} + WHERE {constraint.make_check_clause(table)} + """ + + txn.execute(sql) + + txn.execute(f"DROP TABLE {table}") + txn.execute(f"ALTER TABLE {sqlite_table_name} RENAME TO {table}") + + for extra in extras: + txn.execute(extra) diff --git a/synapse/storage/database.py b/synapse/storage/database.py index 7e49ae11bc0e..a1c8fb0f46a4 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -2313,6 +2313,43 @@ def simple_delete_many_txn( return txn.rowcount + @staticmethod + def simple_delete_many_batch_txn( + txn: LoggingTransaction, + table: str, + keys: Collection[str], + values: Iterable[Iterable[Any]], + ) -> None: + """Executes a DELETE query on the named table. + + The input is given as a list of rows, where each row is a list of values. + (Actually any iterable is fine.) + + Args: + txn: The transaction to use. + table: string giving the table name + keys: list of column names + values: for each row, a list of values in the same order as `keys` + """ + + if isinstance(txn.database_engine, PostgresEngine): + # We use `execute_values` as it can be a lot faster than `execute_batch`, + # but it's only available on postgres. + sql = "DELETE FROM %s WHERE (%s) IN (VALUES ?)" % ( + table, + ", ".join(k for k in keys), + ) + + txn.execute_values(sql, values, fetch=False) + else: + sql = "DELETE FROM %s WHERE (%s) = (%s)" % ( + table, + ", ".join(k for k in keys), + ", ".join("?" for _ in keys), + ) + + txn.execute_batch(sql, values) + def get_cache_dict( self, db_conn: LoggingDatabaseConnection, diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index 8b6e3c1dc734..dabe603c8cba 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -38,6 +38,7 @@ from synapse.logging.opentracing import tag_args, trace from synapse.metrics.background_process_metrics import wrap_as_background_process from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause +from synapse.storage.background_updates import ForeignKeyConstraint from synapse.storage.database import ( DatabasePool, LoggingDatabaseConnection, @@ -140,6 +141,15 @@ def __init__( self._clock.looping_call(self._get_stats_for_federation_staging, 30 * 1000) + if isinstance(self.database_engine, PostgresEngine): + self.db_pool.updates.register_background_validate_constraint_and_delete_rows( + update_name="event_forward_extremities_event_id_foreign_key_constraint_update", + table="event_forward_extremities", + constraint_name="event_forward_extremities_event_id", + constraint=ForeignKeyConstraint("events", [("event_id", "event_id")]), + unique_columns=("event_id", "room_id"), + ) + async def get_auth_chain( self, room_id: str, event_ids: Collection[str], include_given: bool = False ) -> List[EventBase]: diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index 5c9db7554ef4..2b83a694269f 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -415,12 +415,6 @@ def _persist_events_txn( backfilled=False, ) - self._update_forward_extremities_txn( - txn, - new_forward_extremities=new_forward_extremities, - max_stream_order=max_stream_order, - ) - # Ensure that we don't have the same event twice. 
events_and_contexts = self._filter_events_and_contexts_for_duplicates( events_and_contexts @@ -439,6 +433,12 @@ def _persist_events_txn( self._store_event_txn(txn, events_and_contexts=events_and_contexts) + self._update_forward_extremities_txn( + txn, + new_forward_extremities=new_forward_extremities, + max_stream_order=max_stream_order, + ) + self._persist_transaction_ids_txn(txn, events_and_contexts) # Insert into event_to_state_groups. diff --git a/synapse/storage/schema/main/delta/78/03event_extremities_constraints.py b/synapse/storage/schema/main/delta/78/03event_extremities_constraints.py new file mode 100644 index 000000000000..f12e2a8f3ee9 --- /dev/null +++ b/synapse/storage/schema/main/delta/78/03event_extremities_constraints.py @@ -0,0 +1,51 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +""" +This migration adds foreign key constraint to `event_forward_extremities` table. +""" +from synapse.storage.background_updates import ( + ForeignKeyConstraint, + run_validate_constraint_and_delete_rows_schema_delta, +) +from synapse.storage.database import LoggingTransaction +from synapse.storage.engines import BaseDatabaseEngine + +FORWARD_EXTREMITIES_TABLE_SCHEMA = """ + CREATE TABLE event_forward_extremities2( + event_id TEXT NOT NULL, + room_id TEXT NOT NULL, + UNIQUE (event_id, room_id), + CONSTRAINT event_forward_extremities_event_id FOREIGN KEY (event_id) REFERENCES events (event_id) + ) +""" + + +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: + run_validate_constraint_and_delete_rows_schema_delta( + cur, + ordering=7803, + update_name="event_forward_extremities_event_id_foreign_key_constraint_update", + table="event_forward_extremities", + constraint_name="event_forward_extremities_event_id", + constraint=ForeignKeyConstraint("events", [("event_id", "event_id")]), + sqlite_table_name="event_forward_extremities2", + sqlite_table_schema=FORWARD_EXTREMITIES_TABLE_SCHEMA, + ) + + # We can't add a similar constraint to `event_backward_extremities` as the + # events in there don't exist in the `events` table and `event_edges` + # doesn't have a unique constraint on `prev_event_id` (so we can't make a + # foreign key point to it). 
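For reference, the PostgreSQL route used above, condensed into a standalone sketch. This assumes `psycopg2` and a placeholder `PG_DSN` connection string, and unlike Synapse it deletes the failing rows in a single statement rather than incrementally from the registered background update:

    import psycopg2

    PG_DSN = "dbname=synapse user=synapse"  # placeholder connection string

    with psycopg2.connect(PG_DSN) as conn, conn.cursor() as cur:
        # Adding the constraint as NOT VALID is cheap: existing rows are not scanned.
        cur.execute(
            """
            ALTER TABLE event_forward_extremities
            ADD CONSTRAINT event_forward_extremities_event_id
            FOREIGN KEY (event_id) REFERENCES events (event_id)
            NOT VALID
            """
        )
        # Remove rows that would fail validation.
        cur.execute(
            """
            DELETE FROM event_forward_extremities AS efe
            WHERE NOT EXISTS (SELECT 1 FROM events WHERE events.event_id = efe.event_id)
            """
        )
        # Now the full-table check can succeed.
        cur.execute(
            "ALTER TABLE event_forward_extremities "
            "VALIDATE CONSTRAINT event_forward_extremities_event_id"
        )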
diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py index fd619b64d4dd..6ca546f3f76a 100644 --- a/tests/storage/test_background_update.py +++ b/tests/storage/test_background_update.py @@ -20,7 +20,14 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.server import HomeServer -from synapse.storage.background_updates import BackgroundUpdater +from synapse.storage.background_updates import ( + BackgroundUpdater, + ForeignKeyConstraint, + NotNullConstraint, + run_validate_constraint_and_delete_rows_schema_delta, +) +from synapse.storage.database import LoggingTransaction +from synapse.storage.engines import PostgresEngine, Sqlite3Engine from synapse.types import JsonDict from synapse.util import Clock @@ -404,3 +411,221 @@ def test_controller(self) -> None: self.pump() self._update_ctx_manager.__aexit__.assert_called() self.get_success(do_update_d) + + +class BackgroundUpdateValidateConstraintTestCase(unittest.HomeserverTestCase): + """Tests the validate contraint and delete background handlers.""" + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.updates: BackgroundUpdater = self.hs.get_datastores().main.db_pool.updates + # the base test class should have run the real bg updates for us + self.assertTrue( + self.get_success(self.updates.has_completed_background_updates()) + ) + + self.store = self.hs.get_datastores().main + + def test_not_null_constraint(self) -> None: + # Create the initial tables, where we have some invalid data. + """Tests adding a not null constraint.""" + table_sql = """ + CREATE TABLE test_constraint( + a INT PRIMARY KEY, + b INT + ); + """ + self.get_success( + self.store.db_pool.execute( + "test_not_null_constraint", lambda _: None, table_sql + ) + ) + + # We add an index so that we can check that its correctly recreated when + # using SQLite. 
+ index_sql = "CREATE INDEX test_index ON test_constraint(a)" + self.get_success( + self.store.db_pool.execute( + "test_not_null_constraint", lambda _: None, index_sql + ) + ) + + self.get_success( + self.store.db_pool.simple_insert("test_constraint", {"a": 1, "b": 1}) + ) + self.get_success( + self.store.db_pool.simple_insert("test_constraint", {"a": 2, "b": None}) + ) + self.get_success( + self.store.db_pool.simple_insert("test_constraint", {"a": 3, "b": 3}) + ) + + # Now lets do the migration + + table2_sqlite = """ + CREATE TABLE test_constraint2( + a INT PRIMARY KEY, + b INT, + CONSTRAINT test_constraint_name CHECK (b is NOT NULL) + ); + """ + + def delta(txn: LoggingTransaction) -> None: + run_validate_constraint_and_delete_rows_schema_delta( + txn, + ordering=1000, + update_name="test_bg_update", + table="test_constraint", + constraint_name="test_constraint_name", + constraint=NotNullConstraint("b"), + sqlite_table_name="test_constraint2", + sqlite_table_schema=table2_sqlite, + ) + + self.get_success( + self.store.db_pool.runInteraction( + "test_not_null_constraint", + delta, + ) + ) + + if isinstance(self.store.database_engine, PostgresEngine): + # Postgres uses a background update + self.updates.register_background_validate_constraint_and_delete_rows( + "test_bg_update", + table="test_constraint", + constraint_name="test_constraint_name", + constraint=NotNullConstraint("b"), + unique_columns=["a"], + ) + + # Tell the DataStore that it hasn't finished all updates yet + self.store.db_pool.updates._all_done = False + + # Now let's actually drive the updates to completion + self.wait_for_background_updates() + + # Check the correct values are in the new table. + rows = self.get_success( + self.store.db_pool.simple_select_list( + table="test_constraint", + keyvalues={}, + retcols=("a", "b"), + ) + ) + + self.assertCountEqual(rows, [{"a": 1, "b": 1}, {"a": 3, "b": 3}]) + + # And check that invalid rows get correctly rejected. + self.get_failure( + self.store.db_pool.simple_insert("test_constraint", {"a": 2, "b": None}), + exc=self.store.database_engine.module.IntegrityError, + ) + + # Check the index is still there for SQLite. + if isinstance(self.store.database_engine, Sqlite3Engine): + # Ensure the index exists in the schema. + self.get_success( + self.store.db_pool.simple_select_one_onecol( + table="sqlite_master", + keyvalues={"tbl_name": "test_constraint"}, + retcol="name", + ) + ) + + def test_foreign_constraint(self) -> None: + """Tests adding a not foreign key constraint.""" + + # Create the initial tables, where we have some invalid data. 
+ base_sql = """ + CREATE TABLE base_table( + b INT PRIMARY KEY + ); + """ + + table_sql = """ + CREATE TABLE test_constraint( + a INT PRIMARY KEY, + b INT NOT NULL + ); + """ + self.get_success( + self.store.db_pool.execute( + "test_foreign_key_constraint", lambda _: None, base_sql + ) + ) + self.get_success( + self.store.db_pool.execute( + "test_foreign_key_constraint", lambda _: None, table_sql + ) + ) + + self.get_success(self.store.db_pool.simple_insert("base_table", {"b": 1})) + self.get_success( + self.store.db_pool.simple_insert("test_constraint", {"a": 1, "b": 1}) + ) + self.get_success( + self.store.db_pool.simple_insert("test_constraint", {"a": 2, "b": 2}) + ) + self.get_success(self.store.db_pool.simple_insert("base_table", {"b": 3})) + self.get_success( + self.store.db_pool.simple_insert("test_constraint", {"a": 3, "b": 3}) + ) + + table2_sqlite = """ + CREATE TABLE test_constraint2( + a INT PRIMARY KEY, + b INT NOT NULL, + CONSTRAINT test_constraint_name FOREIGN KEY (b) REFERENCES base_table (b) + ); + """ + + def delta(txn: LoggingTransaction) -> None: + run_validate_constraint_and_delete_rows_schema_delta( + txn, + ordering=1000, + update_name="test_bg_update", + table="test_constraint", + constraint_name="test_constraint_name", + constraint=ForeignKeyConstraint("base_table", [("b", "b")]), + sqlite_table_name="test_constraint2", + sqlite_table_schema=table2_sqlite, + ) + + self.get_success( + self.store.db_pool.runInteraction( + "test_foreign_key_constraint", + delta, + ) + ) + + if isinstance(self.store.database_engine, PostgresEngine): + # Postgres uses a background update + self.updates.register_background_validate_constraint_and_delete_rows( + "test_bg_update", + table="test_constraint", + constraint_name="test_constraint_name", + constraint=ForeignKeyConstraint("base_table", [("b", "b")]), + unique_columns=["a"], + ) + + # Tell the DataStore that it hasn't finished all updates yet + self.store.db_pool.updates._all_done = False + + # Now let's actually drive the updates to completion + self.wait_for_background_updates() + + # Check the correct values are in the new table. + rows = self.get_success( + self.store.db_pool.simple_select_list( + table="test_constraint", + keyvalues={}, + retcols=("a", "b"), + ) + ) + self.assertCountEqual(rows, [{"a": 1, "b": 1}, {"a": 3, "b": 3}]) + + # And check that invalid rows get correctly rejected. + self.get_failure( + self.store.db_pool.simple_insert("test_constraint", {"a": 2, "b": 2}), + exc=self.store.database_engine.module.IntegrityError, + ) diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py index 0f3b0744f184..9c151a5e62d1 100644 --- a/tests/storage/test_event_federation.py +++ b/tests/storage/test_event_federation.py @@ -20,6 +20,7 @@ from twisted.test.proto_helpers import MemoryReactor +from synapse.api.constants import EventTypes from synapse.api.room_versions import ( KNOWN_ROOM_VERSIONS, EventFormatVersions, @@ -98,8 +99,32 @@ def test_get_rooms_with_many_extremities(self) -> None: room2 = "#room2" room3 = "#room3" - def insert_event(txn: Cursor, i: int, room_id: str) -> None: + def insert_event(txn: LoggingTransaction, i: int, room_id: str) -> None: event_id = "$event_%i:local" % i + + # We need to insert into events table to get around the foreign key constraint. 
+ self.store.db_pool.simple_insert_txn( + txn, + table="events", + values={ + "instance_name": "master", + "stream_ordering": self.store._stream_id_gen.get_next_txn(txn), + "topological_ordering": 1, + "depth": 1, + "event_id": event_id, + "room_id": room_id, + "type": EventTypes.Message, + "processed": True, + "outlier": False, + "origin_server_ts": 0, + "received_ts": 0, + "sender": "@user:local", + "contains_url": False, + "state_key": None, + "rejection_reason": None, + }, + ) + txn.execute( ( "INSERT INTO event_forward_extremities (room_id, event_id) " @@ -113,10 +138,14 @@ def insert_event(txn: Cursor, i: int, room_id: str) -> None: self.store.db_pool.runInteraction("insert", insert_event, i, room1) ) self.get_success( - self.store.db_pool.runInteraction("insert", insert_event, i, room2) + self.store.db_pool.runInteraction( + "insert", insert_event, i + 100, room2 + ) ) self.get_success( - self.store.db_pool.runInteraction("insert", insert_event, i, room3) + self.store.db_pool.runInteraction( + "insert", insert_event, i + 200, room3 + ) ) # Test simple case From 4cf9f92f395e8c448b94eccb48fbfe2e7e61d7cd Mon Sep 17 00:00:00 2001 From: Jason Little Date: Wed, 5 Jul 2023 05:44:02 -0500 Subject: [PATCH 126/153] Fix could not serialize access due to concurrent `DELETE` from presence_stream (#15826) * Change update_presence to have a isolation level of READ_COMMITTED * changelog --- changelog.d/15826.misc | 1 + synapse/storage/databases/main/presence.py | 7 ++++++- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15826.misc diff --git a/changelog.d/15826.misc b/changelog.d/15826.misc new file mode 100644 index 000000000000..88903f3f7c7a --- /dev/null +++ b/changelog.d/15826.misc @@ -0,0 +1 @@ +Use lower isolation level when cleaning old presence stream data to avoid serialization errors. diff --git a/synapse/storage/databases/main/presence.py b/synapse/storage/databases/main/presence.py index beb210f8eefd..b51d20ac266c 100644 --- a/synapse/storage/databases/main/presence.py +++ b/synapse/storage/databases/main/presence.py @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Tuple, cast from synapse.api.presence import PresenceState, UserPresenceState @@ -24,6 +23,7 @@ ) from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore from synapse.storage.engines import PostgresEngine +from synapse.storage.engines._base import IsolationLevel from synapse.storage.types import Connection from synapse.storage.util.id_generators import ( AbstractStreamIdGenerator, @@ -115,11 +115,16 @@ async def update_presence( ) async with stream_ordering_manager as stream_orderings: + # Run the interaction with an isolation level of READ_COMMITTED to avoid + # serialization errors(and rollbacks) in the database. This way it will + # ignore new rows during the DELETE, but will pick them up the next time + # this is run. Currently, that is between 5-60 seconds. 
await self.db_pool.runInteraction( "update_presence", self._update_presence_txn, stream_orderings, presence_states, + isolation_level=IsolationLevel.READ_COMMITTED, ) return stream_orderings[-1], self._presence_id_gen.get_current_token() From cc780b3f7711b422698ef3392c2caf146312855d Mon Sep 17 00:00:00 2001 From: Sumner Evans Date: Wed, 5 Jul 2023 08:15:56 -0600 Subject: [PATCH 127/153] docs/admin_api: fix header level on 'Users' page (#15852) Signed-off-by: Sumner Evans --- changelog.d/15852.doc | 1 + docs/admin_api/user_admin_api.md | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15852.doc diff --git a/changelog.d/15852.doc b/changelog.d/15852.doc new file mode 100644 index 000000000000..060b55d106de --- /dev/null +++ b/changelog.d/15852.doc @@ -0,0 +1 @@ +Fixed header levels on the Admin API "Users" documentation page. Contributed by @sumnerevans at @beeper. diff --git a/docs/admin_api/user_admin_api.md b/docs/admin_api/user_admin_api.md index f17e60b1cb60..23f465e98db2 100644 --- a/docs/admin_api/user_admin_api.md +++ b/docs/admin_api/user_admin_api.md @@ -1183,7 +1183,7 @@ The following parameters should be set in the URL: - `user_id` - The fully qualified MXID: for example, `@user:server.com`. The user must be local. -### Check username availability +## Check username availability Checks to see if a username is available, and valid, for the server. See [the client-server API](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) @@ -1201,7 +1201,7 @@ GET /_synapse/admin/v1/username_available?username=$localpart The request and response format is the same as the [/_matrix/client/r0/register/available](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API. -### Find a user based on their ID in an auth provider +## Find a user based on their ID in an auth provider The API is: @@ -1240,7 +1240,7 @@ Returns a `404` HTTP status code if no user was found, with a response body like _Added in Synapse 1.68.0._ -### Find a user based on their Third Party ID (ThreePID or 3PID) +## Find a user based on their Third Party ID (ThreePID or 3PID) The API is: From ce857c05d5f6fa6b66c4a59c4917c440c9b98047 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 5 Jul 2023 10:22:21 -0500 Subject: [PATCH 128/153] Add tracing to media `/upload` endpoint (#15850) Add tracing instrumentation to media `/upload` code paths to investigate https://github.com/matrix-org/synapse/issues/15841 --- changelog.d/15850.misc | 1 + synapse/media/media_repository.py | 3 +++ synapse/media/media_storage.py | 7 +++++++ synapse/media/storage_provider.py | 5 +++++ synapse/media/thumbnailer.py | 5 +++++ synapse/module_api/callbacks/spamchecker_callbacks.py | 1 + synapse/storage/databases/main/media_repository.py | 5 +++++ 7 files changed, 27 insertions(+) create mode 100644 changelog.d/15850.misc diff --git a/changelog.d/15850.misc b/changelog.d/15850.misc new file mode 100644 index 000000000000..0e49ab23fed3 --- /dev/null +++ b/changelog.d/15850.misc @@ -0,0 +1 @@ +Add tracing to media `/upload` code paths. 
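The `@trace` decorator applied throughout the diff below comes from `synapse.logging.opentracing` and records a span per call. A simplified stand-in showing the shape of such a decorator for both sync and async functions (illustrative only, it merely logs durations and is not the real implementation):

    import functools
    import inspect
    import logging
    import time

    logger = logging.getLogger(__name__)


    def trace(func):
        """Log how long each call to `func` takes, for sync or async callables."""
        if inspect.iscoroutinefunction(func):

            @functools.wraps(func)
            async def async_wrapper(*args, **kwargs):
                start = time.monotonic()
                try:
                    return await func(*args, **kwargs)
                finally:
                    logger.debug(
                        "%s took %.3fs", func.__qualname__, time.monotonic() - start
                    )

            return async_wrapper

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            start = time.monotonic()
            try:
                return func(*args, **kwargs)
            finally:
                logger.debug(
                    "%s took %.3fs", func.__qualname__, time.monotonic() - start
                )

        return wrapper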
diff --git a/synapse/media/media_repository.py b/synapse/media/media_repository.py index e81c987b10be..4b750c700b89 100644 --- a/synapse/media/media_repository.py +++ b/synapse/media/media_repository.py @@ -35,6 +35,7 @@ from synapse.config.repository import ThumbnailRequirement from synapse.http.site import SynapseRequest from synapse.logging.context import defer_to_thread +from synapse.logging.opentracing import trace from synapse.media._base import ( FileInfo, Responder, @@ -174,6 +175,7 @@ def mark_recently_accessed(self, server_name: Optional[str], media_id: str) -> N else: self.recently_accessed_locals.add(media_id) + @trace async def create_content( self, media_type: str, @@ -710,6 +712,7 @@ async def generate_remote_exact_thumbnail( # Could not generate thumbnail. return None + @trace async def _generate_thumbnails( self, server_name: Optional[str], diff --git a/synapse/media/media_storage.py b/synapse/media/media_storage.py index a819d954070a..eebcbc48e8c9 100644 --- a/synapse/media/media_storage.py +++ b/synapse/media/media_storage.py @@ -38,6 +38,7 @@ from synapse.api.errors import NotFoundError from synapse.logging.context import defer_to_thread, make_deferred_yieldable +from synapse.logging.opentracing import trace from synapse.util import Clock from synapse.util.file_consumer import BackgroundFileConsumer @@ -76,6 +77,7 @@ def __init__( self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker self.clock = hs.get_clock() + @trace async def store_file(self, source: IO, file_info: FileInfo) -> str: """Write `source` to the on disk media store, and also any other configured storage providers @@ -95,10 +97,12 @@ async def store_file(self, source: IO, file_info: FileInfo) -> str: return fname + @trace async def write_to_file(self, source: IO, output: IO) -> None: """Asynchronously write the `source` to `output`.""" await defer_to_thread(self.reactor, _write_file_synchronously, source, output) + @trace @contextlib.contextmanager def store_into_file( self, file_info: FileInfo @@ -214,6 +218,7 @@ async def fetch_media(self, file_info: FileInfo) -> Optional[Responder]: return None + @trace async def ensure_media_is_in_local_cache(self, file_info: FileInfo) -> str: """Ensures that the given file is in the local cache. Attempts to download it from storage providers if it isn't. @@ -259,6 +264,7 @@ async def ensure_media_is_in_local_cache(self, file_info: FileInfo) -> str: raise NotFoundError() + @trace def _file_info_to_path(self, file_info: FileInfo) -> str: """Converts file_info into a relative path. @@ -301,6 +307,7 @@ def _file_info_to_path(self, file_info: FileInfo) -> str: return self.filepaths.local_media_filepath_rel(file_info.file_id) +@trace def _write_file_synchronously(source: IO, dest: IO) -> None: """Write `source` to the file like `dest` synchronously. Should be called from a thread. 
diff --git a/synapse/media/storage_provider.py b/synapse/media/storage_provider.py index 1c9b71d69c77..0aea3a7a0d87 100644 --- a/synapse/media/storage_provider.py +++ b/synapse/media/storage_provider.py @@ -20,6 +20,7 @@ from synapse.config._base import Config from synapse.logging.context import defer_to_thread, run_in_background +from synapse.logging.opentracing import trace from synapse.util.async_helpers import maybe_awaitable from ._base import FileInfo, Responder @@ -86,6 +87,7 @@ def __init__( def __str__(self) -> str: return "StorageProviderWrapper[%s]" % (self.backend,) + @trace async def store_file(self, path: str, file_info: FileInfo) -> None: if not file_info.server_name and not self.store_local: return None @@ -114,6 +116,7 @@ async def store() -> None: run_in_background(store) + @trace async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]: if file_info.url_cache: # Files in the URL preview cache definitely aren't stored here, @@ -141,6 +144,7 @@ def __init__(self, hs: "HomeServer", config: str): def __str__(self) -> str: return "FileStorageProviderBackend[%s]" % (self.base_directory,) + @trace async def store_file(self, path: str, file_info: FileInfo) -> None: """See StorageProvider.store_file""" @@ -159,6 +163,7 @@ async def store_file(self, path: str, file_info: FileInfo) -> None: backup_fname, ) + @trace async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]: """See StorageProvider.fetch""" diff --git a/synapse/media/thumbnailer.py b/synapse/media/thumbnailer.py index 73d2272f0583..2bfa58ceee5d 100644 --- a/synapse/media/thumbnailer.py +++ b/synapse/media/thumbnailer.py @@ -19,6 +19,8 @@ from PIL import Image +from synapse.logging.opentracing import trace + logger = logging.getLogger(__name__) EXIF_ORIENTATION_TAG = 0x0112 @@ -82,6 +84,7 @@ def __init__(self, input_path: str): # A lot of parsing errors can happen when parsing EXIF logger.info("Error parsing image EXIF information: %s", e) + @trace def transpose(self) -> Tuple[int, int]: """Transpose the image using its EXIF Orientation tag @@ -133,6 +136,7 @@ def _resize(self, width: int, height: int) -> Image.Image: self.image = self.image.convert("RGB") return self.image.resize((width, height), Image.LANCZOS) + @trace def scale(self, width: int, height: int, output_type: str) -> BytesIO: """Rescales the image to the given dimensions. 
@@ -142,6 +146,7 @@ def scale(self, width: int, height: int, output_type: str) -> BytesIO: with self._resize(width, height) as scaled: return self._encode_image(scaled, output_type) + @trace def crop(self, width: int, height: int, output_type: str) -> BytesIO: """Rescales and crops the image to the given dimensions preserving aspect:: diff --git a/synapse/module_api/callbacks/spamchecker_callbacks.py b/synapse/module_api/callbacks/spamchecker_callbacks.py index 7cee442145b9..e19145032389 100644 --- a/synapse/module_api/callbacks/spamchecker_callbacks.py +++ b/synapse/module_api/callbacks/spamchecker_callbacks.py @@ -788,6 +788,7 @@ async def check_registration_for_spam( return RegistrationBehaviour.ALLOW + @trace async def check_media_file_for_spam( self, file_wrapper: ReadableFileWrapper, file_info: FileInfo ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: diff --git a/synapse/storage/databases/main/media_repository.py b/synapse/storage/databases/main/media_repository.py index fa8be214ce90..8cebeb51893e 100644 --- a/synapse/storage/databases/main/media_repository.py +++ b/synapse/storage/databases/main/media_repository.py @@ -27,6 +27,7 @@ ) from synapse.api.constants import Direction +from synapse.logging.opentracing import trace from synapse.storage._base import SQLBaseStore from synapse.storage.database import ( DatabasePool, @@ -328,6 +329,7 @@ def _get_local_media_ids_txn(txn: LoggingTransaction) -> List[str]: "get_local_media_ids", _get_local_media_ids_txn ) + @trace async def store_local_media( self, media_id: str, @@ -447,6 +449,7 @@ async def get_local_media_thumbnails(self, media_id: str) -> List[Dict[str, Any] desc="get_local_media_thumbnails", ) + @trace async def store_local_thumbnail( self, media_id: str, @@ -568,6 +571,7 @@ async def get_remote_media_thumbnails( desc="get_remote_media_thumbnails", ) + @trace async def get_remote_media_thumbnail( self, origin: str, @@ -599,6 +603,7 @@ async def get_remote_media_thumbnail( desc="get_remote_media_thumbnail", ) + @trace async def store_remote_media_thumbnail( self, origin: str, From 39d131b016673bbd4d3c28095c8838b8c6dc0953 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 5 Jul 2023 17:25:00 +0100 Subject: [PATCH 129/153] Add basic read/write lock (#15782) --- changelog.d/15782.misc | 1 + synapse/_scripts/synapse_port_db.py | 9 +- synapse/storage/databases/main/lock.py | 224 ++++++++++---- .../04_read_write_locks_triggers.sql.postgres | 152 ++++++++++ .../04_read_write_locks_triggers.sql.sqlite | 119 ++++++++ tests/storage/databases/main/test_lock.py | 283 +++++++++++++++++- 6 files changed, 730 insertions(+), 58 deletions(-) create mode 100644 changelog.d/15782.misc create mode 100644 synapse/storage/schema/main/delta/78/04_read_write_locks_triggers.sql.postgres create mode 100644 synapse/storage/schema/main/delta/78/04_read_write_locks_triggers.sql.sqlite diff --git a/changelog.d/15782.misc b/changelog.d/15782.misc new file mode 100644 index 000000000000..aae493b97378 --- /dev/null +++ b/changelog.d/15782.misc @@ -0,0 +1 @@ +Add read/write style cross-worker locks. 
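The new store methods below provide "many readers or one writer" semantics across worker processes by inserting rows into a `worker_read_write_locks` table guarded by constraints and triggers. The contract itself can be shown with a small in-process asyncio sketch (illustrative only, it does not coordinate across processes the way the database-backed locks do):

    import asyncio
    from contextlib import asynccontextmanager


    class ReadWriteLock:
        """Any number of concurrent readers, or exactly one writer, never both."""

        def __init__(self) -> None:
            self._readers = 0
            self._no_readers = asyncio.Event()
            self._no_readers.set()
            self._write_lock = asyncio.Lock()

        @asynccontextmanager
        async def read(self):
            async with self._write_lock:  # new readers queue behind a writer
                self._readers += 1
                self._no_readers.clear()
            try:
                yield
            finally:
                self._readers -= 1
                if self._readers == 0:
                    self._no_readers.set()

        @asynccontextmanager
        async def write(self):
            async with self._write_lock:
                await self._no_readers.wait()  # wait for in-flight readers to finish
                yield


    async def main() -> None:
        lock = ReadWriteLock()
        async with lock.read():
            async with lock.read():  # two readers may hold the lock at once
                pass
        async with lock.write():  # a writer holds it exclusively
            pass


    asyncio.run(main())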
diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index e126a2e0c573..7c4aa0afa269 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -197,6 +197,11 @@ "ui_auth_sessions", "ui_auth_sessions_credentials", "ui_auth_sessions_ips", + # Ignore the worker locks table, as a) there shouldn't be any acquired locks + # after porting, and b) the circular foreign key constraints make it hard to + # port. + "worker_read_write_locks_mode", + "worker_read_write_locks", } @@ -805,7 +810,9 @@ def alter_table(txn: LoggingTransaction) -> None: ) # Map from table name to args passed to `handle_table`, i.e. a tuple # of: `postgres_size`, `table_size`, `forward_chunk`, `backward_chunk`. - tables_to_port_info_map = {r[0]: r[1:] for r in setup_res} + tables_to_port_info_map = { + r[0]: r[1:] for r in setup_res if r[0] not in IGNORED_TABLES + } # Step 5. Do the copying. # diff --git a/synapse/storage/databases/main/lock.py b/synapse/storage/databases/main/lock.py index 7270ef09da9e..c89b4f7919a4 100644 --- a/synapse/storage/databases/main/lock.py +++ b/synapse/storage/databases/main/lock.py @@ -25,6 +25,7 @@ LoggingDatabaseConnection, LoggingTransaction, ) +from synapse.storage.engines import PostgresEngine from synapse.util import Clock from synapse.util.stringutils import random_string @@ -68,12 +69,20 @@ def __init__( self._reactor = hs.get_reactor() self._instance_name = hs.get_instance_id() - # A map from `(lock_name, lock_key)` to the token of any locks that we - # think we currently hold. - self._live_tokens: WeakValueDictionary[ + # A map from `(lock_name, lock_key)` to lock that we think we + # currently hold. + self._live_lock_tokens: WeakValueDictionary[ Tuple[str, str], Lock ] = WeakValueDictionary() + # A map from `(lock_name, lock_key, token)` to read/write lock that we + # think we currently hold. For a given lock_name/lock_key, there can be + # multiple read locks at a time but only one write lock (no mixing read + # and write locks at the same time). + self._live_read_write_lock_tokens: WeakValueDictionary[ + Tuple[str, str, str], Lock + ] = WeakValueDictionary() + # When we shut down we want to remove the locks. Technically this can # lead to a race, as we may drop the lock while we are still processing. # However, a) it should be a small window, b) the lock is best effort @@ -91,11 +100,13 @@ async def _on_shutdown(self) -> None: """Called when the server is shutting down""" logger.info("Dropping held locks due to shutdown") - # We need to take a copy of the tokens dict as dropping the locks will - # cause the dictionary to change. - locks = dict(self._live_tokens) + # We need to take a copy of the locks as dropping the locks will cause + # the dictionary to change. + locks = list(self._live_lock_tokens.values()) + list( + self._live_read_write_lock_tokens.values() + ) - for lock in locks.values(): + for lock in locks: await lock.release() logger.info("Dropped locks due to shutdown") @@ -122,7 +133,7 @@ async def _try_acquire_lock( """ # Check if this process has taken out a lock and if it's still valid. 
- lock = self._live_tokens.get((lock_name, lock_key)) + lock = self._live_lock_tokens.get((lock_name, lock_key)) if lock and await lock.is_still_valid(): return None @@ -176,61 +187,111 @@ def _try_acquire_lock_txn(txn: LoggingTransaction) -> bool: self._reactor, self._clock, self, + read_write=False, lock_name=lock_name, lock_key=lock_key, token=token, ) - self._live_tokens[(lock_name, lock_key)] = lock + self._live_lock_tokens[(lock_name, lock_key)] = lock return lock - async def _is_lock_still_valid( - self, lock_name: str, lock_key: str, token: str - ) -> bool: - """Checks whether this instance still holds the lock.""" - last_renewed_ts = await self.db_pool.simple_select_one_onecol( - table="worker_locks", - keyvalues={ - "lock_name": lock_name, - "lock_key": lock_key, - "token": token, - }, - retcol="last_renewed_ts", - allow_none=True, - desc="is_lock_still_valid", - ) - return ( - last_renewed_ts is not None - and self._clock.time_msec() - _LOCK_TIMEOUT_MS < last_renewed_ts - ) + async def try_acquire_read_write_lock( + self, + lock_name: str, + lock_key: str, + write: bool, + ) -> Optional["Lock"]: + """Try to acquire a lock for the given name/key. Will return an async + context manager if the lock is successfully acquired, which *must* be + used (otherwise the lock will leak). + """ - async def _renew_lock(self, lock_name: str, lock_key: str, token: str) -> None: - """Attempt to renew the lock if we still hold it.""" - await self.db_pool.simple_update( - table="worker_locks", - keyvalues={ - "lock_name": lock_name, - "lock_key": lock_key, - "token": token, - }, - updatevalues={"last_renewed_ts": self._clock.time_msec()}, - desc="renew_lock", - ) + now = self._clock.time_msec() + token = random_string(6) - async def _drop_lock(self, lock_name: str, lock_key: str, token: str) -> None: - """Attempt to drop the lock, if we still hold it""" - await self.db_pool.simple_delete( - table="worker_locks", - keyvalues={ - "lock_name": lock_name, - "lock_key": lock_key, - "token": token, - }, - desc="drop_lock", + def _try_acquire_read_write_lock_txn(txn: LoggingTransaction) -> None: + # We attempt to acquire the lock by inserting into + # `worker_read_write_locks` and seeing if that fails any + # constraints. If it doesn't then we have acquired the lock, + # otherwise we haven't. + # + # Before that though we clear the table of any stale locks. + + delete_sql = """ + DELETE FROM worker_read_write_locks + WHERE last_renewed_ts < ? AND lock_name = ? AND lock_key = ?; + """ + + insert_sql = """ + INSERT INTO worker_read_write_locks (lock_name, lock_key, write_lock, instance_name, token, last_renewed_ts) + VALUES (?, ?, ?, ?, ?, ?) + """ + + if isinstance(self.database_engine, PostgresEngine): + # For Postgres we can send these queries at the same time. + txn.execute( + delete_sql + ";" + insert_sql, + ( + # DELETE args + now - _LOCK_TIMEOUT_MS, + lock_name, + lock_key, + # UPSERT args + lock_name, + lock_key, + write, + self._instance_name, + token, + now, + ), + ) + else: + # For SQLite these need to be two queries. 
+ txn.execute( + delete_sql, + ( + now - _LOCK_TIMEOUT_MS, + lock_name, + lock_key, + ), + ) + txn.execute( + insert_sql, + ( + lock_name, + lock_key, + write, + self._instance_name, + token, + now, + ), + ) + + return + + try: + await self.db_pool.runInteraction( + "try_acquire_read_write_lock", + _try_acquire_read_write_lock_txn, + ) + except self.database_engine.module.IntegrityError: + return None + + lock = Lock( + self._reactor, + self._clock, + self, + read_write=True, + lock_name=lock_name, + lock_key=lock_key, + token=token, ) - self._live_tokens.pop((lock_name, lock_key), None) + self._live_read_write_lock_tokens[(lock_name, lock_key, token)] = lock + + return lock class Lock: @@ -259,6 +320,7 @@ def __init__( reactor: IReactorCore, clock: Clock, store: LockStore, + read_write: bool, lock_name: str, lock_key: str, token: str, @@ -266,13 +328,23 @@ def __init__( self._reactor = reactor self._clock = clock self._store = store + self._read_write = read_write self._lock_name = lock_name self._lock_key = lock_key self._token = token + self._table = "worker_read_write_locks" if read_write else "worker_locks" + self._looping_call = clock.looping_call( - self._renew, _RENEWAL_INTERVAL_MS, store, lock_name, lock_key, token + self._renew, + _RENEWAL_INTERVAL_MS, + store, + clock, + read_write, + lock_name, + lock_key, + token, ) self._dropped = False @@ -281,6 +353,8 @@ def __init__( @wrap_as_background_process("Lock._renew") async def _renew( store: LockStore, + clock: Clock, + read_write: bool, lock_name: str, lock_key: str, token: str, @@ -291,12 +365,34 @@ async def _renew( don't end up with a reference to `self` in the reactor, which would stop this from being cleaned up if we dropped the context manager. """ - await store._renew_lock(lock_name, lock_key, token) + table = "worker_read_write_locks" if read_write else "worker_locks" + await store.db_pool.simple_update( + table=table, + keyvalues={ + "lock_name": lock_name, + "lock_key": lock_key, + "token": token, + }, + updatevalues={"last_renewed_ts": clock.time_msec()}, + desc="renew_lock", + ) async def is_still_valid(self) -> bool: """Check if the lock is still held by us""" - return await self._store._is_lock_still_valid( - self._lock_name, self._lock_key, self._token + last_renewed_ts = await self._store.db_pool.simple_select_one_onecol( + table=self._table, + keyvalues={ + "lock_name": self._lock_name, + "lock_key": self._lock_key, + "token": self._token, + }, + retcol="last_renewed_ts", + allow_none=True, + desc="is_lock_still_valid", + ) + return ( + last_renewed_ts is not None + and self._clock.time_msec() - _LOCK_TIMEOUT_MS < last_renewed_ts ) async def __aenter__(self) -> None: @@ -325,7 +421,23 @@ async def release(self) -> None: if self._looping_call.running: self._looping_call.stop() - await self._store._drop_lock(self._lock_name, self._lock_key, self._token) + await self._store.db_pool.simple_delete( + table=self._table, + keyvalues={ + "lock_name": self._lock_name, + "lock_key": self._lock_key, + "token": self._token, + }, + desc="drop_lock", + ) + + if self._read_write: + self._store._live_read_write_lock_tokens.pop( + (self._lock_name, self._lock_key, self._token), None + ) + else: + self._store._live_lock_tokens.pop((self._lock_name, self._lock_key), None) + self._dropped = True def __del__(self) -> None: diff --git a/synapse/storage/schema/main/delta/78/04_read_write_locks_triggers.sql.postgres b/synapse/storage/schema/main/delta/78/04_read_write_locks_triggers.sql.postgres new file mode 100644 index 
000000000000..e1a41be9c948 --- /dev/null +++ b/synapse/storage/schema/main/delta/78/04_read_write_locks_triggers.sql.postgres @@ -0,0 +1,152 @@ +/* Copyright 2023 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +-- We implement read/write style locks by using two tables with mutual foreign +-- key constraints. Note that this implementation is vulnerable to starving +-- writers if read locks repeatedly get acquired. +-- +-- The first table (`worker_read_write_locks_mode`) indicates that a given lock +-- has either been acquired in read mode *or* write mode, but not both. This is +-- enforced by the unique constraint. Each instance of a lock being acquired is +-- associated with a random `token`. +-- +-- The second table (`worker_read_write_locks`) tracks who has currently +-- acquired a given lock. For a given lock_name/lock_key, there can be multiple +-- read locks at a time but only one write lock (no mixing read and write locks +-- at the same time). +-- +-- The foreign key from the second to first table enforces that for any given +-- lock the second table cannot have a mix of rows with read or write. +-- +-- The foreign key from the first to second table enforces that we don't have a +-- row for a lock in the first table if not in the second table. +-- +-- +-- Furthermore, we add some triggers to automatically keep the first table up to +-- date when inserting/deleting from the second table. This reduces the number +-- of round trips needed to acquire and release locks, as those operations +-- simply become an INSERT or DELETE. These triggers are added in a separate +-- delta due to database specific syntax. + + +-- A table to track whether a lock is currently acquired, and if so whether its +-- in read or write mode. +CREATE TABLE worker_read_write_locks_mode ( + lock_name TEXT NOT NULL, + lock_key TEXT NOT NULL, + -- Whether this lock is in read (false) or write (true) mode + write_lock BOOLEAN NOT NULL, + -- A token that has currently acquired the lock. We need this so that we can + -- add a foreign constraint from this table to `worker_read_write_locks`. + token TEXT NOT NULL +); + +-- Ensure that we can only have one row per lock +CREATE UNIQUE INDEX worker_read_write_locks_mode_key ON worker_read_write_locks_mode (lock_name, lock_key); +-- We need this (redundant) constraint so that we can have a foreign key +-- constraint against this table. +CREATE UNIQUE INDEX worker_read_write_locks_mode_type ON worker_read_write_locks_mode (lock_name, lock_key, write_lock); + + +-- A table to track who has currently acquired a given lock. +CREATE TABLE worker_read_write_locks ( + lock_name TEXT NOT NULL, + lock_key TEXT NOT NULL, + -- We write the instance name to ease manual debugging, we don't ever read + -- from it. + -- Note: instance names aren't guarenteed to be unique. + instance_name TEXT NOT NULL, + -- Whether the process has taken out a "read" or a "write" lock. 
+ write_lock BOOLEAN NOT NULL, + -- A random string generated each time an instance takes out a lock. Used by + -- the instance to tell whether the lock is still held by it (e.g. in the + -- case where the process stalls for a long time the lock may time out and + -- be taken out by another instance, at which point the original instance + -- can tell it no longer holds the lock as the tokens no longer match). + token TEXT NOT NULL, + last_renewed_ts BIGINT NOT NULL, + + -- This constraint ensures that a given lock has only been acquired in read + -- xor write mode, but not both. + FOREIGN KEY (lock_name, lock_key, write_lock) REFERENCES worker_read_write_locks_mode (lock_name, lock_key, write_lock) +); + +CREATE UNIQUE INDEX worker_read_write_locks_key ON worker_read_write_locks (lock_name, lock_key, token); +-- Ensures that only one instance can acquire a lock in write mode at a time. +CREATE UNIQUE INDEX worker_read_write_locks_write ON worker_read_write_locks (lock_name, lock_key) WHERE write_lock; + + +-- Add a foreign key constraint to ensure that if a lock is in +-- `worker_read_write_locks_mode` then there must be a corresponding row in +-- `worker_read_write_locks` (i.e. we don't accidentally end up with a row in +-- `worker_read_write_locks_mode` when the lock is not currently acquired). +-- +-- We only add to PostgreSQL as SQLite does not support adding constraints +-- after table creation, and so doesn't support "circular" foreign key +-- constraints. +ALTER TABLE worker_read_write_locks_mode ADD CONSTRAINT worker_read_write_locks_mode_foreign + FOREIGN KEY (lock_name, lock_key, token) REFERENCES worker_read_write_locks(lock_name, lock_key, token) DEFERRABLE INITIALLY DEFERRED; + + +-- Add a trigger to UPSERT into `worker_read_write_locks_mode` whenever we try +-- and acquire a lock, i.e. insert into `worker_read_write_locks`, +CREATE OR REPLACE FUNCTION upsert_read_write_lock_parent() RETURNS trigger AS $$ +BEGIN + INSERT INTO worker_read_write_locks_mode (lock_name, lock_key, write_lock, token) + VALUES (NEW.lock_name, NEW.lock_key, NEW.write_lock, NEW.token) + ON CONFLICT (lock_name, lock_key) + DO NOTHING; + RETURN NEW; +END +$$ +LANGUAGE plpgsql; + +CREATE TRIGGER upsert_read_write_lock_parent_trigger BEFORE INSERT ON worker_read_write_locks + FOR EACH ROW + EXECUTE PROCEDURE upsert_read_write_lock_parent(); + + +-- Ensure that we keep `worker_read_write_locks_mode` up to date whenever a lock +-- is released (i.e. a row deleted from `worker_read_write_locks`). Either we +-- update the `worker_read_write_locks_mode.token` to match another instance +-- that has currently acquired the lock, or we delete the row if nobody has +-- currently acquired a lock. 
+CREATE OR REPLACE FUNCTION delete_read_write_lock_parent() RETURNS trigger AS $$ +DECLARE + new_token TEXT; +BEGIN + SELECT token INTO new_token FROM worker_read_write_locks + WHERE + lock_name = OLD.lock_name + AND lock_key = OLD.lock_key; + + IF NOT FOUND THEN + DELETE FROM worker_read_write_locks_mode + WHERE lock_name = OLD.lock_name AND lock_key = OLD.lock_key; + ELSE + UPDATE worker_read_write_locks_mode + SET token = new_token + WHERE lock_name = OLD.lock_name AND lock_key = OLD.lock_key; + END IF; + + RETURN NEW; +END +$$ +LANGUAGE plpgsql; + +CREATE TRIGGER delete_read_write_lock_parent_trigger AFTER DELETE ON worker_read_write_locks + FOR EACH ROW + EXECUTE PROCEDURE delete_read_write_lock_parent(); diff --git a/synapse/storage/schema/main/delta/78/04_read_write_locks_triggers.sql.sqlite b/synapse/storage/schema/main/delta/78/04_read_write_locks_triggers.sql.sqlite new file mode 100644 index 000000000000..be2dfbbb8a0a --- /dev/null +++ b/synapse/storage/schema/main/delta/78/04_read_write_locks_triggers.sql.sqlite @@ -0,0 +1,119 @@ +/* Copyright 2023 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +-- c.f. the postgres version for context. The tables and constraints are the +-- same, however they need to be defined slightly differently to work around how +-- each database handles circular foreign key references. + + + +-- A table to track whether a lock is currently acquired, and if so whether its +-- in read or write mode. +CREATE TABLE worker_read_write_locks_mode ( + lock_name TEXT NOT NULL, + lock_key TEXT NOT NULL, + -- Whether this lock is in read (false) or write (true) mode + write_lock BOOLEAN NOT NULL, + -- A token that has currently acquired the lock. We need this so that we can + -- add a foreign constraint from this table to `worker_read_write_locks`. + token TEXT NOT NULL, + -- Add a foreign key constraint to ensure that if a lock is in + -- `worker_read_write_locks_mode` then there must be a corresponding row in + -- `worker_read_write_locks` (i.e. we don't accidentally end up with a row in + -- `worker_read_write_locks_mode` when the lock is not currently acquired). + FOREIGN KEY (lock_name, lock_key, token) REFERENCES worker_read_write_locks(lock_name, lock_key, token) DEFERRABLE INITIALLY DEFERRED +); + +-- Ensure that we can only have one row per lock +CREATE UNIQUE INDEX worker_read_write_locks_mode_key ON worker_read_write_locks_mode (lock_name, lock_key); +-- We need this (redundant) constraint so that we can have a foreign key +-- constraint against this table. +CREATE UNIQUE INDEX worker_read_write_locks_mode_type ON worker_read_write_locks_mode (lock_name, lock_key, write_lock); + + +-- A table to track who has currently acquired a given lock. +CREATE TABLE worker_read_write_locks ( + lock_name TEXT NOT NULL, + lock_key TEXT NOT NULL, + -- We write the instance name to ease manual debugging, we don't ever read + -- from it. + -- Note: instance names aren't guarenteed to be unique. 
+ instance_name TEXT NOT NULL, + -- Whether the process has taken out a "read" or a "write" lock. + write_lock BOOLEAN NOT NULL, + -- A random string generated each time an instance takes out a lock. Used by + -- the instance to tell whether the lock is still held by it (e.g. in the + -- case where the process stalls for a long time the lock may time out and + -- be taken out by another instance, at which point the original instance + -- can tell it no longer holds the lock as the tokens no longer match). + token TEXT NOT NULL, + last_renewed_ts BIGINT NOT NULL, + + -- This constraint ensures that a given lock has only been acquired in read + -- xor write mode, but not both. + FOREIGN KEY (lock_name, lock_key, write_lock) REFERENCES worker_read_write_locks_mode (lock_name, lock_key, write_lock) +); + +CREATE UNIQUE INDEX worker_read_write_locks_key ON worker_read_write_locks (lock_name, lock_key, token); +-- Ensures that only one instance can acquire a lock in write mode at a time. +CREATE UNIQUE INDEX worker_read_write_locks_write ON worker_read_write_locks (lock_name, lock_key) WHERE write_lock; + + +-- Add a trigger to UPSERT into `worker_read_write_locks_mode` whenever we try +-- and acquire a lock, i.e. insert into `worker_read_write_locks`, +CREATE TRIGGER IF NOT EXISTS upsert_read_write_lock_parent_trigger +BEFORE INSERT ON worker_read_write_locks +FOR EACH ROW +BEGIN + -- First ensure that `worker_read_write_locks_mode` doesn't have stale + -- entries in it, as on SQLite we don't have the foreign key constraint to + -- enforce this. + DELETE FROM worker_read_write_locks_mode + WHERE lock_name = NEW.lock_name AND lock_key = NEW.lock_key + AND NOT EXISTS ( + SELECT 1 FROM worker_read_write_locks + WHERE lock_name = NEW.lock_name AND lock_key = NEW.lock_key + ); + + INSERT INTO worker_read_write_locks_mode (lock_name, lock_key, write_lock, token) + VALUES (NEW.lock_name, NEW.lock_key, NEW.write_lock, NEW.token) + ON CONFLICT (lock_name, lock_key) + DO NOTHING; +END; + +-- Ensure that we keep `worker_read_write_locks_mode` up to date whenever a lock +-- is released (i.e. a row deleted from `worker_read_write_locks`). Either we +-- update the `worker_read_write_locks_mode.token` to match another instance +-- that has currently acquired the lock, or we delete the row if nobody has +-- currently acquired a lock. 
+CREATE TRIGGER IF NOT EXISTS delete_read_write_lock_parent_trigger +AFTER DELETE ON worker_read_write_locks +FOR EACH ROW +BEGIN + DELETE FROM worker_read_write_locks_mode + WHERE lock_name = OLD.lock_name AND lock_key = OLD.lock_key + AND NOT EXISTS ( + SELECT 1 FROM worker_read_write_locks + WHERE lock_name = OLD.lock_name AND lock_key = OLD.lock_key + ); + + UPDATE worker_read_write_locks_mode + SET token = ( + SELECT token FROM worker_read_write_locks + WHERE lock_name = OLD.lock_name AND lock_key = OLD.lock_key + ) + WHERE lock_name = OLD.lock_name AND lock_key = OLD.lock_key; +END; diff --git a/tests/storage/databases/main/test_lock.py b/tests/storage/databases/main/test_lock.py index 56cb49d9b58a..ad454f6dd81d 100644 --- a/tests/storage/databases/main/test_lock.py +++ b/tests/storage/databases/main/test_lock.py @@ -166,4 +166,285 @@ def test_shutdown(self) -> None: # Now call the shutdown code self.get_success(self.store._on_shutdown()) - self.assertEqual(self.store._live_tokens, {}) + self.assertEqual(self.store._live_lock_tokens, {}) + + +class ReadWriteLockTestCase(unittest.HomeserverTestCase): + """Test the read/write lock implementation.""" + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + + def test_acquire_write_contention(self) -> None: + """Test that we can only acquire one write lock at a time""" + # Track the number of tasks holding the lock. + # Should be at most 1. + in_lock = 0 + max_in_lock = 0 + + release_lock: "Deferred[None]" = Deferred() + + async def task() -> None: + nonlocal in_lock + nonlocal max_in_lock + + lock = await self.store.try_acquire_read_write_lock( + "name", "key", write=True + ) + if not lock: + return + + async with lock: + in_lock += 1 + max_in_lock = max(max_in_lock, in_lock) + + # Block to allow other tasks to attempt to take the lock. + await release_lock + + in_lock -= 1 + + # Start 3 tasks. + task1 = defer.ensureDeferred(task()) + task2 = defer.ensureDeferred(task()) + task3 = defer.ensureDeferred(task()) + + # Give the reactor a kick so that the database transaction returns. + self.pump() + + release_lock.callback(None) + + # Run the tasks to completion. + # To work around `Linearizer`s using a different reactor to sleep when + # contended (#12841), we call `runUntilCurrent` on + # `twisted.internet.reactor`, which is a different reactor to that used + # by the homeserver. + assert isinstance(reactor, ReactorBase) + self.get_success(task1) + reactor.runUntilCurrent() + self.get_success(task2) + reactor.runUntilCurrent() + self.get_success(task3) + + # At most one task should have held the lock at a time. + self.assertEqual(max_in_lock, 1) + + def test_acquire_multiple_reads(self) -> None: + """Test that we can acquire multiple read locks at a time""" + # Track the number of tasks holding the lock. + in_lock = 0 + max_in_lock = 0 + + release_lock: "Deferred[None]" = Deferred() + + async def task() -> None: + nonlocal in_lock + nonlocal max_in_lock + + lock = await self.store.try_acquire_read_write_lock( + "name", "key", write=False + ) + if not lock: + return + + async with lock: + in_lock += 1 + max_in_lock = max(max_in_lock, in_lock) + + # Block to allow other tasks to attempt to take the lock. + await release_lock + + in_lock -= 1 + + # Start 3 tasks. + task1 = defer.ensureDeferred(task()) + task2 = defer.ensureDeferred(task()) + task3 = defer.ensureDeferred(task()) + + # Give the reactor a kick so that the database transaction returns. 
+ self.pump() + + release_lock.callback(None) + + # Run the tasks to completion. + # To work around `Linearizer`s using a different reactor to sleep when + # contended (#12841), we call `runUntilCurrent` on + # `twisted.internet.reactor`, which is a different reactor to that used + # by the homeserver. + assert isinstance(reactor, ReactorBase) + self.get_success(task1) + reactor.runUntilCurrent() + self.get_success(task2) + reactor.runUntilCurrent() + self.get_success(task3) + + # At most one task should have held the lock at a time. + self.assertEqual(max_in_lock, 3) + + def test_write_lock_acquired(self) -> None: + """Test that we can take out a write lock and that while we hold it + nobody else can take it out. + """ + # First to acquire this lock, so it should complete + lock = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=True) + ) + assert lock is not None + + # Enter the context manager + self.get_success(lock.__aenter__()) + + # Attempting to acquire the lock again fails, as both read and write. + lock2 = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=True) + ) + self.assertIsNone(lock2) + + lock3 = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=False) + ) + self.assertIsNone(lock3) + + # Calling `is_still_valid` reports true. + self.assertTrue(self.get_success(lock.is_still_valid())) + + # Drop the lock + self.get_success(lock.__aexit__(None, None, None)) + + # We can now acquire the lock again. + lock4 = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=True) + ) + assert lock4 is not None + self.get_success(lock4.__aenter__()) + self.get_success(lock4.__aexit__(None, None, None)) + + def test_read_lock_acquired(self) -> None: + """Test that we can take out a read lock and that while we hold it + only other reads can use it. + """ + # First to acquire this lock, so it should complete + lock = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=False) + ) + assert lock is not None + + # Enter the context manager + self.get_success(lock.__aenter__()) + + # Attempting to acquire the write lock fails + lock2 = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=True) + ) + self.assertIsNone(lock2) + + # Attempting to acquire a read lock succeeds + lock3 = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=False) + ) + assert lock3 is not None + self.get_success(lock3.__aenter__()) + + # Calling `is_still_valid` reports true. + self.assertTrue(self.get_success(lock.is_still_valid())) + + # Drop the first lock + self.get_success(lock.__aexit__(None, None, None)) + + # Attempting to acquire the write lock still fails, as lock3 is still + # active. + lock4 = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=True) + ) + self.assertIsNone(lock4) + + # Drop the still open third lock + self.get_success(lock3.__aexit__(None, None, None)) + + # We can now acquire the lock again. 
+ lock5 = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=True) + ) + assert lock5 is not None + self.get_success(lock5.__aenter__()) + self.get_success(lock5.__aexit__(None, None, None)) + + def test_maintain_lock(self) -> None: + """Test that we don't time out locks while they're still active (lock is + renewed in the background if the process is still alive)""" + + lock = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=True) + ) + assert lock is not None + + self.get_success(lock.__aenter__()) + + # Wait for ages with the lock, we should not be able to get the lock. + self.reactor.advance(5 * _LOCK_TIMEOUT_MS / 1000) + self.pump() + + lock2 = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=True) + ) + self.assertIsNone(lock2) + + self.get_success(lock.__aexit__(None, None, None)) + + def test_timeout_lock(self) -> None: + """Test that we time out locks if they're not updated for ages""" + + lock = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=True) + ) + assert lock is not None + + self.get_success(lock.__aenter__()) + + # We simulate the process getting stuck by cancelling the looping call + # that keeps the lock active. + lock._looping_call.stop() + + # Wait for the lock to timeout. + self.reactor.advance(2 * _LOCK_TIMEOUT_MS / 1000) + + lock2 = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=True) + ) + self.assertIsNotNone(lock2) + + self.assertFalse(self.get_success(lock.is_still_valid())) + + def test_drop(self) -> None: + """Test that dropping the context manager means we stop renewing the lock""" + + lock = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=True) + ) + self.assertIsNotNone(lock) + + del lock + + # Wait for the lock to timeout. 
+ self.reactor.advance(2 * _LOCK_TIMEOUT_MS / 1000) + + lock2 = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=True) + ) + self.assertIsNotNone(lock2) + + def test_shutdown(self) -> None: + """Test that shutting down Synapse releases the locks""" + # Acquire two locks + lock = self.get_success( + self.store.try_acquire_read_write_lock("name", "key", write=True) + ) + self.assertIsNotNone(lock) + lock2 = self.get_success( + self.store.try_acquire_read_write_lock("name", "key2", write=True) + ) + self.assertIsNotNone(lock2) + + # Now call the shutdown code + self.get_success(self.store._on_shutdown()) + + self.assertEqual(self.store._live_read_write_lock_tokens, {}) From 561d06b481176f61ed12f5a4723b127ff8624662 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 5 Jul 2023 18:45:42 -0500 Subject: [PATCH 130/153] Remove support for Python 3.7 (#15851) Fix https://github.com/matrix-org/synapse/issues/15836 --- .ci/scripts/calculate_jobs.py | 11 ++--- .github/workflows/release-artifacts.yml | 2 +- .github/workflows/tests.yml | 6 +-- changelog.d/15851.removal | 1 + docker/Dockerfile-dhvirtualenv | 50 +++++++++++----------- docs/setup/installation.md | 2 +- docs/upgrade.md | 12 ++++++ poetry.lock | 55 +------------------------ pyproject.toml | 5 +-- synapse/__init__.py | 4 +- synapse/util/check_dependencies.py | 5 +-- tests/metrics/test_metrics.py | 10 +---- 12 files changed, 55 insertions(+), 108 deletions(-) create mode 100644 changelog.d/15851.removal diff --git a/.ci/scripts/calculate_jobs.py b/.ci/scripts/calculate_jobs.py index b41ec0b6e2dc..c2c18b48e3d7 100755 --- a/.ci/scripts/calculate_jobs.py +++ b/.ci/scripts/calculate_jobs.py @@ -29,11 +29,12 @@ def set_output(key: str, value: str): # First calculate the various trial jobs. 
# -# For each type of test we only run on Py3.7 on PRs +# For PRs, we only run each type of test with the oldest Python version supported (which +# is Python 3.8 right now) trial_sqlite_tests = [ { - "python-version": "3.7", + "python-version": "3.8", "database": "sqlite", "extras": "all", } @@ -46,13 +47,13 @@ def set_output(key: str, value: str): "database": "sqlite", "extras": "all", } - for version in ("3.8", "3.9", "3.10", "3.11") + for version in ("3.9", "3.10", "3.11") ) trial_postgres_tests = [ { - "python-version": "3.7", + "python-version": "3.8", "database": "postgres", "postgres-version": "11", "extras": "all", @@ -71,7 +72,7 @@ def set_output(key: str, value: str): trial_no_extra_tests = [ { - "python-version": "3.7", + "python-version": "3.8", "database": "sqlite", "extras": "", } diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index 09812004017f..f331f67d9728 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -144,7 +144,7 @@ jobs: - name: Only build a single wheel on PR if: startsWith(github.ref, 'refs/pull/') - run: echo "CIBW_BUILD="cp37-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV + run: echo "CIBW_BUILD="cp38-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV - name: Build wheels run: python -m cibuildwheel --output-dir wheelhouse diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 6c2298499777..0a01e8298468 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -320,7 +320,7 @@ jobs: - uses: actions/setup-python@v4 with: - python-version: '3.7' + python-version: '3.8' - name: Prepare old deps if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true' @@ -362,7 +362,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["pypy-3.7"] + python-version: ["pypy-3.8"] extras: ["all"] steps: @@ -477,7 +477,7 @@ jobs: strategy: matrix: include: - - python-version: "3.7" + - python-version: "3.8" postgres-version: "11" - python-version: "3.11" diff --git a/changelog.d/15851.removal b/changelog.d/15851.removal new file mode 100644 index 000000000000..e08df4c1366a --- /dev/null +++ b/changelog.d/15851.removal @@ -0,0 +1 @@ +Remove support for Python 3.7. diff --git a/docker/Dockerfile-dhvirtualenv b/docker/Dockerfile-dhvirtualenv index 861129ebc225..b7679924c24a 100644 --- a/docker/Dockerfile-dhvirtualenv +++ b/docker/Dockerfile-dhvirtualenv @@ -28,12 +28,12 @@ FROM docker.io/library/${distro} as builder RUN apt-get update -qq -o Acquire::Languages=none RUN env DEBIAN_FRONTEND=noninteractive apt-get install \ - -yqq --no-install-recommends \ - build-essential \ - ca-certificates \ - devscripts \ - equivs \ - wget + -yqq --no-install-recommends \ + build-essential \ + ca-certificates \ + devscripts \ + equivs \ + wget # fetch and unpack the package # We are temporarily using a fork of dh-virtualenv due to an incompatibility with Python 3.11, which ships with @@ -62,33 +62,29 @@ FROM docker.io/library/${distro} ARG distro="" ENV distro ${distro} -# Python < 3.7 assumes LANG="C" means ASCII-only and throws on printing unicode -# http://bugs.python.org/issue19846 -ENV LANG C.UTF-8 - # Install the build dependencies # # NB: keep this list in sync with the list of build-deps in debian/control # TODO: it would be nice to do that automatically. 
RUN apt-get update -qq -o Acquire::Languages=none \ && env DEBIAN_FRONTEND=noninteractive apt-get install \ - -yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \ - build-essential \ - curl \ - debhelper \ - devscripts \ - libsystemd-dev \ - lsb-release \ - pkg-config \ - python3-dev \ - python3-pip \ - python3-setuptools \ - python3-venv \ - sqlite3 \ - libpq-dev \ - libicu-dev \ - pkg-config \ - xmlsec1 + -yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \ + build-essential \ + curl \ + debhelper \ + devscripts \ + libsystemd-dev \ + lsb-release \ + pkg-config \ + python3-dev \ + python3-pip \ + python3-setuptools \ + python3-venv \ + sqlite3 \ + libpq-dev \ + libicu-dev \ + pkg-config \ + xmlsec1 # Install rust and ensure it's in the PATH ENV RUSTUP_HOME=/rust diff --git a/docs/setup/installation.md b/docs/setup/installation.md index 86e506a3e231..4ca8c6b69786 100644 --- a/docs/setup/installation.md +++ b/docs/setup/installation.md @@ -200,7 +200,7 @@ When following this route please make sure that the [Platform-specific prerequis System requirements: - POSIX-compliant system (tested on Linux & OS X) -- Python 3.7 or later, up to Python 3.11. +- Python 3.8 or later, up to Python 3.11. - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org If building on an uncommon architecture for which pre-built wheels are diff --git a/docs/upgrade.md b/docs/upgrade.md index 4cd38b13932a..384f4010b445 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -87,6 +87,18 @@ process, for example: wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb ``` + +# Upgrading to v1.88.0 + +## Minimum supported Python version + +The minimum supported Python version has been increased from v3.7 to v3.8. +You will need Python 3.8 to run Synapse v1.88.0 (due out July 18th, 2023). + +If you use current versions of the Matrix.org-distributed Debian +packages or Docker images, no action is required. 
+ + # Upgrading to v1.86.0 ## Minimum supported Rust version diff --git a/poetry.lock b/poetry.lock index 9aaf5c7de724..c62337053e70 100644 --- a/poetry.lock +++ b/poetry.lock @@ -41,9 +41,6 @@ files = [ {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, ] -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] dev = ["attrs[docs,tests]", "pre-commit"] @@ -190,7 +187,6 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] @@ -412,7 +408,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "click-default-group" @@ -601,7 +596,6 @@ files = [ [package.dependencies] gitdb = ">=4.0.1,<5" -typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} [[package]] name = "hiredis" @@ -847,7 +841,6 @@ files = [ ] [package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] @@ -987,11 +980,9 @@ files = [ [package.dependencies] attrs = ">=17.4.0" -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" -typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] @@ -1199,7 +1190,6 @@ files = [ [package.dependencies] mdurl = ">=0.1,<1.0" -typing_extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} [package.extras] benchmarking = ["psutil", "pytest", "pytest-benchmark"] @@ -1283,7 +1273,6 @@ files = [ [package.dependencies] attrs = "*" -importlib-metadata = {version = ">=1.4", markers = "python_version < \"3.8\""} [package.extras] dev = ["aiounittest", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "mypy (==0.910)", "tox", "twine (==4.0.1)", "twisted"] @@ -1459,7 +1448,6 @@ files = [ [package.dependencies] mypy-extensions = ">=0.4.3" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} typing-extensions = ">=3.10" [package.extras] @@ -1721,9 +1709,6 @@ files = [ {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""} - [package.extras] docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] @@ -2060,7 +2045,6 @@ files = [ [package.dependencies] cryptography = ">=3.1" defusedxml = "*" 
-importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} importlib-resources = {version = "*", markers = "python_version < \"3.9\""} pyopenssl = "*" python-dateutil = "*" @@ -2410,9 +2394,7 @@ files = [ [package.dependencies] canonicaljson = ">=1.0.0" -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} pynacl = ">=0.3.0" -typing-extensions = {version = ">=3.5", markers = "python_version < \"3.8\""} unpaddedbase64 = ">=1.0.1" [package.extras] @@ -2852,39 +2834,6 @@ files = [ six = "*" twisted = "*" -[[package]] -name = "typed-ast" -version = "1.5.4" -description = "a fork of Python 2 and 3 ast modules with type comment support" -optional = false -python-versions = ">=3.6" -files = [ - {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, - {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, - {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, - {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, - {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, - {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, - {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, - 
{file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, - {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, - {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, -] - [[package]] name = "types-bleach" version = "6.0.0.3" @@ -3293,5 +3242,5 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" -python-versions = "^3.7.1" -content-hash = "7f31754a1009d7b6c9a1bd7221a0b243ffd510f362c28f0da417aaac16757a87" +python-versions = "^3.8.0" +content-hash = "0832381cc9e7065e8d95c810d732aa031b98d55cf188719989b12d841993e62e" diff --git a/pyproject.toml b/pyproject.toml index 192a07756b6e..a6e3a935a9c2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -147,7 +147,7 @@ synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main" update_synapse_database = "synapse._scripts.update_synapse_database:main" [tool.poetry.dependencies] -python = "^3.7.1" +python = "^3.8.0" # Mandatory Dependencies # ---------------------- @@ -203,9 +203,6 @@ ijson = ">=3.1.4" matrix-common = "^1.3.0" # We need packaging.requirements.Requirement, added in 16.1. packaging = ">=16.1" -# At the time of writing, we only use functions from the version `importlib.metadata` -# which shipped in Python 3.8. This corresponds to version 1.4 of the backport. -importlib_metadata = { version = ">=1.4", python = "<3.8" } # This is the most recent version of Pydantic with available on common distros. # We are currently incompatible with >=2.0.0: (https://github.com/matrix-org/synapse/issues/15858) pydantic = "^1.7.4" diff --git a/synapse/__init__.py b/synapse/__init__.py index b97ee59f15b6..6c1801862b61 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -25,8 +25,8 @@ from synapse.util.stringutils import strtobool # Check that we're not running on an unsupported Python version. -if sys.version_info < (3, 7): - print("Synapse requires Python 3.7 or above.") +if sys.version_info < (3, 8): + print("Synapse requires Python 3.8 or above.") sys.exit(1) # Allow using the asyncio reactor via env var. 
diff --git a/synapse/util/check_dependencies.py b/synapse/util/check_dependencies.py index 1c0fde4966e7..114130a08fe2 100644 --- a/synapse/util/check_dependencies.py +++ b/synapse/util/check_dependencies.py @@ -21,16 +21,13 @@ """ import logging +from importlib import metadata from typing import Iterable, NamedTuple, Optional from packaging.requirements import Requirement DISTRIBUTION_NAME = "matrix-synapse" -try: - from importlib import metadata -except ImportError: - import importlib_metadata as metadata # type: ignore[no-redef] __all__ = ["check_requirements"] diff --git a/tests/metrics/test_metrics.py b/tests/metrics/test_metrics.py index 7c3656d049f1..d14876826ce9 100644 --- a/tests/metrics/test_metrics.py +++ b/tests/metrics/test_metrics.py @@ -12,19 +12,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from importlib import metadata from typing import Dict, Tuple - -from typing_extensions import Protocol - -try: - from importlib import metadata -except ImportError: - import importlib_metadata as metadata # type: ignore[no-redef] - from unittest.mock import patch from pkg_resources import parse_version from prometheus_client.core import Sample +from typing_extensions import Protocol from synapse.app._base import _set_prometheus_client_use_created_metrics from synapse.metrics import REGISTRY, InFlightGauge, generate_latest From b07b14b494ae1dd564b4c44f844c9a9545b3d08a Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 5 Jul 2023 18:53:55 -0500 Subject: [PATCH 131/153] Federation outbound proxy (#15773) Allow configuring the set of workers to proxy outbound federation traffic through (`outbound_federation_restricted_to`). This is useful when you have a worker setup with `federation_sender` instances responsible for sending outbound federation requests and want to make sure *all* outbound federation traffic goes through those instances. Before this change, the generic workers would still contact federation themselves for things like profile lookups, backfill, etc. This PR allows you to set more strict access controls/firewall for all workers and only allow the `federation_sender`'s to contact the outside world. The original code is from @erikjohnston's branches which I've gotten in-shape to merge. 
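Concretely, the new option is used together with `instance_map`; a minimal configuration sketch (worker names, hosts and ports are illustrative) matching the behaviour described above:

```yaml
instance_map:
  main:
    host: localhost
    port: 8030
  federation_sender1:
    host: localhost
    port: 8034

# Only this worker talks to federation directly; all other workers proxy
# their outbound federation requests through it.
outbound_federation_restricted_to:
  - federation_sender1
```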
--- changelog.d/15773.feature | 1 + .../configuration/config_documentation.md | 31 ++- docs/workers.md | 20 ++ synapse/app/_base.py | 2 + synapse/app/generic_worker.py | 1 + synapse/app/homeserver.py | 1 + synapse/config/workers.py | 40 ++- synapse/http/client.py | 7 +- synapse/http/matrixfederationclient.py | 132 +++++++++- synapse/http/proxy.py | 249 ++++++++++++++++++ synapse/http/proxyagent.py | 79 +++++- synapse/http/server.py | 55 ++-- synapse/http/site.py | 26 +- tests/app/test_openid_listener.py | 8 +- tests/handlers/test_device.py | 3 +- tests/handlers/test_federation.py | 2 +- tests/handlers/test_presence.py | 1 - tests/handlers/test_typing.py | 10 + tests/http/test_matrixfederationclient.py | 189 ++++++++++++- tests/http/test_proxy.py | 53 ++++ tests/replication/_base.py | 3 +- .../test_federation_sender_shard.py | 22 +- tests/rest/client/test_presence.py | 1 - tests/rest/client/test_rooms.py | 2 - tests/storage/test_e2e_room_keys.py | 2 +- tests/storage/test_purge.py | 2 +- tests/storage/test_rollback_worker.py | 4 +- tests/test_server.py | 33 ++- tests/unittest.py | 1 + 29 files changed, 890 insertions(+), 90 deletions(-) create mode 100644 changelog.d/15773.feature create mode 100644 synapse/http/proxy.py create mode 100644 tests/http/test_proxy.py diff --git a/changelog.d/15773.feature b/changelog.d/15773.feature new file mode 100644 index 000000000000..0d77fae2dc68 --- /dev/null +++ b/changelog.d/15773.feature @@ -0,0 +1 @@ +Allow configuring the set of workers to proxy outbound federation traffic through via `outbound_federation_restricted_to`. diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 26d7c7900cbe..89a92c4682be 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -3930,13 +3930,14 @@ federation_sender_instances: --- ### `instance_map` -When using workers this should be a map from [`worker_name`](#worker_name) to the -HTTP replication listener of the worker, if configured, and to the main process. -Each worker declared under [`stream_writers`](../../workers.md#stream-writers) needs -a HTTP replication listener, and that listener should be included in the `instance_map`. -The main process also needs an entry on the `instance_map`, and it should be listed under -`main` **if even one other worker exists**. Ensure the port matches with what is declared -inside the `listener` block for a `replication` listener. +When using workers this should be a map from [`worker_name`](#worker_name) to the HTTP +replication listener of the worker, if configured, and to the main process. Each worker +declared under [`stream_writers`](../../workers.md#stream-writers) and +[`outbound_federation_restricted_to`](#outbound_federation_restricted_to) needs a HTTP replication listener, and that +listener should be included in the `instance_map`. The main process also needs an entry +on the `instance_map`, and it should be listed under `main` **if even one other worker +exists**. Ensure the port matches with what is declared inside the `listener` block for +a `replication` listener. Example configuration: @@ -3966,6 +3967,22 @@ stream_writers: typing: worker1 ``` --- +### `outbound_federation_restricted_to` + +When using workers, you can restrict outbound federation traffic to only go through a +specific subset of workers. Any worker specified here must also be in the +[`instance_map`](#instance_map). 
+ +```yaml +outbound_federation_restricted_to: + - federation_sender1 + - federation_sender2 +``` + +Also see the [worker +documentation](../../workers.md#restrict-outbound-federation-traffic-to-a-specific-set-of-workers) +for more info. +--- ### `run_background_tasks_on` The [worker](../../workers.md#background-tasks) that is used to run diff --git a/docs/workers.md b/docs/workers.md index 735128762a47..303e0f0e7a5a 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -531,6 +531,26 @@ the stream writer for the `presence` stream: ^/_matrix/client/(api/v1|r0|v3|unstable)/presence/ +#### Restrict outbound federation traffic to a specific set of workers + +The `outbound_federation_restricted_to` configuration is useful to make sure outbound +federation traffic only goes through a specified subset of workers. This allows you to +set more strict access controls (like a firewall) for all workers and only allow the +`federation_sender`'s to contact the outside world. + +```yaml +instance_map: + main: + host: localhost + port: 8030 + federation_sender1: + host: localhost + port: 8034 + +outbound_federation_restricted_to: + - federation_sender1 +``` + #### Background tasks There is also support for moving background tasks to a separate diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 936b1b043037..938ab40f278e 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -386,6 +386,7 @@ def listen_unix( def listen_http( + hs: "HomeServer", listener_config: ListenerConfig, root_resource: Resource, version_string: str, @@ -406,6 +407,7 @@ def listen_http( version_string, max_request_body_size=max_request_body_size, reactor=reactor, + federation_agent=hs.get_federation_http_client().agent, ) if isinstance(listener_config, TCPListenerConfig): diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 7406c3948cb1..dc79efcc142f 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -221,6 +221,7 @@ def _listen_http(self, listener_config: ListenerConfig) -> None: root_resource = create_resource_tree(resources, OptionsResource()) _base.listen_http( + self, listener_config, root_resource, self.version_string, diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 84236ac299e0..f188c7265a50 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -139,6 +139,7 @@ def _listener_http( root_resource = OptionsResource() ports = listen_http( + self, listener_config, create_resource_tree(resources, root_resource), self.version_string, diff --git a/synapse/config/workers.py b/synapse/config/workers.py index 38e13dd7b5e0..0b9789160cc0 100644 --- a/synapse/config/workers.py +++ b/synapse/config/workers.py @@ -15,7 +15,7 @@ import argparse import logging -from typing import Any, Dict, List, Union +from typing import Any, Dict, List, Optional, Union import attr from pydantic import BaseModel, Extra, StrictBool, StrictInt, StrictStr @@ -148,6 +148,27 @@ class WriterLocations: ) +@attr.s(auto_attribs=True) +class OutboundFederationRestrictedTo: + """Whether we limit outbound federation to a certain set of instances. + + Attributes: + instances: optional list of instances that can make outbound federation + requests. If None then all instances can make federation requests. + locations: list of instance locations to connect to proxy via. 
+ """ + + instances: Optional[List[str]] + locations: List[InstanceLocationConfig] = attr.Factory(list) + + def __contains__(self, instance: str) -> bool: + # It feels a bit dirty to return `True` if `instances` is `None`, but it makes + # sense in downstream usage in the sense that if + # `outbound_federation_restricted_to` is not configured, then any instance can + # talk to federation (no restrictions so always return `True`). + return self.instances is None or instance in self.instances + + class WorkerConfig(Config): """The workers are processes run separately to the main synapse process. They have their own pid_file and listener configuration. They use the @@ -357,6 +378,23 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: new_option_name="update_user_directory_from_worker", ) + outbound_federation_restricted_to = config.get( + "outbound_federation_restricted_to", None + ) + self.outbound_federation_restricted_to = OutboundFederationRestrictedTo( + outbound_federation_restricted_to + ) + if outbound_federation_restricted_to: + for instance in outbound_federation_restricted_to: + if instance not in self.instance_map: + raise ConfigError( + "Instance %r is configured in 'outbound_federation_restricted_to' but does not appear in `instance_map` config." + % (instance,) + ) + self.outbound_federation_restricted_to.locations.append( + self.instance_map[instance] + ) + def _should_this_worker_perform_duty( self, config: Dict[str, Any], diff --git a/synapse/http/client.py b/synapse/http/client.py index 09ea93e10d0b..ca2cdbc6e243 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -1037,7 +1037,12 @@ def connectionLost(self, reason: Failure = connectionDone) -> None: if reason.check(ResponseDone): self.deferred.callback(self.length) elif reason.check(PotentialDataLoss): - # stolen from https://github.com/twisted/treq/pull/49/files + # This applies to requests which don't set `Content-Length` or a + # `Transfer-Encoding` in the response because in this case the end of the + # response is indicated by the connection being closed, an event which may + # also be due to a transient network problem or other error. But since this + # behavior is expected of some servers (like YouTube), let's ignore it. 
+ # Stolen from https://github.com/twisted/treq/pull/49/files # http://twistedmatrix.com/trac/ticket/4840 self.deferred.callback(self.length) else: diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index cc4e258b0fba..b00396fdc71b 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -50,7 +50,7 @@ from twisted.internet.task import Cooperator from twisted.web.client import ResponseFailed from twisted.web.http_headers import Headers -from twisted.web.iweb import IBodyProducer, IResponse +from twisted.web.iweb import IAgent, IBodyProducer, IResponse import synapse.metrics import synapse.util.retryutils @@ -72,6 +72,7 @@ read_body_with_max_size, ) from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent +from synapse.http.proxyagent import ProxyAgent from synapse.http.types import QueryParams from synapse.logging import opentracing from synapse.logging.context import make_deferred_yieldable, run_in_background @@ -393,17 +394,32 @@ def __init__( if hs.config.server.user_agent_suffix: user_agent = "%s %s" % (user_agent, hs.config.server.user_agent_suffix) - federation_agent = MatrixFederationAgent( - self.reactor, - tls_client_options_factory, - user_agent.encode("ascii"), - hs.config.server.federation_ip_range_allowlist, - hs.config.server.federation_ip_range_blocklist, + outbound_federation_restricted_to = ( + hs.config.worker.outbound_federation_restricted_to ) + if hs.get_instance_name() in outbound_federation_restricted_to: + # Talk to federation directly + federation_agent: IAgent = MatrixFederationAgent( + self.reactor, + tls_client_options_factory, + user_agent.encode("ascii"), + hs.config.server.federation_ip_range_allowlist, + hs.config.server.federation_ip_range_blocklist, + ) + else: + # We need to talk to federation via the proxy via one of the configured + # locations + federation_proxies = outbound_federation_restricted_to.locations + federation_agent = ProxyAgent( + self.reactor, + self.reactor, + tls_client_options_factory, + federation_proxies=federation_proxies, + ) # Use a BlocklistingAgentWrapper to prevent circumventing the IP # blocking via IP literals in server names - self.agent = BlocklistingAgentWrapper( + self.agent: IAgent = BlocklistingAgentWrapper( federation_agent, ip_blocklist=hs.config.server.federation_ip_range_blocklist, ) @@ -412,7 +428,6 @@ def __init__( self._store = hs.get_datastores().main self.version_string_bytes = hs.version_string.encode("ascii") self.default_timeout_seconds = hs.config.federation.client_timeout_ms / 1000 - self.max_long_retry_delay_seconds = ( hs.config.federation.max_long_retry_delay_ms / 1000 ) @@ -1131,6 +1146,101 @@ async def get_json( Succeeds when we get a 2xx HTTP response. The result will be the decoded JSON body. + Raises: + HttpResponseException: If we get an HTTP response code >= 300 + (except 429). + NotRetryingDestination: If we are not yet ready to retry this + server. + FederationDeniedError: If this destination is not on our + federation whitelist + RequestSendFailed: If there were problems connecting to the + remote, due to e.g. DNS failures, connection timeouts etc. 
+ """ + json_dict, _ = await self.get_json_with_headers( + destination=destination, + path=path, + args=args, + retry_on_dns_fail=retry_on_dns_fail, + timeout=timeout, + ignore_backoff=ignore_backoff, + try_trailing_slash_on_400=try_trailing_slash_on_400, + parser=parser, + ) + return json_dict + + @overload + async def get_json_with_headers( + self, + destination: str, + path: str, + args: Optional[QueryParams] = None, + retry_on_dns_fail: bool = True, + timeout: Optional[int] = None, + ignore_backoff: bool = False, + try_trailing_slash_on_400: bool = False, + parser: Literal[None] = None, + ) -> Tuple[JsonDict, Dict[bytes, List[bytes]]]: + ... + + @overload + async def get_json_with_headers( + self, + destination: str, + path: str, + args: Optional[QueryParams] = ..., + retry_on_dns_fail: bool = ..., + timeout: Optional[int] = ..., + ignore_backoff: bool = ..., + try_trailing_slash_on_400: bool = ..., + parser: ByteParser[T] = ..., + ) -> Tuple[T, Dict[bytes, List[bytes]]]: + ... + + async def get_json_with_headers( + self, + destination: str, + path: str, + args: Optional[QueryParams] = None, + retry_on_dns_fail: bool = True, + timeout: Optional[int] = None, + ignore_backoff: bool = False, + try_trailing_slash_on_400: bool = False, + parser: Optional[ByteParser[T]] = None, + ) -> Tuple[Union[JsonDict, T], Dict[bytes, List[bytes]]]: + """GETs some json from the given host homeserver and path + + Args: + destination: The remote server to send the HTTP request to. + + path: The HTTP path. + + args: A dictionary used to create query strings, defaults to + None. + + retry_on_dns_fail: true if the request should be retried on DNS failures + + timeout: number of milliseconds to wait for the response. + self._default_timeout (60s) by default. + + Note that we may make several attempts to send the request; this + timeout applies to the time spent waiting for response headers for + *each* attempt (including connection time) as well as the time spent + reading the response body after a 200 response. + + ignore_backoff: true to ignore the historical backoff data + and try the request anyway. + + try_trailing_slash_on_400: True if on a 400 M_UNRECOGNIZED + response we should try appending a trailing slash to the end of + the request. Workaround for #3622 in Synapse <= v0.99.3. + + parser: The parser to use to decode the response. Defaults to + parsing as JSON. + + Returns: + Succeeds when we get a 2xx HTTP response. The result will be a tuple of the + decoded JSON body and a dict of the response headers. + Raises: HttpResponseException: If we get an HTTP response code >= 300 (except 429). @@ -1156,6 +1266,8 @@ async def get_json( timeout=timeout, ) + headers = dict(response.headers.getAllRawHeaders()) + if timeout is not None: _sec_timeout = timeout / 1000 else: @@ -1173,7 +1285,7 @@ async def get_json( parser=parser, ) - return body + return body, headers async def delete_json( self, diff --git a/synapse/http/proxy.py b/synapse/http/proxy.py new file mode 100644 index 000000000000..0874d67760ae --- /dev/null +++ b/synapse/http/proxy.py @@ -0,0 +1,249 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import json +import logging +import urllib.parse +from typing import TYPE_CHECKING, Any, Optional, Set, Tuple, cast + +from twisted.internet import protocol +from twisted.internet.interfaces import ITCPTransport +from twisted.internet.protocol import connectionDone +from twisted.python import failure +from twisted.python.failure import Failure +from twisted.web.client import ResponseDone +from twisted.web.http_headers import Headers +from twisted.web.iweb import IAgent, IResponse +from twisted.web.resource import IResource +from twisted.web.server import Site + +from synapse.api.errors import Codes +from synapse.http import QuieterFileBodyProducer +from synapse.http.server import _AsyncResource +from synapse.logging.context import make_deferred_yieldable, run_in_background +from synapse.types import ISynapseReactor +from synapse.util.async_helpers import timeout_deferred + +if TYPE_CHECKING: + from synapse.http.site import SynapseRequest + +logger = logging.getLogger(__name__) + +# "Hop-by-hop" headers (as opposed to "end-to-end" headers) as defined by RFC2616 +# section 13.5.1 and referenced in RFC9110 section 7.6.1. These are meant to only be +# consumed by the immediate recipient and not be forwarded on. +HOP_BY_HOP_HEADERS = { + "Connection", + "Keep-Alive", + "Proxy-Authenticate", + "Proxy-Authorization", + "TE", + "Trailers", + "Transfer-Encoding", + "Upgrade", +} + + +def parse_connection_header_value( + connection_header_value: Optional[bytes], +) -> Set[str]: + """ + Parse the `Connection` header to determine which headers we should not be copied + over from the remote response. + + As defined by RFC2616 section 14.10 and RFC9110 section 7.6.1 + + Example: `Connection: close, X-Foo, X-Bar` will return `{"Close", "X-Foo", "X-Bar"}` + + Even though "close" is a special directive, let's just treat it as just another + header for simplicity. If people want to check for this directive, they can simply + check for `"Close" in headers`. + + Args: + connection_header_value: The value of the `Connection` header. + + Returns: + The set of header names that should not be copied over from the remote response. + The keys are capitalized in canonical capitalization. + """ + headers = Headers() + extra_headers_to_remove: Set[str] = set() + if connection_header_value: + extra_headers_to_remove = { + headers._canonicalNameCaps(connection_option.strip()).decode("ascii") + for connection_option in connection_header_value.split(b",") + } + + return extra_headers_to_remove + + +class ProxyResource(_AsyncResource): + """ + A stub resource that proxies any requests with a `matrix-federation://` scheme + through the given `federation_agent` to the remote homeserver and ferries back the + info. 
+ """ + + isLeaf = True + + def __init__(self, reactor: ISynapseReactor, federation_agent: IAgent): + super().__init__(True) + + self.reactor = reactor + self.agent = federation_agent + + async def _async_render(self, request: "SynapseRequest") -> Tuple[int, Any]: + uri = urllib.parse.urlparse(request.uri) + assert uri.scheme == b"matrix-federation" + + headers = Headers() + for header_name in (b"User-Agent", b"Authorization", b"Content-Type"): + header_value = request.getHeader(header_name) + if header_value: + headers.addRawHeader(header_name, header_value) + + request_deferred = run_in_background( + self.agent.request, + request.method, + request.uri, + headers=headers, + bodyProducer=QuieterFileBodyProducer(request.content), + ) + request_deferred = timeout_deferred( + request_deferred, + # This should be set longer than the timeout in `MatrixFederationHttpClient` + # so that it has enough time to complete and pass us the data before we give + # up. + timeout=90, + reactor=self.reactor, + ) + + response = await make_deferred_yieldable(request_deferred) + + return response.code, response + + def _send_response( + self, + request: "SynapseRequest", + code: int, + response_object: Any, + ) -> None: + response = cast(IResponse, response_object) + response_headers = cast(Headers, response.headers) + + request.setResponseCode(code) + + # The `Connection` header also defines which headers should not be copied over. + connection_header = response_headers.getRawHeaders(b"connection") + extra_headers_to_remove = parse_connection_header_value( + connection_header[0] if connection_header else None + ) + + # Copy headers. + for k, v in response_headers.getAllRawHeaders(): + # Do not copy over any hop-by-hop headers. These are meant to only be + # consumed by the immediate recipient and not be forwarded on. + header_key = k.decode("ascii") + if ( + header_key in HOP_BY_HOP_HEADERS + or header_key in extra_headers_to_remove + ): + continue + + request.responseHeaders.setRawHeaders(k, v) + + response.deliverBody(_ProxyResponseBody(request)) + + def _send_error_response( + self, + f: failure.Failure, + request: "SynapseRequest", + ) -> None: + request.setResponseCode(502) + request.setHeader(b"Content-Type", b"application/json") + request.write( + ( + json.dumps( + { + "errcode": Codes.UNKNOWN, + "err": "ProxyResource: Error when proxying request: %s %s -> %s" + % ( + request.method.decode("ascii"), + request.uri.decode("ascii"), + f, + ), + } + ) + ).encode() + ) + request.finish() + + +class _ProxyResponseBody(protocol.Protocol): + """ + A protocol that proxies the given remote response data back out to the given local + request. + """ + + transport: Optional[ITCPTransport] = None + + def __init__(self, request: "SynapseRequest") -> None: + self._request = request + + def dataReceived(self, data: bytes) -> None: + # Avoid sending response data to the local request that already disconnected + if self._request._disconnected and self.transport is not None: + # Close the connection (forcefully) since all the data will get + # discarded anyway. + self.transport.abortConnection() + return + + self._request.write(data) + + def connectionLost(self, reason: Failure = connectionDone) -> None: + # If the local request is already finished (successfully or failed), don't + # worry about sending anything back. + if self._request.finished: + return + + if reason.check(ResponseDone): + self._request.finish() + else: + # Abort the underlying request since our remote request also failed. 
+ self._request.transport.abortConnection() + + +class ProxySite(Site): + """ + Proxies any requests with a `matrix-federation://` scheme through the given + `federation_agent`. Otherwise, behaves like a normal `Site`. + """ + + def __init__( + self, + resource: IResource, + reactor: ISynapseReactor, + federation_agent: IAgent, + ): + super().__init__(resource, reactor=reactor) + + self._proxy_resource = ProxyResource(reactor, federation_agent) + + def getResourceFor(self, request: "SynapseRequest") -> IResource: + uri = urllib.parse.urlparse(request.uri) + if uri.scheme == b"matrix-federation": + return self._proxy_resource + + return super().getResourceFor(request) diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py index 7bdc4acae7da..1fa3adbef20c 100644 --- a/synapse/http/proxyagent.py +++ b/synapse/http/proxyagent.py @@ -12,8 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging +import random import re -from typing import Any, Dict, Optional, Tuple +from typing import Any, Collection, Dict, List, Optional, Sequence, Tuple from urllib.parse import urlparse from urllib.request import ( # type: ignore[attr-defined] getproxies_environment, @@ -24,7 +25,12 @@ from twisted.internet import defer from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS -from twisted.internet.interfaces import IReactorCore, IStreamClientEndpoint +from twisted.internet.interfaces import ( + IProtocol, + IProtocolFactory, + IReactorCore, + IStreamClientEndpoint, +) from twisted.python.failure import Failure from twisted.web.client import ( URI, @@ -36,8 +42,10 @@ from twisted.web.http_headers import Headers from twisted.web.iweb import IAgent, IBodyProducer, IPolicyForHTTPS, IResponse +from synapse.config.workers import InstanceLocationConfig from synapse.http import redact_uri from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint, ProxyCredentials +from synapse.logging.context import run_in_background logger = logging.getLogger(__name__) @@ -74,6 +82,10 @@ class ProxyAgent(_AgentBase): use_proxy: Whether proxy settings should be discovered and used from conventional environment variables. + federation_proxies: An optional list of locations to proxy outbound federation + traffic through (only requests that use the `matrix-federation://` scheme + will be proxied). + Raises: ValueError if use_proxy is set and the environment variables contain an invalid proxy specification. 
@@ -89,6 +101,7 @@ def __init__( bindAddress: Optional[bytes] = None, pool: Optional[HTTPConnectionPool] = None, use_proxy: bool = False, + federation_proxies: Collection[InstanceLocationConfig] = (), ): contextFactory = contextFactory or BrowserLikePolicyForHTTPS() @@ -127,6 +140,27 @@ def __init__( self._policy_for_https = contextFactory self._reactor = reactor + self._federation_proxy_endpoint: Optional[IStreamClientEndpoint] = None + if federation_proxies: + endpoints = [] + for federation_proxy in federation_proxies: + endpoint = HostnameEndpoint( + self.proxy_reactor, + federation_proxy.host, + federation_proxy.port, + ) + + if federation_proxy.tls: + tls_connection_creator = self._policy_for_https.creatorForNetloc( + federation_proxy.host, + federation_proxy.port, + ) + endpoint = wrapClientTLS(tls_connection_creator, endpoint) + + endpoints.append(endpoint) + + self._federation_proxy_endpoint = _ProxyEndpoints(endpoints) + def request( self, method: bytes, @@ -214,6 +248,14 @@ def request( parsed_uri.port, self.https_proxy_creds, ) + elif ( + parsed_uri.scheme == b"matrix-federation" + and self._federation_proxy_endpoint + ): + # Cache *all* connections under the same key, since we are only + # connecting to a single destination, the proxy: + endpoint = self._federation_proxy_endpoint + request_path = uri else: # not using a proxy endpoint = HostnameEndpoint( @@ -233,6 +275,11 @@ def request( endpoint = wrapClientTLS(tls_connection_creator, endpoint) elif parsed_uri.scheme == b"http": pass + elif ( + parsed_uri.scheme == b"matrix-federation" + and self._federation_proxy_endpoint + ): + pass else: return defer.fail( Failure( @@ -337,3 +384,31 @@ def parse_proxy( credentials = ProxyCredentials(b"".join([url.username, b":", url.password])) return url.scheme, url.hostname, url.port or default_port, credentials + + +@implementer(IStreamClientEndpoint) +class _ProxyEndpoints: + """An endpoint that randomly iterates through a given list of endpoints at + each connection attempt. 
+ """ + + def __init__(self, endpoints: Sequence[IStreamClientEndpoint]) -> None: + assert endpoints + self._endpoints = endpoints + + def connect( + self, protocol_factory: IProtocolFactory + ) -> "defer.Deferred[IProtocol]": + """Implements IStreamClientEndpoint interface""" + + return run_in_background(self._do_connect, protocol_factory) + + async def _do_connect(self, protocol_factory: IProtocolFactory) -> IProtocol: + failures: List[Failure] = [] + for endpoint in random.sample(self._endpoints, k=len(self._endpoints)): + try: + return await endpoint.connect(protocol_factory) + except Exception: + failures.append(Failure()) + + failures.pop().raiseException() diff --git a/synapse/http/server.py b/synapse/http/server.py index 933172c87327..ff3153a9d949 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -18,6 +18,7 @@ import logging import types import urllib +import urllib.parse from http import HTTPStatus from http.client import FOUND from inspect import isawaitable @@ -65,7 +66,6 @@ UnrecognizedRequestError, ) from synapse.config.homeserver import HomeServerConfig -from synapse.http.site import SynapseRequest from synapse.logging.context import defer_to_thread, preserve_fn, run_in_background from synapse.logging.opentracing import active_span, start_active_span, trace_servlet from synapse.util import json_encoder @@ -76,6 +76,7 @@ if TYPE_CHECKING: import opentracing + from synapse.http.site import SynapseRequest from synapse.server import HomeServer logger = logging.getLogger(__name__) @@ -102,7 +103,7 @@ def return_json_error( - f: failure.Failure, request: SynapseRequest, config: Optional[HomeServerConfig] + f: failure.Failure, request: "SynapseRequest", config: Optional[HomeServerConfig] ) -> None: """Sends a JSON error response to clients.""" @@ -220,8 +221,8 @@ def return_html_error( def wrap_async_request_handler( - h: Callable[["_AsyncResource", SynapseRequest], Awaitable[None]] -) -> Callable[["_AsyncResource", SynapseRequest], "defer.Deferred[None]"]: + h: Callable[["_AsyncResource", "SynapseRequest"], Awaitable[None]] +) -> Callable[["_AsyncResource", "SynapseRequest"], "defer.Deferred[None]"]: """Wraps an async request handler so that it calls request.processing. This helps ensure that work done by the request handler after the request is completed @@ -235,7 +236,7 @@ def wrap_async_request_handler( """ async def wrapped_async_request_handler( - self: "_AsyncResource", request: SynapseRequest + self: "_AsyncResource", request: "SynapseRequest" ) -> None: with request.processing(): await h(self, request) @@ -300,7 +301,7 @@ def __init__(self, extract_context: bool = False): self._extract_context = extract_context - def render(self, request: SynapseRequest) -> int: + def render(self, request: "SynapseRequest") -> int: """This gets called by twisted every time someone sends us a request.""" request.render_deferred = defer.ensureDeferred( self._async_render_wrapper(request) @@ -308,7 +309,7 @@ def render(self, request: SynapseRequest) -> int: return NOT_DONE_YET @wrap_async_request_handler - async def _async_render_wrapper(self, request: SynapseRequest) -> None: + async def _async_render_wrapper(self, request: "SynapseRequest") -> None: """This is a wrapper that delegates to `_async_render` and handles exceptions, return values, metrics, etc. """ @@ -326,9 +327,15 @@ async def _async_render_wrapper(self, request: SynapseRequest) -> None: # of our stack, and thus gives us a sensible stack # trace. 
f = failure.Failure() + logger.exception( + "Error handling request", + exc_info=(f.type, f.value, f.getTracebackObject()), + ) self._send_error_response(f, request) - async def _async_render(self, request: SynapseRequest) -> Optional[Tuple[int, Any]]: + async def _async_render( + self, request: "SynapseRequest" + ) -> Optional[Tuple[int, Any]]: """Delegates to `_async_render_` methods, or returns a 400 if no appropriate method exists. Can be overridden in sub classes for different routing. @@ -358,7 +365,7 @@ async def _async_render(self, request: SynapseRequest) -> Optional[Tuple[int, An @abc.abstractmethod def _send_response( self, - request: SynapseRequest, + request: "SynapseRequest", code: int, response_object: Any, ) -> None: @@ -368,7 +375,7 @@ def _send_response( def _send_error_response( self, f: failure.Failure, - request: SynapseRequest, + request: "SynapseRequest", ) -> None: raise NotImplementedError() @@ -384,7 +391,7 @@ def __init__(self, canonical_json: bool = False, extract_context: bool = False): def _send_response( self, - request: SynapseRequest, + request: "SynapseRequest", code: int, response_object: Any, ) -> None: @@ -401,7 +408,7 @@ def _send_response( def _send_error_response( self, f: failure.Failure, - request: SynapseRequest, + request: "SynapseRequest", ) -> None: """Implements _AsyncResource._send_error_response""" return_json_error(f, request, None) @@ -473,7 +480,7 @@ def register_paths( ) def _get_handler_for_request( - self, request: SynapseRequest + self, request: "SynapseRequest" ) -> Tuple[ServletCallback, str, Dict[str, str]]: """Finds a callback method to handle the given request. @@ -503,7 +510,7 @@ def _get_handler_for_request( # Huh. No one wanted to handle that? Fiiiiiine. raise UnrecognizedRequestError(code=404) - async def _async_render(self, request: SynapseRequest) -> Tuple[int, Any]: + async def _async_render(self, request: "SynapseRequest") -> Tuple[int, Any]: callback, servlet_classname, group_dict = self._get_handler_for_request(request) request.is_render_cancellable = is_function_cancellable(callback) @@ -535,7 +542,7 @@ async def _async_render(self, request: SynapseRequest) -> Tuple[int, Any]: def _send_error_response( self, f: failure.Failure, - request: SynapseRequest, + request: "SynapseRequest", ) -> None: """Implements _AsyncResource._send_error_response""" return_json_error(f, request, self.hs.config) @@ -551,7 +558,7 @@ class DirectServeHtmlResource(_AsyncResource): def _send_response( self, - request: SynapseRequest, + request: "SynapseRequest", code: int, response_object: Any, ) -> None: @@ -565,7 +572,7 @@ def _send_response( def _send_error_response( self, f: failure.Failure, - request: SynapseRequest, + request: "SynapseRequest", ) -> None: """Implements _AsyncResource._send_error_response""" return_html_error(f, request, self.ERROR_TEMPLATE) @@ -592,7 +599,7 @@ class UnrecognizedRequestResource(resource.Resource): errcode of M_UNRECOGNIZED. 
""" - def render(self, request: SynapseRequest) -> int: + def render(self, request: "SynapseRequest") -> int: f = failure.Failure(UnrecognizedRequestError(code=404)) return_json_error(f, request, None) # A response has already been sent but Twisted requires either NOT_DONE_YET @@ -622,7 +629,7 @@ def getChild(self, name: str, request: Request) -> resource.Resource: class OptionsResource(resource.Resource): """Responds to OPTION requests for itself and all children.""" - def render_OPTIONS(self, request: SynapseRequest) -> bytes: + def render_OPTIONS(self, request: "SynapseRequest") -> bytes: request.setResponseCode(204) request.setHeader(b"Content-Length", b"0") @@ -737,7 +744,7 @@ def _encode_json_bytes(json_object: object) -> bytes: def respond_with_json( - request: SynapseRequest, + request: "SynapseRequest", code: int, json_object: Any, send_cors: bool = False, @@ -787,7 +794,7 @@ def respond_with_json( def respond_with_json_bytes( - request: SynapseRequest, + request: "SynapseRequest", code: int, json_bytes: bytes, send_cors: bool = False, @@ -825,7 +832,7 @@ def respond_with_json_bytes( async def _async_write_json_to_request_in_thread( - request: SynapseRequest, + request: "SynapseRequest", json_encoder: Callable[[Any], bytes], json_object: Any, ) -> None: @@ -883,7 +890,7 @@ def _write_bytes_to_request(request: Request, bytes_to_write: bytes) -> None: _ByteProducer(request, bytes_generator) -def set_cors_headers(request: SynapseRequest) -> None: +def set_cors_headers(request: "SynapseRequest") -> None: """Set the CORS headers so that javascript running in a web browsers can use this API @@ -981,7 +988,7 @@ def set_clickjacking_protection_headers(request: Request) -> None: def respond_with_redirect( - request: SynapseRequest, url: bytes, statusCode: int = FOUND, cors: bool = False + request: "SynapseRequest", url: bytes, statusCode: int = FOUND, cors: bool = False ) -> None: """ Write a 302 (or other specified status code) response to the request, if it is still alive. diff --git a/synapse/http/site.py b/synapse/http/site.py index 5b5a7c1e598f..0ee259834559 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -21,25 +21,28 @@ from twisted.internet.address import UNIXAddress from twisted.internet.defer import Deferred -from twisted.internet.interfaces import IAddress, IReactorTime +from twisted.internet.interfaces import IAddress from twisted.python.failure import Failure from twisted.web.http import HTTPChannel +from twisted.web.iweb import IAgent from twisted.web.resource import IResource, Resource -from twisted.web.server import Request, Site +from twisted.web.server import Request from synapse.config.server import ListenerConfig from synapse.http import get_request_user_agent, redact_uri +from synapse.http.proxy import ProxySite from synapse.http.request_metrics import RequestMetrics, requests_counter from synapse.logging.context import ( ContextRequest, LoggingContext, PreserveLoggingContext, ) -from synapse.types import Requester +from synapse.types import ISynapseReactor, Requester if TYPE_CHECKING: import opentracing + logger = logging.getLogger(__name__) _next_request_seq = 0 @@ -102,7 +105,7 @@ def __init__( # A boolean indicating whether `render_deferred` should be cancelled if the # client disconnects early. Expected to be set by the coroutine started by # `Resource.render`, if rendering is asynchronous. 
- self.is_render_cancellable = False + self.is_render_cancellable: bool = False global _next_request_seq self.request_seq = _next_request_seq @@ -601,7 +604,7 @@ class _XForwardedForAddress: host: str -class SynapseSite(Site): +class SynapseSite(ProxySite): """ Synapse-specific twisted http Site @@ -623,7 +626,8 @@ def __init__( resource: IResource, server_version_string: str, max_request_body_size: int, - reactor: IReactorTime, + reactor: ISynapseReactor, + federation_agent: IAgent, ): """ @@ -638,7 +642,11 @@ def __init__( dropping the connection reactor: reactor to be used to manage connection timeouts """ - Site.__init__(self, resource, reactor=reactor) + super().__init__( + resource=resource, + reactor=reactor, + federation_agent=federation_agent, + ) self.site_tag = site_tag self.reactor = reactor @@ -649,7 +657,9 @@ def __init__( request_id_header = config.http_options.request_id_header - self.experimental_cors_msc3886 = config.http_options.experimental_cors_msc3886 + self.experimental_cors_msc3886: bool = ( + config.http_options.experimental_cors_msc3886 + ) def request_factory(channel: HTTPChannel, queued: bool) -> Request: return request_class( diff --git a/tests/app/test_openid_listener.py b/tests/app/test_openid_listener.py index 5a965f233b05..21c530974083 100644 --- a/tests/app/test_openid_listener.py +++ b/tests/app/test_openid_listener.py @@ -31,9 +31,7 @@ class FederationReaderOpenIDListenerTests(HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = self.setup_test_homeserver( - federation_http_client=None, homeserver_to_use=GenericWorkerServer - ) + hs = self.setup_test_homeserver(homeserver_to_use=GenericWorkerServer) return hs def default_config(self) -> JsonDict: @@ -91,9 +89,7 @@ def test_openid_listener(self, names: List[str], expectation: str) -> None: @patch("synapse.app.homeserver.KeyResource", new=Mock()) class SynapseHomeserverOpenIDListenerTests(HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = self.setup_test_homeserver( - federation_http_client=None, homeserver_to_use=SynapseHomeServer - ) + hs = self.setup_test_homeserver(homeserver_to_use=SynapseHomeServer) return hs @parameterized.expand( diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py index ee48f9e546e8..66215af2b890 100644 --- a/tests/handlers/test_device.py +++ b/tests/handlers/test_device.py @@ -41,7 +41,6 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.appservice_api = mock.Mock() hs = self.setup_test_homeserver( "server", - federation_http_client=None, application_service_api=self.appservice_api, ) handler = hs.get_device_handler() @@ -401,7 +400,7 @@ def test_on_federation_query_user_devices_appservice(self) -> None: class DehydrationTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = self.setup_test_homeserver("server", federation_http_client=None) + hs = self.setup_test_homeserver("server") handler = hs.get_device_handler() assert isinstance(handler, DeviceHandler) self.handler = handler diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py index bf0862ed541c..5f11d5df11ad 100644 --- a/tests/handlers/test_federation.py +++ b/tests/handlers/test_federation.py @@ -57,7 +57,7 @@ class FederationTestCase(unittest.FederatingHomeserverTestCase): ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = 
self.setup_test_homeserver(federation_http_client=None) + hs = self.setup_test_homeserver() self.handler = hs.get_federation_handler() self.store = hs.get_datastores().main return hs diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index 19f5322317a1..fd66d573d221 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -993,7 +993,6 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: hs = self.setup_test_homeserver( "server", - federation_http_client=None, federation_sender=Mock(spec=FederationSender), ) return hs diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index 94518a7196b4..5da1d95f0b22 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -17,6 +17,8 @@ from typing import Dict, List, Set from unittest.mock import ANY, Mock, call +from netaddr import IPSet + from twisted.test.proto_helpers import MemoryReactor from twisted.web.resource import Resource @@ -24,6 +26,7 @@ from synapse.api.errors import AuthError from synapse.federation.transport.server import TransportLayerServer from synapse.handlers.typing import TypingWriterHandler +from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent from synapse.server import HomeServer from synapse.types import JsonDict, Requester, UserID, create_requester from synapse.util import Clock @@ -76,6 +79,13 @@ def make_homeserver( # we mock out the federation client too self.mock_federation_client = Mock(spec=["put_json"]) self.mock_federation_client.put_json.return_value = make_awaitable((200, "OK")) + self.mock_federation_client.agent = MatrixFederationAgent( + reactor, + tls_client_options_factory=None, + user_agent=b"SynapseInTrialTest/0.0.0", + ip_allowlist=None, + ip_blocklist=IPSet(), + ) # the tests assume that we are starting at unix time 1000 reactor.pump((1000,)) diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py index b5f4a60fe500..a8b9737d1fb7 100644 --- a/tests/http/test_matrixfederationclient.py +++ b/tests/http/test_matrixfederationclient.py @@ -11,8 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Generator -from unittest.mock import Mock +from typing import Any, Dict, Generator +from unittest.mock import ANY, Mock, create_autospec from netaddr import IPSet from parameterized import parameterized @@ -21,10 +21,11 @@ from twisted.internet.defer import Deferred, TimeoutError from twisted.internet.error import ConnectingCancelledError, DNSLookupError from twisted.test.proto_helpers import MemoryReactor, StringTransport -from twisted.web.client import ResponseNeverReceived +from twisted.web.client import Agent, ResponseNeverReceived from twisted.web.http import HTTPChannel +from twisted.web.http_headers import Headers -from synapse.api.errors import RequestSendFailed +from synapse.api.errors import HttpResponseException, RequestSendFailed from synapse.http.matrixfederationclient import ( ByteParser, MatrixFederationHttpClient, @@ -39,7 +40,9 @@ from synapse.server import HomeServer from synapse.util import Clock +from tests.replication._base import BaseMultiWorkerStreamTestCase from tests.server import FakeTransport +from tests.test_utils import FakeResponse from tests.unittest import HomeserverTestCase, override_config @@ -658,3 +661,181 @@ def test_configurable_retry_and_delay_values(self) -> None: self.assertEqual(self.cl.max_short_retry_delay_seconds, 7) self.assertEqual(self.cl.max_long_retries, 20) self.assertEqual(self.cl.max_short_retries, 5) + + +class FederationClientProxyTests(BaseMultiWorkerStreamTestCase): + def default_config(self) -> Dict[str, Any]: + conf = super().default_config() + conf["instance_map"] = { + "main": {"host": "testserv", "port": 8765}, + "federation_sender": {"host": "testserv", "port": 1001}, + } + return conf + + @override_config({"outbound_federation_restricted_to": ["federation_sender"]}) + def test_proxy_requests_through_federation_sender_worker(self) -> None: + """ + Test that all outbound federation requests go through the `federation_sender` + worker + """ + # Mock out the `MatrixFederationHttpClient` of the `federation_sender` instance + # so we can act like some remote server responding to requests + mock_client_on_federation_sender = Mock() + mock_agent_on_federation_sender = create_autospec(Agent, spec_set=True) + mock_client_on_federation_sender.agent = mock_agent_on_federation_sender + + # Create the `federation_sender` worker + self.federation_sender = self.make_worker_hs( + "synapse.app.generic_worker", + {"worker_name": "federation_sender"}, + federation_http_client=mock_client_on_federation_sender, + ) + + # Fake `remoteserv:8008` responding to requests + mock_agent_on_federation_sender.request.side_effect = ( + lambda *args, **kwargs: defer.succeed( + FakeResponse.json( + payload={ + "foo": "bar", + } + ) + ) + ) + + # This federation request from the main process should be proxied through the + # `federation_sender` worker off to the remote server + test_request_from_main_process_d = defer.ensureDeferred( + self.hs.get_federation_http_client().get_json("remoteserv:8008", "foo/bar") + ) + + # Pump the reactor so our deferred goes through the motions + self.pump() + + # Make sure that the request was proxied through the `federation_sender` worker + mock_agent_on_federation_sender.request.assert_called_once_with( + b"GET", + b"matrix-federation://remoteserv:8008/foo/bar", + headers=ANY, + bodyProducer=ANY, + ) + + # Make sure the response is as expected back on the main worker + res = self.successResultOf(test_request_from_main_process_d) + self.assertEqual(res, {"foo": "bar"}) + + 
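As an aside, the `get_json` call exercised in the test above now delegates to the new `get_json_with_headers` helper introduced earlier in this patch. A hypothetical direct call, assuming an already-configured `MatrixFederationHttpClient` instance, could look like this:

```python
# Hypothetical usage sketch; `client` is assumed to be a configured
# MatrixFederationHttpClient, and the destination/path are placeholders.
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
from synapse.types import JsonDict


async def fetch_with_headers(client: MatrixFederationHttpClient) -> JsonDict:
    body, headers = await client.get_json_with_headers(
        destination="remoteserv:8008",
        path="foo/bar",
    )
    # `body` is the decoded JSON body; `headers` maps raw header names (bytes)
    # to lists of raw values, as returned by `getAllRawHeaders()`.
    return body
```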
@override_config({"outbound_federation_restricted_to": ["federation_sender"]}) + def test_proxy_request_with_network_error_through_federation_sender_worker( + self, + ) -> None: + """ + Test that when the outbound federation request fails with a network related + error, a sensible error makes its way back to the main process. + """ + # Mock out the `MatrixFederationHttpClient` of the `federation_sender` instance + # so we can act like some remote server responding to requests + mock_client_on_federation_sender = Mock() + mock_agent_on_federation_sender = create_autospec(Agent, spec_set=True) + mock_client_on_federation_sender.agent = mock_agent_on_federation_sender + + # Create the `federation_sender` worker + self.federation_sender = self.make_worker_hs( + "synapse.app.generic_worker", + {"worker_name": "federation_sender"}, + federation_http_client=mock_client_on_federation_sender, + ) + + # Fake `remoteserv:8008` responding to requests + mock_agent_on_federation_sender.request.side_effect = ( + lambda *args, **kwargs: defer.fail(ResponseNeverReceived("fake error")) + ) + + # This federation request from the main process should be proxied through the + # `federation_sender` worker off to the remote server + test_request_from_main_process_d = defer.ensureDeferred( + self.hs.get_federation_http_client().get_json("remoteserv:8008", "foo/bar") + ) + + # Pump the reactor so our deferred goes through the motions. We pump with 10 + # seconds (0.1 * 100) so the `MatrixFederationHttpClient` runs out of retries + # and finally passes along the error response. + self.pump(0.1) + + # Make sure that the request was proxied through the `federation_sender` worker + mock_agent_on_federation_sender.request.assert_called_with( + b"GET", + b"matrix-federation://remoteserv:8008/foo/bar", + headers=ANY, + bodyProducer=ANY, + ) + + # Make sure we get some sort of error back on the main worker + failure_res = self.failureResultOf(test_request_from_main_process_d) + self.assertIsInstance(failure_res.value, RequestSendFailed) + self.assertIsInstance(failure_res.value.inner_exception, HttpResponseException) + + @override_config({"outbound_federation_restricted_to": ["federation_sender"]}) + def test_proxy_requests_and_discards_hop_by_hop_headers(self) -> None: + """ + Test to make sure hop-by-hop headers and addional headers defined in the + `Connection` header are discarded when proxying requests + """ + # Mock out the `MatrixFederationHttpClient` of the `federation_sender` instance + # so we can act like some remote server responding to requests + mock_client_on_federation_sender = Mock() + mock_agent_on_federation_sender = create_autospec(Agent, spec_set=True) + mock_client_on_federation_sender.agent = mock_agent_on_federation_sender + + # Create the `federation_sender` worker + self.federation_sender = self.make_worker_hs( + "synapse.app.generic_worker", + {"worker_name": "federation_sender"}, + federation_http_client=mock_client_on_federation_sender, + ) + + # Fake `remoteserv:8008` responding to requests + mock_agent_on_federation_sender.request.side_effect = lambda *args, **kwargs: defer.succeed( + FakeResponse( + code=200, + body=b'{"foo": "bar"}', + headers=Headers( + { + "Content-Type": ["application/json"], + "Connection": ["close, X-Foo, X-Bar"], + # Should be removed because it's defined in the `Connection` header + "X-Foo": ["foo"], + "X-Bar": ["bar"], + # Should be removed because it's a hop-by-hop header + "Proxy-Authorization": "abcdef", + } + ), + ) + ) + + # This federation request from the 
main process should be proxied through the + # `federation_sender` worker off to the remote server + test_request_from_main_process_d = defer.ensureDeferred( + self.hs.get_federation_http_client().get_json_with_headers( + "remoteserv:8008", "foo/bar" + ) + ) + + # Pump the reactor so our deferred goes through the motions + self.pump() + + # Make sure that the request was proxied through the `federation_sender` worker + mock_agent_on_federation_sender.request.assert_called_once_with( + b"GET", + b"matrix-federation://remoteserv:8008/foo/bar", + headers=ANY, + bodyProducer=ANY, + ) + + res, headers = self.successResultOf(test_request_from_main_process_d) + header_names = set(headers.keys()) + + # Make sure the response does not include the hop-by-hop headers + self.assertNotIn(b"X-Foo", header_names) + self.assertNotIn(b"X-Bar", header_names) + self.assertNotIn(b"Proxy-Authorization", header_names) + # Make sure the response is as expected back on the main worker + self.assertEqual(res, {"foo": "bar"}) diff --git a/tests/http/test_proxy.py b/tests/http/test_proxy.py new file mode 100644 index 000000000000..0dc9ba8e0582 --- /dev/null +++ b/tests/http/test_proxy.py @@ -0,0 +1,53 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from typing import Set + +from parameterized import parameterized + +from synapse.http.proxy import parse_connection_header_value + +from tests.unittest import TestCase + + +class ProxyTests(TestCase): + @parameterized.expand( + [ + [b"close, X-Foo, X-Bar", {"Close", "X-Foo", "X-Bar"}], + # No whitespace + [b"close,X-Foo,X-Bar", {"Close", "X-Foo", "X-Bar"}], + # More whitespace + [b"close, X-Foo, X-Bar", {"Close", "X-Foo", "X-Bar"}], + # "close" directive in not the first position + [b"X-Foo, X-Bar, close", {"X-Foo", "X-Bar", "Close"}], + # Normalizes header capitalization + [b"keep-alive, x-fOo, x-bAr", {"Keep-Alive", "X-Foo", "X-Bar"}], + # Handles header names with whitespace + [ + b"keep-alive, x foo, x bar", + {"Keep-Alive", "X foo", "X bar"}, + ], + ] + ) + def test_parse_connection_header_value( + self, + connection_header_value: bytes, + expected_extra_headers_to_remove: Set[str], + ) -> None: + """ + Tests that the connection header value is parsed correctly + """ + self.assertEqual( + expected_extra_headers_to_remove, + parse_connection_header_value(connection_header_value), + ) diff --git a/tests/replication/_base.py b/tests/replication/_base.py index eb9b1f1cd9be..96badc46b04d 100644 --- a/tests/replication/_base.py +++ b/tests/replication/_base.py @@ -69,10 +69,10 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: # Make a new HomeServer object for the worker self.reactor.lookups["testserv"] = "1.2.3.4" self.worker_hs = self.setup_test_homeserver( - federation_http_client=None, homeserver_to_use=GenericWorkerServer, config=self._get_worker_hs_config(), reactor=self.reactor, + federation_http_client=None, ) # Since we use sqlite in memory databases we need to make sure the @@ -380,6 +380,7 @@ def make_worker_hs( server_version_string="1", max_request_body_size=8192, reactor=self.reactor, + federation_agent=worker_hs.get_federation_http_client().agent, ) worker_hs.get_replication_command_handler().start_replication(worker_hs) diff --git a/tests/replication/test_federation_sender_shard.py b/tests/replication/test_federation_sender_shard.py index 08703206a9c9..a324b4d31dde 100644 --- a/tests/replication/test_federation_sender_shard.py +++ b/tests/replication/test_federation_sender_shard.py @@ -14,14 +14,18 @@ import logging from unittest.mock import Mock +from netaddr import IPSet + from synapse.api.constants import EventTypes, Membership from synapse.events.builder import EventBuilderFactory from synapse.handlers.typing import TypingWriterHandler +from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent from synapse.rest.admin import register_servlets_for_client_rest_resource from synapse.rest.client import login, room from synapse.types import UserID, create_requester from tests.replication._base import BaseMultiWorkerStreamTestCase +from tests.server import get_clock from tests.test_utils import make_awaitable logger = logging.getLogger(__name__) @@ -41,13 +45,25 @@ class FederationSenderTestCase(BaseMultiWorkerStreamTestCase): room.register_servlets, ] + def setUp(self) -> None: + super().setUp() + + reactor, _ = get_clock() + self.matrix_federation_agent = MatrixFederationAgent( + reactor, + tls_client_options_factory=None, + user_agent=b"SynapseInTrialTest/0.0.0", + ip_allowlist=None, + ip_blocklist=IPSet(), + ) + def test_send_event_single_sender(self) -> None: """Test that using a single federation sender worker correctly sends a new event. 
""" mock_client = Mock(spec=["put_json"]) mock_client.put_json.return_value = make_awaitable({}) - + mock_client.agent = self.matrix_federation_agent self.make_worker_hs( "synapse.app.generic_worker", { @@ -78,6 +94,7 @@ def test_send_event_sharded(self) -> None: """ mock_client1 = Mock(spec=["put_json"]) mock_client1.put_json.return_value = make_awaitable({}) + mock_client1.agent = self.matrix_federation_agent self.make_worker_hs( "synapse.app.generic_worker", { @@ -92,6 +109,7 @@ def test_send_event_sharded(self) -> None: mock_client2 = Mock(spec=["put_json"]) mock_client2.put_json.return_value = make_awaitable({}) + mock_client2.agent = self.matrix_federation_agent self.make_worker_hs( "synapse.app.generic_worker", { @@ -145,6 +163,7 @@ def test_send_typing_sharded(self) -> None: """ mock_client1 = Mock(spec=["put_json"]) mock_client1.put_json.return_value = make_awaitable({}) + mock_client1.agent = self.matrix_federation_agent self.make_worker_hs( "synapse.app.generic_worker", { @@ -159,6 +178,7 @@ def test_send_typing_sharded(self) -> None: mock_client2 = Mock(spec=["put_json"]) mock_client2.put_json.return_value = make_awaitable({}) + mock_client2.agent = self.matrix_federation_agent self.make_worker_hs( "synapse.app.generic_worker", { diff --git a/tests/rest/client/test_presence.py b/tests/rest/client/test_presence.py index dcbb125a3bba..e12098102b96 100644 --- a/tests/rest/client/test_presence.py +++ b/tests/rest/client/test_presence.py @@ -40,7 +40,6 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: hs = self.setup_test_homeserver( "red", - federation_http_client=None, federation_client=Mock(), presence_handler=self.presence_handler, ) diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index f1b4e1ad2fc1..d013e75d55d7 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -67,8 +67,6 @@ class RoomBase(unittest.HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.hs = self.setup_test_homeserver( "red", - federation_http_client=None, - federation_client=Mock(), ) self.hs.get_federation_handler = Mock() # type: ignore[assignment] diff --git a/tests/storage/test_e2e_room_keys.py b/tests/storage/test_e2e_room_keys.py index 9cb326d90a87..f6df31aba4ca 100644 --- a/tests/storage/test_e2e_room_keys.py +++ b/tests/storage/test_e2e_room_keys.py @@ -31,7 +31,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = self.setup_test_homeserver("server", federation_http_client=None) + hs = self.setup_test_homeserver("server") self.store = hs.get_datastores().main return hs diff --git a/tests/storage/test_purge.py b/tests/storage/test_purge.py index 857e2caf2e24..02826731679e 100644 --- a/tests/storage/test_purge.py +++ b/tests/storage/test_purge.py @@ -27,7 +27,7 @@ class PurgeTests(HomeserverTestCase): servlets = [room.register_servlets] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = self.setup_test_homeserver("server", federation_http_client=None) + hs = self.setup_test_homeserver("server") return hs def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: diff --git a/tests/storage/test_rollback_worker.py b/tests/storage/test_rollback_worker.py index 6861d3a6c9e5..809c9f175d42 100644 --- a/tests/storage/test_rollback_worker.py +++ b/tests/storage/test_rollback_worker.py @@ -45,9 +45,7 @@ def 
fake_listdir(filepath: str) -> List[str]: class WorkerSchemaTests(HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = self.setup_test_homeserver( - federation_http_client=None, homeserver_to_use=GenericWorkerServer - ) + hs = self.setup_test_homeserver(homeserver_to_use=GenericWorkerServer) return hs def default_config(self) -> JsonDict: diff --git a/tests/test_server.py b/tests/test_server.py index e266c06a2cbf..fe5afebdcda4 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -38,7 +38,7 @@ from tests.server import ( FakeChannel, FakeSite, - ThreadedMemoryReactorClock, + get_clock, make_request, setup_test_homeserver, ) @@ -46,12 +46,11 @@ class JsonResourceTests(unittest.TestCase): def setUp(self) -> None: - self.reactor = ThreadedMemoryReactorClock() - self.hs_clock = Clock(self.reactor) + reactor, clock = get_clock() + self.reactor = reactor self.homeserver = setup_test_homeserver( self.addCleanup, - federation_http_client=None, - clock=self.hs_clock, + clock=clock, reactor=self.reactor, ) @@ -209,7 +208,13 @@ def _callback( class OptionsResourceTests(unittest.TestCase): def setUp(self) -> None: - self.reactor = ThreadedMemoryReactorClock() + reactor, clock = get_clock() + self.reactor = reactor + self.homeserver = setup_test_homeserver( + self.addCleanup, + clock=clock, + reactor=self.reactor, + ) class DummyResource(Resource): isLeaf = True @@ -242,6 +247,7 @@ def _make_request( "1.0", max_request_body_size=4096, reactor=self.reactor, + federation_agent=self.homeserver.get_federation_http_client().agent, ) # render the request and return the channel @@ -344,7 +350,8 @@ async def _async_render_GET(self, request: SynapseRequest) -> None: await self.callback(request) def setUp(self) -> None: - self.reactor = ThreadedMemoryReactorClock() + reactor, _ = get_clock() + self.reactor = reactor def test_good_response(self) -> None: async def callback(request: SynapseRequest) -> None: @@ -462,9 +469,9 @@ class DirectServeJsonResourceCancellationTests(unittest.TestCase): """Tests for `DirectServeJsonResource` cancellation.""" def setUp(self) -> None: - self.reactor = ThreadedMemoryReactorClock() - self.clock = Clock(self.reactor) - self.resource = CancellableDirectServeJsonResource(self.clock) + reactor, clock = get_clock() + self.reactor = reactor + self.resource = CancellableDirectServeJsonResource(clock) self.site = FakeSite(self.resource, self.reactor) def test_cancellable_disconnect(self) -> None: @@ -496,9 +503,9 @@ class DirectServeHtmlResourceCancellationTests(unittest.TestCase): """Tests for `DirectServeHtmlResource` cancellation.""" def setUp(self) -> None: - self.reactor = ThreadedMemoryReactorClock() - self.clock = Clock(self.reactor) - self.resource = CancellableDirectServeHtmlResource(self.clock) + reactor, clock = get_clock() + self.reactor = reactor + self.resource = CancellableDirectServeHtmlResource(clock) self.site = FakeSite(self.resource, self.reactor) def test_cancellable_disconnect(self) -> None: diff --git a/tests/unittest.py b/tests/unittest.py index c73195b32b00..334a95a9175d 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -358,6 +358,7 @@ def setUp(self) -> None: server_version_string="1", max_request_body_size=4096, reactor=self.reactor, + federation_agent=self.hs.get_federation_http_client().agent, ) from tests.rest.client.utils import RestHelper From f19dd39dfc04d3533198d8a52fab6ed49e6d5066 Mon Sep 17 00:00:00 2001 From: sarthak shah <75999816+sarthakshah65@users.noreply.github.com> Date: 
Thu, 6 Jul 2023 20:58:09 +0530 Subject: [PATCH 132/153] Update link to the clients webpage, fix #15825 (#15874) --- changelog.d/15874.misc | 1 + synapse/static/index.html | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15874.misc diff --git a/changelog.d/15874.misc b/changelog.d/15874.misc new file mode 100644 index 000000000000..0d434bef5d92 --- /dev/null +++ b/changelog.d/15874.misc @@ -0,0 +1 @@ +Updated the link in home page from https://matrix.org/docs/projects/try-matrix-now.html#clients to https://matrix.org/ecosystem/clients/. diff --git a/synapse/static/index.html b/synapse/static/index.html index bf46df90978c..297a7877f325 100644 --- a/synapse/static/index.html +++ b/synapse/static/index.html @@ -48,7 +48,7 @@
       It works! Synapse is running
       Your Synapse server is listening on this port and is ready for messages.
-      To use this server you'll need <a href="https://matrix.org/docs/projects/try-matrix-now.html#clients">a Matrix client</a>.
+      To use this server you'll need <a href="https://matrix.org/ecosystem/clients/">a Matrix client</a>.
       Welcome to the Matrix universe :)

From 2481b7dfa41c1c890346136f04344a4e1660ef32 Mon Sep 17 00:00:00 2001 From: Jason Little Date: Fri, 7 Jul 2023 02:45:25 -0500 Subject: [PATCH 133/153] Remove `worker_replication_*` deprecated settings, with helpful errors on startup (#15860) Co-authored-by: reivilibre --- changelog.d/15860.removal | 1 + docs/upgrade.md | 15 ++++++ .../configuration/config_documentation.md | 45 ----------------- docs/workers.md | 3 -- synapse/config/workers.py | 50 +++++++++++-------- tests/app/test_homeserver_start.py | 6 +-- tests/config/test_workers.py | 27 +--------- 7 files changed, 49 insertions(+), 98 deletions(-) create mode 100644 changelog.d/15860.removal diff --git a/changelog.d/15860.removal b/changelog.d/15860.removal new file mode 100644 index 000000000000..1993bf02997b --- /dev/null +++ b/changelog.d/15860.removal @@ -0,0 +1 @@ +Remove deprecated `worker_replication_host`, `worker_replication_http_port` and `worker_replication_http_tls` configuration options. diff --git a/docs/upgrade.md b/docs/upgrade.md index 384f4010b445..b94d13c4daac 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -98,6 +98,21 @@ You will need Python 3.8 to run Synapse v1.88.0 (due out July 18th, 2023). If you use current versions of the Matrix.org-distributed Debian packages or Docker images, no action is required. +## Removal of `worker_replication_*` settings + +As mentioned previously in [Upgrading to v1.84.0](#upgrading-to-v1840), the following deprecated settings +are being removed in this release of Synapse: + +* [`worker_replication_host`](https://matrix-org.github.io/synapse/v1.86/usage/configuration/config_documentation.html#worker_replication_host) +* [`worker_replication_http_port`](https://matrix-org.github.io/synapse/v1.86/usage/configuration/config_documentation.html#worker_replication_http_port) +* [`worker_replication_http_tls`](https://matrix-org.github.io/synapse/v1.86/usage/configuration/config_documentation.html#worker_replication_http_tls) + +Please ensure that you have migrated to using `main` on your shared configuration's `instance_map` +(or create one if necessary). This is required if you have ***any*** workers at all; +administrators of single-process (monolith) installations don't need to do anything. + +For an illustrative example, please see [Upgrading to v1.84.0](#upgrading-to-v1840) below. + # Upgrading to v1.86.0 diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 89a92c4682be..04e8390ffedf 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -4107,51 +4107,6 @@ Example configuration: worker_name: generic_worker1 ``` --- -### `worker_replication_host` -*Deprecated as of version 1.84.0. Place `host` under `main` entry on the [`instance_map`](#instance_map) in your shared yaml configuration instead.* - -The HTTP replication endpoint that it should talk to on the main Synapse process. -The main Synapse process defines this with a `replication` resource in -[`listeners` option](#listeners). - -Example configuration: -```yaml -worker_replication_host: 127.0.0.1 -``` ---- -### `worker_replication_http_port` -*Deprecated as of version 1.84.0. Place `port` under `main` entry on the [`instance_map`](#instance_map) in your shared yaml configuration instead.* - -The HTTP replication port that it should talk to on the main Synapse process. -The main Synapse process defines this with a `replication` resource in -[`listeners` option](#listeners). 
- -Example configuration: -```yaml -worker_replication_http_port: 9093 -``` ---- -### `worker_replication_http_tls` -*Deprecated as of version 1.84.0. Place `tls` under `main` entry on the [`instance_map`](#instance_map) in your shared yaml configuration instead.* - -Whether TLS should be used for talking to the HTTP replication port on the main -Synapse process. -The main Synapse process defines this with the `tls` option on its [listener](#listeners) that -has the `replication` resource enabled. - -**Please note:** by default, it is not safe to expose replication ports to the -public Internet, even with TLS enabled. -See [`worker_replication_secret`](#worker_replication_secret). - -Defaults to `false`. - -*Added in Synapse 1.72.0.* - -Example configuration: -```yaml -worker_replication_http_tls: true -``` ---- ### `worker_listeners` A worker can handle HTTP requests. To do so, a `worker_listeners` option diff --git a/docs/workers.md b/docs/workers.md index 303e0f0e7a5a..03415c6eb3e4 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -145,9 +145,6 @@ In the config file for each worker, you must specify: with an `http` listener. * **Synapse 1.72 and older:** if handling the `^/_matrix/client/v3/keys/upload` endpoint, the HTTP URI for the main process (`worker_main_http_uri`). This config option is no longer required and is ignored when running Synapse 1.73 and newer. - * **Synapse 1.83 and older:** The HTTP replication endpoint that the worker should talk to on the main synapse process - ([`worker_replication_host`](usage/configuration/config_documentation.md#worker_replication_host) and - [`worker_replication_http_port`](usage/configuration/config_documentation.md#worker_replication_http_port)). If using Synapse 1.84 and newer, these are not needed if `main` is defined on the [shared configuration](#shared-configuration) `instance_map` For example: diff --git a/synapse/config/workers.py b/synapse/config/workers.py index 0b9789160cc0..5c81eb5c6744 100644 --- a/synapse/config/workers.py +++ b/synapse/config/workers.py @@ -41,11 +41,17 @@ _MISSING_MAIN_PROCESS_INSTANCE_MAP_DATA = """ Missing data for a worker to connect to main process. Please include '%s' in the -`instance_map` declared in your shared yaml configuration, or optionally(as a deprecated -solution) in every worker's yaml as various `worker_replication_*` settings as defined -in workers documentation here: +`instance_map` declared in your shared yaml configuration as defined in configuration +documentation here: +`https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#instance_map` +""" + +WORKER_REPLICATION_SETTING_DEPRECATED_MESSAGE = """ +'%s' is no longer a supported worker setting, please place '%s' onto your shared +configuration under `main` inside the `instance_map`. See workers documentation here: `https://matrix-org.github.io/synapse/latest/workers.html#worker-configuration` """ + # This allows for a handy knob when it's time to change from 'master' to # something with less 'history' MAIN_PROCESS_INSTANCE_NAME = "master" @@ -237,22 +243,37 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: ) # A map from instance name to host/port of their HTTP replication endpoint. - # Check if the main process is declared. Inject it into the map if it's not, - # based first on if a 'main' block is declared then on 'worker_replication_*' - # data. If both are available, default to instance_map. 
The main process - # itself doesn't need this data as it would never have to talk to itself. + # Check if the main process is declared. The main process itself doesn't need + # this data as it would never have to talk to itself. instance_map: Dict[str, Any] = config.get("instance_map", {}) if self.instance_name is not MAIN_PROCESS_INSTANCE_NAME: + # TODO: The next 3 condition blocks can be deleted after some time has + # passed and we're ready to stop checking for these settings. # The host used to connect to the main synapse main_host = config.get("worker_replication_host", None) + if main_host: + raise ConfigError( + WORKER_REPLICATION_SETTING_DEPRECATED_MESSAGE + % ("worker_replication_host", main_host) + ) # The port on the main synapse for HTTP replication endpoint main_port = config.get("worker_replication_http_port") + if main_port: + raise ConfigError( + WORKER_REPLICATION_SETTING_DEPRECATED_MESSAGE + % ("worker_replication_http_port", main_port) + ) # The tls mode on the main synapse for HTTP replication endpoint. # For backward compatibility this defaults to False. main_tls = config.get("worker_replication_http_tls", False) + if main_tls: + raise ConfigError( + WORKER_REPLICATION_SETTING_DEPRECATED_MESSAGE + % ("worker_replication_http_tls", main_tls) + ) # For now, accept 'main' in the instance_map, but the replication system # expects 'master', force that into being until it's changed later. @@ -262,22 +283,9 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: ] del instance_map[MAIN_PROCESS_INSTANCE_MAP_NAME] - # This is the backwards compatibility bit that handles the - # worker_replication_* bits using setdefault() to not overwrite anything. - elif main_host is not None and main_port is not None: - instance_map.setdefault( - MAIN_PROCESS_INSTANCE_NAME, - { - "host": main_host, - "port": main_port, - "tls": main_tls, - }, - ) - else: # If we've gotten here, it means that the main process is not on the - # instance_map and that not enough worker_replication_* variables - # were declared in the worker's yaml. + # instance_map. 
raise ConfigError( _MISSING_MAIN_PROCESS_INSTANCE_MAP_DATA % MAIN_PROCESS_INSTANCE_MAP_NAME diff --git a/tests/app/test_homeserver_start.py b/tests/app/test_homeserver_start.py index cd117b739473..0201933b04da 100644 --- a/tests/app/test_homeserver_start.py +++ b/tests/app/test_homeserver_start.py @@ -25,9 +25,9 @@ def test_wrong_start_caught(self) -> None: # Add a blank line as otherwise the next addition ends up on a line with a comment self.add_lines_to_config([" "]) self.add_lines_to_config(["worker_app: test_worker_app"]) - self.add_lines_to_config(["worker_replication_host: 127.0.0.1"]) - self.add_lines_to_config(["worker_replication_http_port: 0"]) - + self.add_lines_to_config(["worker_log_config: /data/logconfig.config"]) + self.add_lines_to_config(["instance_map:"]) + self.add_lines_to_config([" main:", " host: 127.0.0.1", " port: 1234"]) # Ensure that starting master process with worker config raises an exception with self.assertRaises(ConfigError): synapse.app.homeserver.setup(["-c", self.config_file]) diff --git a/tests/config/test_workers.py b/tests/config/test_workers.py index 086359fd71a2..2a643ae4f3f3 100644 --- a/tests/config/test_workers.py +++ b/tests/config/test_workers.py @@ -17,7 +17,7 @@ from immutabledict import immutabledict from synapse.config import ConfigError -from synapse.config.workers import InstanceLocationConfig, WorkerConfig +from synapse.config.workers import WorkerConfig from tests.unittest import TestCase @@ -323,28 +323,3 @@ def test_worker_duty_configs(self) -> None: ) self.assertTrue(worker2_config.should_notify_appservices) self.assertFalse(worker2_config.should_update_user_directory) - - def test_worker_instance_map_compat(self) -> None: - """ - Test that `worker_replication_*` settings are compatibly handled by - adding them to the instance map as a `main` entry. - """ - - worker1_config = self._make_worker_config( - worker_app="synapse.app.generic_worker", - worker_name="worker1", - extras={ - "notify_appservices_from_worker": "worker2", - "update_user_directory_from_worker": "worker1", - "worker_replication_host": "127.0.0.42", - "worker_replication_http_port": 1979, - }, - ) - self.assertEqual( - worker1_config.instance_map, - { - "master": InstanceLocationConfig( - host="127.0.0.42", port=1979, tls=False - ), - }, - ) From 677272caed43b1c534e5e779b5261327711178a5 Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Fri, 7 Jul 2023 10:09:41 +0200 Subject: [PATCH 134/153] Remove `worker_replication_*` settings from worker doc (#15872) Co-authored-by: Mathieu Velten --- changelog.d/15872.doc | 1 + docs/systemd-with-workers/workers/background_worker.yaml | 4 ---- docs/systemd-with-workers/workers/event_persister.yaml | 6 +----- docs/systemd-with-workers/workers/federation_sender.yaml | 4 ---- docs/systemd-with-workers/workers/media_worker.yaml | 4 ---- docs/systemd-with-workers/workers/pusher_worker.yaml | 4 ---- 6 files changed, 2 insertions(+), 21 deletions(-) create mode 100644 changelog.d/15872.doc diff --git a/changelog.d/15872.doc b/changelog.d/15872.doc new file mode 100644 index 000000000000..1993bf02997b --- /dev/null +++ b/changelog.d/15872.doc @@ -0,0 +1 @@ +Remove deprecated `worker_replication_host`, `worker_replication_http_port` and `worker_replication_http_tls` configuration options. 
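For illustration (this block is an editorial sketch, not part of the patch): once the `worker_replication_*` options are gone, the worker YAML files edited below no longer point at the main process at all; instead the shared configuration's `instance_map` must carry a `main` entry, as the upgrade notes above describe. The sketch reuses the host and port from the removed documentation examples and assumes PyYAML is installed.

```python
# Sketch of the replacement configuration shape; 127.0.0.1:9093 mirrors the
# old documented example values and is not prescriptive.
import yaml  # assumes PyYAML is available

shared_config = yaml.safe_load(
    """
instance_map:
  main:
    host: 127.0.0.1
    port: 9093
"""
)

# Worker deployments are now expected to declare `main` here; the removed
# worker_replication_* options instead fail at startup with a ConfigError.
assert "main" in shared_config["instance_map"]
```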
diff --git a/docs/systemd-with-workers/workers/background_worker.yaml b/docs/systemd-with-workers/workers/background_worker.yaml index 9fbfbda7db94..e236e10458fb 100644 --- a/docs/systemd-with-workers/workers/background_worker.yaml +++ b/docs/systemd-with-workers/workers/background_worker.yaml @@ -1,8 +1,4 @@ worker_app: synapse.app.generic_worker worker_name: background_worker -# The replication listener on the main synapse process. -worker_replication_host: 127.0.0.1 -worker_replication_http_port: 9093 - worker_log_config: /etc/matrix-synapse/background-worker-log.yaml diff --git a/docs/systemd-with-workers/workers/event_persister.yaml b/docs/systemd-with-workers/workers/event_persister.yaml index c11d5897b18e..018133772cd0 100644 --- a/docs/systemd-with-workers/workers/event_persister.yaml +++ b/docs/systemd-with-workers/workers/event_persister.yaml @@ -1,9 +1,5 @@ worker_app: synapse.app.generic_worker -worker_name: event_persister1 - -# The replication listener on the main synapse process. -worker_replication_host: 127.0.0.1 -worker_replication_http_port: 9093 +worker_name: event_persister1 worker_listeners: - type: http diff --git a/docs/systemd-with-workers/workers/federation_sender.yaml b/docs/systemd-with-workers/workers/federation_sender.yaml index 5c591aec2c3c..05b8e79e2708 100644 --- a/docs/systemd-with-workers/workers/federation_sender.yaml +++ b/docs/systemd-with-workers/workers/federation_sender.yaml @@ -1,8 +1,4 @@ worker_app: synapse.app.federation_sender worker_name: federation_sender1 -# The replication listener on the main synapse process. -worker_replication_host: 127.0.0.1 -worker_replication_http_port: 9093 - worker_log_config: /etc/matrix-synapse/federation-sender-log.yaml diff --git a/docs/systemd-with-workers/workers/media_worker.yaml b/docs/systemd-with-workers/workers/media_worker.yaml index 8ad046f11a5b..6491959845ac 100644 --- a/docs/systemd-with-workers/workers/media_worker.yaml +++ b/docs/systemd-with-workers/workers/media_worker.yaml @@ -1,10 +1,6 @@ worker_app: synapse.app.media_repository worker_name: media_worker -# The replication listener on the main synapse process. -worker_replication_host: 127.0.0.1 -worker_replication_http_port: 9093 - worker_listeners: - type: http port: 8085 diff --git a/docs/systemd-with-workers/workers/pusher_worker.yaml b/docs/systemd-with-workers/workers/pusher_worker.yaml index 46e22c6f06d6..de91d03ec07e 100644 --- a/docs/systemd-with-workers/workers/pusher_worker.yaml +++ b/docs/systemd-with-workers/workers/pusher_worker.yaml @@ -1,8 +1,4 @@ worker_app: synapse.app.pusher worker_name: pusher_worker1 -# The replication listener on the main synapse process. 
-worker_replication_host: 127.0.0.1 -worker_replication_http_port: 9093 - worker_log_config: /etc/matrix-synapse/pusher-worker-log.yaml From f25b0f88081bb436bef914983cff7087b54eba5f Mon Sep 17 00:00:00 2001 From: Shay Date: Fri, 7 Jul 2023 09:23:27 -0700 Subject: [PATCH 135/153] Stop writing to column `user_id` of tables `profiles` and `user_filters` (#15787) --- changelog.d/15787.misc | 1 + synapse/storage/database.py | 2 + synapse/storage/databases/main/__init__.py | 6 +- synapse/storage/databases/main/filtering.py | 5 +- synapse/storage/databases/main/profile.py | 12 +-- synapse/storage/schema/__init__.py | 9 +- .../79/01_drop_user_id_constraint_profiles.py | 50 ++++++++++ ...02_drop_user_id_constraint_user_filters.py | 54 +++++++++++ tests/storage/test_profile.py | 63 ------------- tests/storage/test_user_filters.py | 94 ------------------- 10 files changed, 123 insertions(+), 173 deletions(-) create mode 100644 changelog.d/15787.misc create mode 100644 synapse/storage/schema/main/delta/79/01_drop_user_id_constraint_profiles.py create mode 100644 synapse/storage/schema/main/delta/79/02_drop_user_id_constraint_user_filters.py delete mode 100644 tests/storage/test_user_filters.py diff --git a/changelog.d/15787.misc b/changelog.d/15787.misc new file mode 100644 index 000000000000..bd7536d36ec1 --- /dev/null +++ b/changelog.d/15787.misc @@ -0,0 +1 @@ +Stop writing to column `user_id` of tables `profiles` and `user_filters`. diff --git a/synapse/storage/database.py b/synapse/storage/database.py index a1c8fb0f46a4..c9d687fb2faf 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -98,6 +98,8 @@ "event_push_summary": "event_push_summary_unique_index2", "receipts_linearized": "receipts_linearized_unique_index", "receipts_graph": "receipts_graph_unique_index", + "profiles": "profiles_full_user_id_key_idx", + "user_filters": "full_users_filters_unique_idx", } diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py index 80c0304b1917..b6028853c939 100644 --- a/synapse/storage/databases/main/__init__.py +++ b/synapse/storage/databases/main/__init__.py @@ -15,7 +15,7 @@ # limitations under the License. import logging -from typing import TYPE_CHECKING, List, Optional, Tuple, cast +from typing import TYPE_CHECKING, List, Optional, Tuple, Union, cast from synapse.api.constants import Direction from synapse.config.homeserver import HomeServerConfig @@ -196,7 +196,7 @@ def get_users_paginate_txn( txn: LoggingTransaction, ) -> Tuple[List[JsonDict], int]: filters = [] - args = [self.hs.config.server.server_name] + args: List[Union[str, int]] = [] # Set ordering order_by_column = UserSortOrder(order_by).value @@ -263,7 +263,7 @@ def get_users_paginate_txn( sql_base = f""" FROM users as u - LEFT JOIN profiles AS p ON u.name = '@' || p.user_id || ':' || ? 
+ LEFT JOIN profiles AS p ON u.name = p.full_user_id LEFT JOIN erased_users AS eu ON u.name = eu.user_id {where_clause} """ diff --git a/synapse/storage/databases/main/filtering.py b/synapse/storage/databases/main/filtering.py index fff417f9e3bb..75f7fe875651 100644 --- a/synapse/storage/databases/main/filtering.py +++ b/synapse/storage/databases/main/filtering.py @@ -188,14 +188,13 @@ def _do_txn(txn: LoggingTransaction) -> int: filter_id = max_id + 1 sql = ( - "INSERT INTO user_filters (full_user_id, user_id, filter_id, filter_json)" - "VALUES(?, ?, ?, ?)" + "INSERT INTO user_filters (full_user_id, filter_id, filter_json)" + "VALUES(?, ?, ?)" ) txn.execute( sql, ( user_id.to_string(), - user_id.localpart, filter_id, bytearray(def_json), ), diff --git a/synapse/storage/databases/main/profile.py b/synapse/storage/databases/main/profile.py index 3ba9cc88537a..660a5507b75a 100644 --- a/synapse/storage/databases/main/profile.py +++ b/synapse/storage/databases/main/profile.py @@ -173,10 +173,9 @@ async def get_profile_avatar_url(self, user_id: UserID) -> Optional[str]: ) async def create_profile(self, user_id: UserID) -> None: - user_localpart = user_id.localpart await self.db_pool.simple_insert( table="profiles", - values={"user_id": user_localpart, "full_user_id": user_id.to_string()}, + values={"full_user_id": user_id.to_string()}, desc="create_profile", ) @@ -191,13 +190,11 @@ async def set_profile_displayname( new_displayname: The new display name. If this is None, the user's display name is removed. """ - user_localpart = user_id.localpart await self.db_pool.simple_upsert( table="profiles", - keyvalues={"user_id": user_localpart}, + keyvalues={"full_user_id": user_id.to_string()}, values={ "displayname": new_displayname, - "full_user_id": user_id.to_string(), }, desc="set_profile_displayname", ) @@ -213,11 +210,10 @@ async def set_profile_avatar_url( new_avatar_url: The new avatar URL. If this is None, the user's avatar is removed. """ - user_localpart = user_id.localpart await self.db_pool.simple_upsert( table="profiles", - keyvalues={"user_id": user_localpart}, - values={"avatar_url": new_avatar_url, "full_user_id": user_id.to_string()}, + keyvalues={"full_user_id": user_id.to_string()}, + values={"avatar_url": new_avatar_url}, desc="set_profile_avatar_url", ) diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index fc190a8b13cf..6d14963c0abe 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
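As an aside to the storage changes in this patch (not part of the diff itself): the query above can switch to joining on `p.full_user_id` because the legacy `user_id` column only held the localpart, so the full Matrix ID previously had to be reassembled with the server name at query time. A minimal sketch of the relationship, using hypothetical names:

```python
# Hypothetical values, for illustration only.
server_name = "example.com"
localpart = "alice"                           # what the legacy `user_id` column held
full_user_id = f"@{localpart}:{server_name}"  # what `full_user_id` holds

# The old query rebuilt the MXID as '@' || user_id || ':' || server_name;
# storing the complete MXID makes that concatenation unnecessary.
assert full_user_id == "@alice:example.com"
```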
-SCHEMA_VERSION = 78 # remember to update the list below when updating +SCHEMA_VERSION = 79 # remember to update the list below when updating """Represents the expectations made by the codebase about the database schema This should be incremented whenever the codebase changes its requirements on the @@ -106,6 +106,9 @@ Changes in SCHEMA_VERSION = 78 - Validate check (full_user_id IS NOT NULL) on tables profiles and user_filters + +Changes in SCHEMA_VERSION = 79 + - We no longer write to column user_id of tables profiles and user_filters """ @@ -118,7 +121,9 @@ # # insertions to the column `full_user_id` of tables profiles and user_filters can no # longer be null - 76 + # + # we no longer write to column `full_user_id` of tables profiles and user_filters + 78 ) """Limit on how far the synapse codebase can be rolled back without breaking db compat diff --git a/synapse/storage/schema/main/delta/79/01_drop_user_id_constraint_profiles.py b/synapse/storage/schema/main/delta/79/01_drop_user_id_constraint_profiles.py new file mode 100644 index 000000000000..3541266f7db4 --- /dev/null +++ b/synapse/storage/schema/main/delta/79/01_drop_user_id_constraint_profiles.py @@ -0,0 +1,50 @@ +from synapse.storage.database import LoggingTransaction +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine + + +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: + """ + Update to drop the NOT NULL constraint on column user_id so that we can cease to + write to it without inserts to other columns triggering the constraint + """ + + if isinstance(database_engine, PostgresEngine): + drop_sql = """ + ALTER TABLE profiles ALTER COLUMN user_id DROP NOT NULL + """ + cur.execute(drop_sql) + else: + # irritatingly in SQLite we need to rewrite the table to drop the constraint. + cur.execute("DROP TABLE IF EXISTS temp_profiles") + + create_sql = """ + CREATE TABLE temp_profiles ( + full_user_id text NOT NULL, + user_id text, + displayname text, + avatar_url text, + UNIQUE (full_user_id), + UNIQUE (user_id) + ) + """ + cur.execute(create_sql) + + copy_sql = """ + INSERT INTO temp_profiles ( + user_id, + displayname, + avatar_url, + full_user_id) + SELECT user_id, displayname, avatar_url, full_user_id FROM profiles + """ + cur.execute(copy_sql) + + drop_sql = """ + DROP TABLE profiles + """ + cur.execute(drop_sql) + + rename_sql = """ + ALTER TABLE temp_profiles RENAME to profiles + """ + cur.execute(rename_sql) diff --git a/synapse/storage/schema/main/delta/79/02_drop_user_id_constraint_user_filters.py b/synapse/storage/schema/main/delta/79/02_drop_user_id_constraint_user_filters.py new file mode 100644 index 000000000000..8e7569c47073 --- /dev/null +++ b/synapse/storage/schema/main/delta/79/02_drop_user_id_constraint_user_filters.py @@ -0,0 +1,54 @@ +from synapse.storage.database import LoggingTransaction +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine + + +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: + """ + Update to drop the NOT NULL constraint on column user_id so that we can cease to + write to it without inserts to other columns triggering the constraint + """ + if isinstance(database_engine, PostgresEngine): + drop_sql = """ + ALTER TABLE user_filters ALTER COLUMN user_id DROP NOT NULL + """ + cur.execute(drop_sql) + + else: + # irritatingly in SQLite we need to rewrite the table to drop the constraint. 
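For context on the step below (an editorial sketch, not part of the new delta file): SQLite has no `ALTER TABLE ... DROP NOT NULL`, so the migration recreates the table and copies the rows across. The same copy-and-rename pattern, reduced to a throwaway in-memory database with simplified stand-in columns, looks roughly like this:

```python
# Minimal demonstration of the "rewrite the table" pattern; the schema here is
# a simplified stand-in, not the real user_filters definition.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (user_id TEXT NOT NULL, filter_id INTEGER NOT NULL)")
conn.execute("INSERT INTO t VALUES ('alice', 1)")

# 1. Create a replacement table without the unwanted NOT NULL constraint.
conn.execute("CREATE TABLE temp_t (user_id TEXT, filter_id INTEGER NOT NULL)")
# 2. Copy the existing rows across.
conn.execute("INSERT INTO temp_t (user_id, filter_id) SELECT user_id, filter_id FROM t")
# 3. Drop the old table and rename the replacement into place.
conn.execute("DROP TABLE t")
conn.execute("ALTER TABLE temp_t RENAME TO t")

# The rewritten table now accepts NULLs in user_id.
conn.execute("INSERT INTO t (user_id, filter_id) VALUES (NULL, 2)")
print(conn.execute("SELECT * FROM t").fetchall())
```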
+ cur.execute("DROP TABLE IF EXISTS temp_user_filters") + + create_sql = """ + CREATE TABLE temp_user_filters ( + full_user_id text NOT NULL, + user_id text, + filter_id bigint NOT NULL, + filter_json bytea NOT NULL + ) + """ + cur.execute(create_sql) + + index_sql = """ + CREATE UNIQUE INDEX IF NOT EXISTS user_filters_full_user_id_unique ON + temp_user_filters (full_user_id, filter_id) + """ + cur.execute(index_sql) + + copy_sql = """ + INSERT INTO temp_user_filters ( + user_id, + filter_id, + filter_json, + full_user_id) + SELECT user_id, filter_id, filter_json, full_user_id FROM user_filters + """ + cur.execute(copy_sql) + + drop_sql = """ + DROP TABLE user_filters + """ + cur.execute(drop_sql) + + rename_sql = """ + ALTER TABLE temp_user_filters RENAME to user_filters + """ + cur.execute(rename_sql) diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py index fe5bb7791336..bbe8bd88bc5b 100644 --- a/tests/storage/test_profile.py +++ b/tests/storage/test_profile.py @@ -15,8 +15,6 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.server import HomeServer -from synapse.storage.database import LoggingTransaction -from synapse.storage.engines import PostgresEngine from synapse.types import UserID from synapse.util import Clock @@ -64,64 +62,3 @@ def test_avatar_url(self) -> None: self.assertIsNone( self.get_success(self.store.get_profile_avatar_url(self.u_frank)) ) - - def test_profiles_bg_migration(self) -> None: - """ - Test background job that copies entries from column user_id to full_user_id, adding - the hostname in the process. - """ - updater = self.hs.get_datastores().main.db_pool.updates - - # drop the constraint so we can insert nulls in full_user_id to populate the test - if isinstance(self.store.database_engine, PostgresEngine): - - def f(txn: LoggingTransaction) -> None: - txn.execute( - "ALTER TABLE profiles DROP CONSTRAINT full_user_id_not_null" - ) - - self.get_success(self.store.db_pool.runInteraction("", f)) - - for i in range(0, 70): - self.get_success( - self.store.db_pool.simple_insert( - "profiles", - {"user_id": f"hello{i:02}"}, - ) - ) - - # re-add the constraint so that when it's validated it actually exists - if isinstance(self.store.database_engine, PostgresEngine): - - def f(txn: LoggingTransaction) -> None: - txn.execute( - "ALTER TABLE profiles ADD CONSTRAINT full_user_id_not_null CHECK (full_user_id IS NOT NULL) NOT VALID" - ) - - self.get_success(self.store.db_pool.runInteraction("", f)) - - self.get_success( - self.store.db_pool.simple_insert( - "background_updates", - values={ - "update_name": "populate_full_user_id_profiles", - "progress_json": "{}", - }, - ) - ) - - self.get_success( - updater.run_background_updates(False), - ) - - expected_values = [] - for i in range(0, 70): - expected_values.append((f"@hello{i:02}:{self.hs.hostname}",)) - - res = self.get_success( - self.store.db_pool.execute( - "", None, "SELECT full_user_id from profiles ORDER BY full_user_id" - ) - ) - self.assertEqual(len(res), len(expected_values)) - self.assertEqual(res, expected_values) diff --git a/tests/storage/test_user_filters.py b/tests/storage/test_user_filters.py deleted file mode 100644 index bab802f56ec6..000000000000 --- a/tests/storage/test_user_filters.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright 2023 The Matrix.org Foundation C.I.C -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from twisted.test.proto_helpers import MemoryReactor - -from synapse.server import HomeServer -from synapse.storage.database import LoggingTransaction -from synapse.storage.engines import PostgresEngine -from synapse.util import Clock - -from tests import unittest - - -class UserFiltersStoreTestCase(unittest.HomeserverTestCase): - """ - Test background migration that copies entries from column user_id to full_user_id, adding - the hostname in the process. - """ - - def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: - self.store = hs.get_datastores().main - - def test_bg_migration(self) -> None: - updater = self.hs.get_datastores().main.db_pool.updates - - # drop the constraint so we can insert nulls in full_user_id to populate the test - if isinstance(self.store.database_engine, PostgresEngine): - - def f(txn: LoggingTransaction) -> None: - txn.execute( - "ALTER TABLE user_filters DROP CONSTRAINT full_user_id_not_null" - ) - - self.get_success(self.store.db_pool.runInteraction("", f)) - - for i in range(0, 70): - self.get_success( - self.store.db_pool.simple_insert( - "user_filters", - { - "user_id": f"hello{i:02}", - "filter_id": i, - "filter_json": bytearray(i), - }, - ) - ) - - # re-add the constraint so that when it's validated it actually exists - if isinstance(self.store.database_engine, PostgresEngine): - - def f(txn: LoggingTransaction) -> None: - txn.execute( - "ALTER TABLE user_filters ADD CONSTRAINT full_user_id_not_null CHECK (full_user_id IS NOT NULL) NOT VALID" - ) - - self.get_success(self.store.db_pool.runInteraction("", f)) - - self.get_success( - self.store.db_pool.simple_insert( - "background_updates", - values={ - "update_name": "populate_full_user_id_user_filters", - "progress_json": "{}", - }, - ) - ) - - self.get_success( - updater.run_background_updates(False), - ) - - expected_values = [] - for i in range(0, 70): - expected_values.append((f"@hello{i:02}:{self.hs.hostname}",)) - - res = self.get_success( - self.store.db_pool.execute( - "", None, "SELECT full_user_id from user_filters ORDER BY full_user_id" - ) - ) - self.assertEqual(len(res), len(expected_values)) - self.assertEqual(res, expected_values) From 8a529e4fb65bb29121f9a0474da174c7b5ffac64 Mon Sep 17 00:00:00 2001 From: Shay Date: Fri, 7 Jul 2023 12:04:55 -0700 Subject: [PATCH 136/153] Stop running sytest on buster/python3.7 (#15892) --- .ci/scripts/calculate_jobs.py | 5 ----- changelog.d/15892.misc | 1 + 2 files changed, 1 insertion(+), 5 deletions(-) create mode 100644 changelog.d/15892.misc diff --git a/.ci/scripts/calculate_jobs.py b/.ci/scripts/calculate_jobs.py index c2c18b48e3d7..50e11e6504ff 100755 --- a/.ci/scripts/calculate_jobs.py +++ b/.ci/scripts/calculate_jobs.py @@ -134,11 +134,6 @@ def set_output(key: str, value: str): "sytest-tag": "testing", "postgres": "postgres", }, - { - "sytest-tag": "buster", - "postgres": "multi-postgres", - "workers": "workers", - }, ] ) diff --git a/changelog.d/15892.misc b/changelog.d/15892.misc new file mode 100644 index 000000000000..e5a123d21864 --- /dev/null +++ b/changelog.d/15892.misc @@ -0,0 +1 @@ +Stop running 
sytest on buster/python3.7. From df8c8a4f45df909fb6d19ecca5048ad30abf5531 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Jul 2023 10:24:11 +0100 Subject: [PATCH 137/153] Bump lxml from 4.9.2 to 4.9.3 (#15897) Bumps [lxml](https://github.com/lxml/lxml) from 4.9.2 to 4.9.3. - [Release notes](https://github.com/lxml/lxml/releases) - [Changelog](https://github.com/lxml/lxml/blob/master/CHANGES.txt) - [Commits](https://github.com/lxml/lxml/compare/lxml-4.9.2...lxml-4.9.3) --- updated-dependencies: - dependency-name: lxml dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 173 ++++++++++++++++++++++++++++------------------------ 1 file changed, 93 insertions(+), 80 deletions(-) diff --git a/poetry.lock b/poetry.lock index c62337053e70..fc6892d80bf8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1073,95 +1073,108 @@ pyasn1 = ">=0.4.6" [[package]] name = "lxml" -version = "4.9.2" +version = "4.9.3" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ - {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"}, - {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"}, - {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"}, - {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"}, - {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"}, - {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"}, - {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"}, - {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"}, - {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"}, - {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"}, - {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"}, - {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"}, - {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"}, - {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"}, - {file = 
"lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"}, - {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"}, - {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"}, - {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"}, - {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"}, - {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"}, - {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"}, - {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"}, - {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, - {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, - {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, - {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"}, - {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"}, - {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, - {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, - {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, - {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"}, - {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, - {file = 
"lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"}, - {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"}, - {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"}, - {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"}, - {file = "lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"}, - {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"}, - {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"}, - {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"}, - {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"}, - {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"}, - {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"}, - {file = 
"lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"}, - {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"}, - {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"}, - {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"}, - {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"}, - {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"}, - {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"}, - {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"}, - {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"}, - {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"}, - {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"}, - {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"}, - {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"}, - {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"}, - {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"}, - {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"}, + {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, + {file = 
"lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, + {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, + {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, + {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, + {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, + {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, + {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, + {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, + {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, + {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, + {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, + {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, + {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, + {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", 
hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, + {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, + {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, + {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, + {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, + {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, + {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = 
"sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, + {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, + {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, + {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, ] [package.extras] cssselect = ["cssselect (>=0.7)"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.7)"] +source = ["Cython (>=0.29.35)"] [[package]] name = "lxml-stubs" @@ -3243,4 +3256,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "0832381cc9e7065e8d95c810d732aa031b98d55cf188719989b12d841993e62e" +content-hash = "448c0d2b9815e67ac32494f1abf7b5ec9f22883833476a6942f85b3ba29ec9a6" From 3710fea19d50be1a0e83e843bde718c6c291539f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Jul 2023 10:16:03 +0000 Subject: [PATCH 138/153] Bump ruff from 0.0.275 to 0.0.277 (#15900) Bumps [ruff](https://github.com/astral-sh/ruff) from 0.0.275 to 0.0.277. 
- [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/BREAKING_CHANGES.md) - [Commits](https://github.com/astral-sh/ruff/compare/v0.0.275...v0.0.277) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 38 +++++++++++++++++++------------------- pyproject.toml | 2 +- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index fc6892d80bf8..b903fdc9aee5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2242,28 +2242,28 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.0.275" +version = "0.0.277" description = "An extremely fast Python linter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.0.275-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5e6554a072e7ce81eb6f0bec1cebd3dcb0e358652c0f4900d7d630d61691e914"}, - {file = "ruff-0.0.275-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:1cc599022fe5ffb143a965b8d659eb64161ab8ab4433d208777eab018a1aab67"}, - {file = "ruff-0.0.275-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5206fc1cd8c1c1deadd2e6360c0dbcd690f1c845da588ca9d32e4a764a402c60"}, - {file = "ruff-0.0.275-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c4e6468da26f77b90cae35319d310999f471a8c352998e9b39937a23750149e"}, - {file = "ruff-0.0.275-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0dbdea02942131dbc15dd45f431d152224f15e1dd1859fcd0c0487b658f60f1a"}, - {file = "ruff-0.0.275-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:22efd9f41af27ef8fb9779462c46c35c89134d33e326c889971e10b2eaf50c63"}, - {file = "ruff-0.0.275-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c09662112cfa22d7467a19252a546291fd0eae4f423e52b75a7a2000a1894db"}, - {file = "ruff-0.0.275-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80043726662144876a381efaab88841c88e8df8baa69559f96b22d4fa216bef1"}, - {file = "ruff-0.0.275-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5859ee543b01b7eb67835dfd505faa8bb7cc1550f0295c92c1401b45b42be399"}, - {file = "ruff-0.0.275-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c8ace4d40a57b5ea3c16555f25a6b16bc5d8b2779ae1912ce2633543d4e9b1da"}, - {file = "ruff-0.0.275-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8347fc16aa185aae275906c4ac5b770e00c896b6a0acd5ba521f158801911998"}, - {file = "ruff-0.0.275-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ec43658c64bfda44fd84bbea9da8c7a3b34f65448192d1c4dd63e9f4e7abfdd4"}, - {file = "ruff-0.0.275-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:508b13f7ca37274cceaba4fb3ea5da6ca192356323d92acf39462337c33ad14e"}, - {file = "ruff-0.0.275-py3-none-win32.whl", hash = "sha256:6afb1c4422f24f361e877937e2a44b3f8176774a476f5e33845ebfe887dd5ec2"}, - {file = "ruff-0.0.275-py3-none-win_amd64.whl", hash = "sha256:d9b264d78621bf7b698b6755d4913ab52c19bd28bee1a16001f954d64c1a1220"}, - {file = "ruff-0.0.275-py3-none-win_arm64.whl", hash = "sha256:a19ce3bea71023eee5f0f089dde4a4272d088d5ac0b675867e074983238ccc65"}, - {file = "ruff-0.0.275.tar.gz", hash = "sha256:a63a0b645da699ae5c758fce19188e901b3033ec54d862d93fcd042addf7f38d"}, + {file = 
"ruff-0.0.277-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:3250b24333ef419b7a232080d9724ccc4d2da1dbbe4ce85c4caa2290d83200f8"}, + {file = "ruff-0.0.277-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:3e60605e07482183ba1c1b7237eca827bd6cbd3535fe8a4ede28cbe2a323cb97"}, + {file = "ruff-0.0.277-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7baa97c3d7186e5ed4d5d4f6834d759a27e56cf7d5874b98c507335f0ad5aadb"}, + {file = "ruff-0.0.277-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:74e4b206cb24f2e98a615f87dbe0bde18105217cbcc8eb785bb05a644855ba50"}, + {file = "ruff-0.0.277-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:479864a3ccd8a6a20a37a6e7577bdc2406868ee80b1e65605478ad3b8eb2ba0b"}, + {file = "ruff-0.0.277-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:468bfb0a7567443cec3d03cf408d6f562b52f30c3c29df19927f1e0e13a40cd7"}, + {file = "ruff-0.0.277-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f32ec416c24542ca2f9cc8c8b65b84560530d338aaf247a4a78e74b99cd476b4"}, + {file = "ruff-0.0.277-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14a7b2f00f149c5a295f188a643ac25226ff8a4d08f7a62b1d4b0a1dc9f9b85c"}, + {file = "ruff-0.0.277-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9879f59f763cc5628aa01c31ad256a0f4dc61a29355c7315b83c2a5aac932b5"}, + {file = "ruff-0.0.277-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f612e0a14b3d145d90eb6ead990064e22f6f27281d847237560b4e10bf2251f3"}, + {file = "ruff-0.0.277-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:323b674c98078be9aaded5b8b51c0d9c424486566fb6ec18439b496ce79e5998"}, + {file = "ruff-0.0.277-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3a43fbe026ca1a2a8c45aa0d600a0116bec4dfa6f8bf0c3b871ecda51ef2b5dd"}, + {file = "ruff-0.0.277-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:734165ea8feb81b0d53e3bf523adc2413fdb76f1264cde99555161dd5a725522"}, + {file = "ruff-0.0.277-py3-none-win32.whl", hash = "sha256:88d0f2afb2e0c26ac1120e7061ddda2a566196ec4007bd66d558f13b374b9efc"}, + {file = "ruff-0.0.277-py3-none-win_amd64.whl", hash = "sha256:6fe81732f788894a00f6ade1fe69e996cc9e485b7c35b0f53fb00284397284b2"}, + {file = "ruff-0.0.277-py3-none-win_arm64.whl", hash = "sha256:2d4444c60f2e705c14cd802b55cd2b561d25bf4311702c463a002392d3116b22"}, + {file = "ruff-0.0.277.tar.gz", hash = "sha256:2dab13cdedbf3af6d4427c07f47143746b6b95d9e4a254ac369a0edb9280a0d2"}, ] [[package]] @@ -3256,4 +3256,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "448c0d2b9815e67ac32494f1abf7b5ec9f22883833476a6942f85b3ba29ec9a6" +content-hash = "0a8c6605e7e1d0ac7188a5d02b47a029bfb0f917458b87cb40755911442383d8" diff --git a/pyproject.toml b/pyproject.toml index a6e3a935a9c2..fc1b8c0dadd2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -309,7 +309,7 @@ all = [ # We pin black so that our tests don't start failing on new releases. isort = ">=5.10.1" black = ">=22.3.0" -ruff = "0.0.275" +ruff = "0.0.277" # Typechecking lxml-stubs = ">=0.4.0" From 7477f43fd8aa8dd6eb46ce88bc76105f34abf3d2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Jul 2023 15:34:26 +0200 Subject: [PATCH 139/153] Bump serde_json from 1.0.99 to 1.0.100 (#15901) Bumps [serde_json](https://github.com/serde-rs/json) from 1.0.99 to 1.0.100. 
- [Release notes](https://github.com/serde-rs/json/releases) - [Commits](https://github.com/serde-rs/json/compare/v1.0.99...v1.0.100) --- updated-dependencies: - dependency-name: serde_json dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 52f911277ebd..172a8dc9133f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -182,9 +182,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.52" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d0e1ae9e836cc3beddd63db0df682593d7e2d3d891ae8c9083d2113e1744224" +checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da" dependencies = [ "unicode-ident", ] @@ -273,9 +273,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.26" +version = "1.0.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" +checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" dependencies = [ "proc-macro2", ] @@ -320,29 +320,29 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "serde" -version = "1.0.164" +version = "1.0.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d" +checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.164" +version = "1.0.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" +checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682" dependencies = [ "proc-macro2", "quote", - "syn 2.0.10", + "syn 2.0.25", ] [[package]] name = "serde_json" -version = "1.0.99" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46266871c240a00b8f503b877622fe33430b3c7d963bdc0f2adc511e54a1eae3" +checksum = "0f1e14e89be7aa4c4b78bdbdc9eb5bf8517829a600ae8eaa39a6e1d960b5185c" dependencies = [ "itoa", "ryu", @@ -374,9 +374,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.10" +version = "2.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aad1363ed6d37b84299588d62d3a7d95b5a5c2d9aad5c85609fda12afaa1f40" +checksum = "15e3fc8c0c74267e2df136e5e5fb656a464158aa57624053375eb9c8c6e25ae2" dependencies = [ "proc-macro2", "quote", From c971698bff14c18d2b51440b9cbb9474f81c2feb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Jul 2023 15:55:39 +0100 Subject: [PATCH 140/153] Bump regex from 1.8.4 to 1.9.1 (#15902) Bumps [regex](https://github.com/rust-lang/regex) from 1.8.4 to 1.9.1. - [Release notes](https://github.com/rust-lang/regex/releases) - [Changelog](https://github.com/rust-lang/regex/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/regex/compare/1.8.4...1.9.1) --- updated-dependencies: - dependency-name: regex dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 172a8dc9133f..d6fa96ea7761 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -291,9 +291,21 @@ dependencies = [ [[package]] name = "regex" -version = "1.8.4" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f" +checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83d3daa6976cffb758ec878f108ba0e062a45b2d6ca3a2cca965338855476caf" dependencies = [ "aho-corasick", "memchr", @@ -302,9 +314,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78" +checksum = "2ab07dc67230e4a4718e70fd5c20055a4334b121f1f9db8fe63ef39ce9b8c846" [[package]] name = "ryu" From 6e731e86bfa9d92f983f7df9367e37aa80733078 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 10 Jul 2023 10:23:30 -0500 Subject: [PATCH 141/153] Placeholder changelog --- changelog.d/15910.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/15910.misc diff --git a/changelog.d/15910.misc b/changelog.d/15910.misc new file mode 100644 index 000000000000..d8f68a561073 --- /dev/null +++ b/changelog.d/15910.misc @@ -0,0 +1 @@ +Placeholder changelog so we can see more of the CI Pass (remove before merging). From 6774f265b4761302cab84405a181d437e69b89c0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 10 Jul 2023 16:24:04 +0100 Subject: [PATCH 142/153] Fix building rust with nightly (#15906) Also fix up a warning. --- Cargo.toml | 1 + changelog.d/15906.misc | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog.d/15906.misc diff --git a/Cargo.toml b/Cargo.toml index de141bdee9d3..c636b3acda0e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,3 +3,4 @@ [workspace] members = ["rust"] +resolver = "2" diff --git a/changelog.d/15906.misc b/changelog.d/15906.misc new file mode 100644 index 000000000000..b721b88d568d --- /dev/null +++ b/changelog.d/15906.misc @@ -0,0 +1 @@ +Fix building rust with nightly rust compiler. From e55a9b3e41e73f34fda781b9374935c4623f7ea9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 10 Jul 2023 16:24:42 +0100 Subject: [PATCH 143/153] Fix downgrading to previous version of Synapse (#15907) We do this by marking the constraint as deferrable. --- changelog.d/15907.misc | 1 + synapse/storage/background_updates.py | 7 ++++++- synapse/storage/databases/main/event_federation.py | 4 +++- .../main/delta/78/03event_extremities_constraints.py | 10 ++++++++-- tests/storage/test_background_update.py | 8 ++++++-- 5 files changed, 24 insertions(+), 6 deletions(-) create mode 100644 changelog.d/15907.misc diff --git a/changelog.d/15907.misc b/changelog.d/15907.misc new file mode 100644 index 000000000000..e0ecea6c2fdb --- /dev/null +++ b/changelog.d/15907.misc @@ -0,0 +1 @@ +Add foreign key constraint to `event_forward_extremities`. 
diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index 5dce0a01599d..2d5ddc3e7be4 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -80,10 +80,14 @@ class ForeignKeyConstraint(Constraint): Attributes: referenced_table: The "parent" table name. columns: The list of mappings of columns from table to referenced table + deferred: Whether to defer checking of the constraint to the end of the + transaction. This is useful for e.g. backwards compatibility where + an older version inserted data in the wrong order. """ referenced_table: str columns: Sequence[Tuple[str, str]] + deferred: bool def make_check_clause(self, table: str) -> str: join_clause = " AND ".join( @@ -94,7 +98,8 @@ def make_check_clause(self, table: str) -> str: def make_constraint_clause_postgres(self) -> str: column1_list = ", ".join(col1 for col1, col2 in self.columns) column2_list = ", ".join(col2 for col1, col2 in self.columns) - return f"FOREIGN KEY ({column1_list}) REFERENCES {self.referenced_table} ({column2_list})" + defer_clause = " DEFERRABLE INITIALLY DEFERRED" if self.deferred else "" + return f"FOREIGN KEY ({column1_list}) REFERENCES {self.referenced_table} ({column2_list}) {defer_clause}" @attr.s(auto_attribs=True) diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index dabe603c8cba..b2cda52ce593 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -146,7 +146,9 @@ def __init__( update_name="event_forward_extremities_event_id_foreign_key_constraint_update", table="event_forward_extremities", constraint_name="event_forward_extremities_event_id", - constraint=ForeignKeyConstraint("events", [("event_id", "event_id")]), + constraint=ForeignKeyConstraint( + "events", [("event_id", "event_id")], deferred=True + ), unique_columns=("event_id", "room_id"), ) diff --git a/synapse/storage/schema/main/delta/78/03event_extremities_constraints.py b/synapse/storage/schema/main/delta/78/03event_extremities_constraints.py index f12e2a8f3ee9..bf8c57dbe8ba 100644 --- a/synapse/storage/schema/main/delta/78/03event_extremities_constraints.py +++ b/synapse/storage/schema/main/delta/78/03event_extremities_constraints.py @@ -28,19 +28,25 @@ event_id TEXT NOT NULL, room_id TEXT NOT NULL, UNIQUE (event_id, room_id), - CONSTRAINT event_forward_extremities_event_id FOREIGN KEY (event_id) REFERENCES events (event_id) + CONSTRAINT event_forward_extremities_event_id FOREIGN KEY (event_id) REFERENCES events (event_id) DEFERRABLE INITIALLY DEFERRED ) """ def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: + # We mark this as a deferred constraint, as the previous version of Synapse + # inserted the event into the forward extremities *before* the events table. + # By marking as deferred we ensure that downgrading to the previous version + # will continue to work. 
run_validate_constraint_and_delete_rows_schema_delta( cur, ordering=7803, update_name="event_forward_extremities_event_id_foreign_key_constraint_update", table="event_forward_extremities", constraint_name="event_forward_extremities_event_id", - constraint=ForeignKeyConstraint("events", [("event_id", "event_id")]), + constraint=ForeignKeyConstraint( + "events", [("event_id", "event_id")], deferred=True + ), sqlite_table_name="event_forward_extremities2", sqlite_table_schema=FORWARD_EXTREMITIES_TABLE_SCHEMA, ) diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py index 6ca546f3f76a..a4a823a25242 100644 --- a/tests/storage/test_background_update.py +++ b/tests/storage/test_background_update.py @@ -586,7 +586,9 @@ def delta(txn: LoggingTransaction) -> None: update_name="test_bg_update", table="test_constraint", constraint_name="test_constraint_name", - constraint=ForeignKeyConstraint("base_table", [("b", "b")]), + constraint=ForeignKeyConstraint( + "base_table", [("b", "b")], deferred=False + ), sqlite_table_name="test_constraint2", sqlite_table_schema=table2_sqlite, ) @@ -604,7 +606,9 @@ def delta(txn: LoggingTransaction) -> None: "test_bg_update", table="test_constraint", constraint_name="test_constraint_name", - constraint=ForeignKeyConstraint("base_table", [("b", "b")]), + constraint=ForeignKeyConstraint( + "base_table", [("b", "b")], deferred=False + ), unique_columns=["a"], ) From a704a35dd71cbd5eb5baba1757da07fdc49dfd8a Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 10 Jul 2023 10:26:04 -0500 Subject: [PATCH 144/153] Revert "Placeholder changelog" This reverts commit 6e731e86bfa9d92f983f7df9367e37aa80733078. --- changelog.d/15910.misc | 1 - 1 file changed, 1 deletion(-) delete mode 100644 changelog.d/15910.misc diff --git a/changelog.d/15910.misc b/changelog.d/15910.misc deleted file mode 100644 index d8f68a561073..000000000000 --- a/changelog.d/15910.misc +++ /dev/null @@ -1 +0,0 @@ -Placeholder changelog so we can see more of the CI Pass (remove before merging). From c9bf644fa0c2c06f8143b14ccdb655feebed97df Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 10 Jul 2023 11:10:20 -0500 Subject: [PATCH 145/153] Revert "Federation outbound proxy" (#15910) Revert "Federation outbound proxy (#15773)" This reverts commit b07b14b494ae1dd564b4c44f844c9a9545b3d08a. 
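
[Editor's aside, not part of any patch above] The #15907 change marks the `event_forward_extremities` foreign key as `DEFERRABLE INITIALLY DEFERRED` so that an older Synapse, which inserts the extremity row *before* the corresponding `events` row, still passes the check when the transaction commits. The sketch below is a minimal illustration of that behaviour using the standard-library `sqlite3` module (chosen only because it ships with Python; the patch's Postgres clause behaves the same way for this purpose). The table and column names mirror the delta above; the example event and room IDs are made up.

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("PRAGMA foreign_keys = ON")  # SQLite only enforces FKs when enabled
conn.execute("CREATE TABLE events (event_id TEXT PRIMARY KEY)")
conn.execute(
    """
    CREATE TABLE event_forward_extremities (
        event_id TEXT NOT NULL,
        room_id TEXT NOT NULL,
        UNIQUE (event_id, room_id),
        CONSTRAINT event_forward_extremities_event_id
            FOREIGN KEY (event_id) REFERENCES events (event_id)
            DEFERRABLE INITIALLY DEFERRED
    )
    """
)

with conn:  # everything below runs in a single transaction
    # Inserting the extremity first would fail immediately with a
    # non-deferred constraint; a deferred one is only checked at COMMIT,
    # by which point the referenced event has also been inserted.
    conn.execute(
        "INSERT INTO event_forward_extremities VALUES ('$event:example.org', '!room:example.org')"
    )
    conn.execute("INSERT INTO events VALUES ('$event:example.org')")

print(conn.execute("SELECT * FROM event_forward_extremities").fetchall())
```

If the second `INSERT` were omitted, the commit at the end of the `with` block would raise an integrity error instead, which is exactly the check the downgrade path relies on being deferred.
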
--- changelog.d/15773.feature | 1 - .../configuration/config_documentation.md | 31 +-- docs/workers.md | 20 -- synapse/app/_base.py | 2 - synapse/app/generic_worker.py | 1 - synapse/app/homeserver.py | 1 - synapse/config/workers.py | 40 +-- synapse/http/client.py | 7 +- synapse/http/matrixfederationclient.py | 132 +--------- synapse/http/proxy.py | 249 ------------------ synapse/http/proxyagent.py | 79 +----- synapse/http/server.py | 55 ++-- synapse/http/site.py | 26 +- tests/app/test_openid_listener.py | 8 +- tests/handlers/test_device.py | 3 +- tests/handlers/test_federation.py | 2 +- tests/handlers/test_presence.py | 1 + tests/handlers/test_typing.py | 10 - tests/http/test_matrixfederationclient.py | 189 +------------ tests/http/test_proxy.py | 53 ---- tests/replication/_base.py | 3 +- .../test_federation_sender_shard.py | 22 +- tests/rest/client/test_presence.py | 1 + tests/rest/client/test_rooms.py | 2 + tests/storage/test_e2e_room_keys.py | 2 +- tests/storage/test_purge.py | 2 +- tests/storage/test_rollback_worker.py | 4 +- tests/test_server.py | 33 +-- tests/unittest.py | 1 - 29 files changed, 90 insertions(+), 890 deletions(-) delete mode 100644 changelog.d/15773.feature delete mode 100644 synapse/http/proxy.py delete mode 100644 tests/http/test_proxy.py diff --git a/changelog.d/15773.feature b/changelog.d/15773.feature deleted file mode 100644 index 0d77fae2dc68..000000000000 --- a/changelog.d/15773.feature +++ /dev/null @@ -1 +0,0 @@ -Allow configuring the set of workers to proxy outbound federation traffic through via `outbound_federation_restricted_to`. diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 04e8390ffedf..ff59cbccc19a 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -3930,14 +3930,13 @@ federation_sender_instances: --- ### `instance_map` -When using workers this should be a map from [`worker_name`](#worker_name) to the HTTP -replication listener of the worker, if configured, and to the main process. Each worker -declared under [`stream_writers`](../../workers.md#stream-writers) and -[`outbound_federation_restricted_to`](#outbound_federation_restricted_to) needs a HTTP replication listener, and that -listener should be included in the `instance_map`. The main process also needs an entry -on the `instance_map`, and it should be listed under `main` **if even one other worker -exists**. Ensure the port matches with what is declared inside the `listener` block for -a `replication` listener. +When using workers this should be a map from [`worker_name`](#worker_name) to the +HTTP replication listener of the worker, if configured, and to the main process. +Each worker declared under [`stream_writers`](../../workers.md#stream-writers) needs +a HTTP replication listener, and that listener should be included in the `instance_map`. +The main process also needs an entry on the `instance_map`, and it should be listed under +`main` **if even one other worker exists**. Ensure the port matches with what is declared +inside the `listener` block for a `replication` listener. Example configuration: @@ -3967,22 +3966,6 @@ stream_writers: typing: worker1 ``` --- -### `outbound_federation_restricted_to` - -When using workers, you can restrict outbound federation traffic to only go through a -specific subset of workers. Any worker specified here must also be in the -[`instance_map`](#instance_map). 
- -```yaml -outbound_federation_restricted_to: - - federation_sender1 - - federation_sender2 -``` - -Also see the [worker -documentation](../../workers.md#restrict-outbound-federation-traffic-to-a-specific-set-of-workers) -for more info. ---- ### `run_background_tasks_on` The [worker](../../workers.md#background-tasks) that is used to run diff --git a/docs/workers.md b/docs/workers.md index 03415c6eb3e4..828f082e7551 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -528,26 +528,6 @@ the stream writer for the `presence` stream: ^/_matrix/client/(api/v1|r0|v3|unstable)/presence/ -#### Restrict outbound federation traffic to a specific set of workers - -The `outbound_federation_restricted_to` configuration is useful to make sure outbound -federation traffic only goes through a specified subset of workers. This allows you to -set more strict access controls (like a firewall) for all workers and only allow the -`federation_sender`'s to contact the outside world. - -```yaml -instance_map: - main: - host: localhost - port: 8030 - federation_sender1: - host: localhost - port: 8034 - -outbound_federation_restricted_to: - - federation_sender1 -``` - #### Background tasks There is also support for moving background tasks to a separate diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 938ab40f278e..936b1b043037 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -386,7 +386,6 @@ def listen_unix( def listen_http( - hs: "HomeServer", listener_config: ListenerConfig, root_resource: Resource, version_string: str, @@ -407,7 +406,6 @@ def listen_http( version_string, max_request_body_size=max_request_body_size, reactor=reactor, - federation_agent=hs.get_federation_http_client().agent, ) if isinstance(listener_config, TCPListenerConfig): diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index dc79efcc142f..7406c3948cb1 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -221,7 +221,6 @@ def _listen_http(self, listener_config: ListenerConfig) -> None: root_resource = create_resource_tree(resources, OptionsResource()) _base.listen_http( - self, listener_config, root_resource, self.version_string, diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index f188c7265a50..84236ac299e0 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -139,7 +139,6 @@ def _listener_http( root_resource = OptionsResource() ports = listen_http( - self, listener_config, create_resource_tree(resources, root_resource), self.version_string, diff --git a/synapse/config/workers.py b/synapse/config/workers.py index 5c81eb5c6744..ccfe75eaf3a6 100644 --- a/synapse/config/workers.py +++ b/synapse/config/workers.py @@ -15,7 +15,7 @@ import argparse import logging -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, List, Union import attr from pydantic import BaseModel, Extra, StrictBool, StrictInt, StrictStr @@ -154,27 +154,6 @@ class WriterLocations: ) -@attr.s(auto_attribs=True) -class OutboundFederationRestrictedTo: - """Whether we limit outbound federation to a certain set of instances. - - Attributes: - instances: optional list of instances that can make outbound federation - requests. If None then all instances can make federation requests. - locations: list of instance locations to connect to proxy via. 
- """ - - instances: Optional[List[str]] - locations: List[InstanceLocationConfig] = attr.Factory(list) - - def __contains__(self, instance: str) -> bool: - # It feels a bit dirty to return `True` if `instances` is `None`, but it makes - # sense in downstream usage in the sense that if - # `outbound_federation_restricted_to` is not configured, then any instance can - # talk to federation (no restrictions so always return `True`). - return self.instances is None or instance in self.instances - - class WorkerConfig(Config): """The workers are processes run separately to the main synapse process. They have their own pid_file and listener configuration. They use the @@ -386,23 +365,6 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: new_option_name="update_user_directory_from_worker", ) - outbound_federation_restricted_to = config.get( - "outbound_federation_restricted_to", None - ) - self.outbound_federation_restricted_to = OutboundFederationRestrictedTo( - outbound_federation_restricted_to - ) - if outbound_federation_restricted_to: - for instance in outbound_federation_restricted_to: - if instance not in self.instance_map: - raise ConfigError( - "Instance %r is configured in 'outbound_federation_restricted_to' but does not appear in `instance_map` config." - % (instance,) - ) - self.outbound_federation_restricted_to.locations.append( - self.instance_map[instance] - ) - def _should_this_worker_perform_duty( self, config: Dict[str, Any], diff --git a/synapse/http/client.py b/synapse/http/client.py index ca2cdbc6e243..09ea93e10d0b 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -1037,12 +1037,7 @@ def connectionLost(self, reason: Failure = connectionDone) -> None: if reason.check(ResponseDone): self.deferred.callback(self.length) elif reason.check(PotentialDataLoss): - # This applies to requests which don't set `Content-Length` or a - # `Transfer-Encoding` in the response because in this case the end of the - # response is indicated by the connection being closed, an event which may - # also be due to a transient network problem or other error. But since this - # behavior is expected of some servers (like YouTube), let's ignore it. 
- # Stolen from https://github.com/twisted/treq/pull/49/files + # stolen from https://github.com/twisted/treq/pull/49/files # http://twistedmatrix.com/trac/ticket/4840 self.deferred.callback(self.length) else: diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index b00396fdc71b..cc4e258b0fba 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -50,7 +50,7 @@ from twisted.internet.task import Cooperator from twisted.web.client import ResponseFailed from twisted.web.http_headers import Headers -from twisted.web.iweb import IAgent, IBodyProducer, IResponse +from twisted.web.iweb import IBodyProducer, IResponse import synapse.metrics import synapse.util.retryutils @@ -72,7 +72,6 @@ read_body_with_max_size, ) from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent -from synapse.http.proxyagent import ProxyAgent from synapse.http.types import QueryParams from synapse.logging import opentracing from synapse.logging.context import make_deferred_yieldable, run_in_background @@ -394,32 +393,17 @@ def __init__( if hs.config.server.user_agent_suffix: user_agent = "%s %s" % (user_agent, hs.config.server.user_agent_suffix) - outbound_federation_restricted_to = ( - hs.config.worker.outbound_federation_restricted_to + federation_agent = MatrixFederationAgent( + self.reactor, + tls_client_options_factory, + user_agent.encode("ascii"), + hs.config.server.federation_ip_range_allowlist, + hs.config.server.federation_ip_range_blocklist, ) - if hs.get_instance_name() in outbound_federation_restricted_to: - # Talk to federation directly - federation_agent: IAgent = MatrixFederationAgent( - self.reactor, - tls_client_options_factory, - user_agent.encode("ascii"), - hs.config.server.federation_ip_range_allowlist, - hs.config.server.federation_ip_range_blocklist, - ) - else: - # We need to talk to federation via the proxy via one of the configured - # locations - federation_proxies = outbound_federation_restricted_to.locations - federation_agent = ProxyAgent( - self.reactor, - self.reactor, - tls_client_options_factory, - federation_proxies=federation_proxies, - ) # Use a BlocklistingAgentWrapper to prevent circumventing the IP # blocking via IP literals in server names - self.agent: IAgent = BlocklistingAgentWrapper( + self.agent = BlocklistingAgentWrapper( federation_agent, ip_blocklist=hs.config.server.federation_ip_range_blocklist, ) @@ -428,6 +412,7 @@ def __init__( self._store = hs.get_datastores().main self.version_string_bytes = hs.version_string.encode("ascii") self.default_timeout_seconds = hs.config.federation.client_timeout_ms / 1000 + self.max_long_retry_delay_seconds = ( hs.config.federation.max_long_retry_delay_ms / 1000 ) @@ -1146,101 +1131,6 @@ async def get_json( Succeeds when we get a 2xx HTTP response. The result will be the decoded JSON body. - Raises: - HttpResponseException: If we get an HTTP response code >= 300 - (except 429). - NotRetryingDestination: If we are not yet ready to retry this - server. - FederationDeniedError: If this destination is not on our - federation whitelist - RequestSendFailed: If there were problems connecting to the - remote, due to e.g. DNS failures, connection timeouts etc. 
- """ - json_dict, _ = await self.get_json_with_headers( - destination=destination, - path=path, - args=args, - retry_on_dns_fail=retry_on_dns_fail, - timeout=timeout, - ignore_backoff=ignore_backoff, - try_trailing_slash_on_400=try_trailing_slash_on_400, - parser=parser, - ) - return json_dict - - @overload - async def get_json_with_headers( - self, - destination: str, - path: str, - args: Optional[QueryParams] = None, - retry_on_dns_fail: bool = True, - timeout: Optional[int] = None, - ignore_backoff: bool = False, - try_trailing_slash_on_400: bool = False, - parser: Literal[None] = None, - ) -> Tuple[JsonDict, Dict[bytes, List[bytes]]]: - ... - - @overload - async def get_json_with_headers( - self, - destination: str, - path: str, - args: Optional[QueryParams] = ..., - retry_on_dns_fail: bool = ..., - timeout: Optional[int] = ..., - ignore_backoff: bool = ..., - try_trailing_slash_on_400: bool = ..., - parser: ByteParser[T] = ..., - ) -> Tuple[T, Dict[bytes, List[bytes]]]: - ... - - async def get_json_with_headers( - self, - destination: str, - path: str, - args: Optional[QueryParams] = None, - retry_on_dns_fail: bool = True, - timeout: Optional[int] = None, - ignore_backoff: bool = False, - try_trailing_slash_on_400: bool = False, - parser: Optional[ByteParser[T]] = None, - ) -> Tuple[Union[JsonDict, T], Dict[bytes, List[bytes]]]: - """GETs some json from the given host homeserver and path - - Args: - destination: The remote server to send the HTTP request to. - - path: The HTTP path. - - args: A dictionary used to create query strings, defaults to - None. - - retry_on_dns_fail: true if the request should be retried on DNS failures - - timeout: number of milliseconds to wait for the response. - self._default_timeout (60s) by default. - - Note that we may make several attempts to send the request; this - timeout applies to the time spent waiting for response headers for - *each* attempt (including connection time) as well as the time spent - reading the response body after a 200 response. - - ignore_backoff: true to ignore the historical backoff data - and try the request anyway. - - try_trailing_slash_on_400: True if on a 400 M_UNRECOGNIZED - response we should try appending a trailing slash to the end of - the request. Workaround for #3622 in Synapse <= v0.99.3. - - parser: The parser to use to decode the response. Defaults to - parsing as JSON. - - Returns: - Succeeds when we get a 2xx HTTP response. The result will be a tuple of the - decoded JSON body and a dict of the response headers. - Raises: HttpResponseException: If we get an HTTP response code >= 300 (except 429). @@ -1266,8 +1156,6 @@ async def get_json_with_headers( timeout=timeout, ) - headers = dict(response.headers.getAllRawHeaders()) - if timeout is not None: _sec_timeout = timeout / 1000 else: @@ -1285,7 +1173,7 @@ async def get_json_with_headers( parser=parser, ) - return body, headers + return body async def delete_json( self, diff --git a/synapse/http/proxy.py b/synapse/http/proxy.py deleted file mode 100644 index 0874d67760ae..000000000000 --- a/synapse/http/proxy.py +++ /dev/null @@ -1,249 +0,0 @@ -# Copyright 2023 The Matrix.org Foundation C.I.C. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import json -import logging -import urllib.parse -from typing import TYPE_CHECKING, Any, Optional, Set, Tuple, cast - -from twisted.internet import protocol -from twisted.internet.interfaces import ITCPTransport -from twisted.internet.protocol import connectionDone -from twisted.python import failure -from twisted.python.failure import Failure -from twisted.web.client import ResponseDone -from twisted.web.http_headers import Headers -from twisted.web.iweb import IAgent, IResponse -from twisted.web.resource import IResource -from twisted.web.server import Site - -from synapse.api.errors import Codes -from synapse.http import QuieterFileBodyProducer -from synapse.http.server import _AsyncResource -from synapse.logging.context import make_deferred_yieldable, run_in_background -from synapse.types import ISynapseReactor -from synapse.util.async_helpers import timeout_deferred - -if TYPE_CHECKING: - from synapse.http.site import SynapseRequest - -logger = logging.getLogger(__name__) - -# "Hop-by-hop" headers (as opposed to "end-to-end" headers) as defined by RFC2616 -# section 13.5.1 and referenced in RFC9110 section 7.6.1. These are meant to only be -# consumed by the immediate recipient and not be forwarded on. -HOP_BY_HOP_HEADERS = { - "Connection", - "Keep-Alive", - "Proxy-Authenticate", - "Proxy-Authorization", - "TE", - "Trailers", - "Transfer-Encoding", - "Upgrade", -} - - -def parse_connection_header_value( - connection_header_value: Optional[bytes], -) -> Set[str]: - """ - Parse the `Connection` header to determine which headers we should not be copied - over from the remote response. - - As defined by RFC2616 section 14.10 and RFC9110 section 7.6.1 - - Example: `Connection: close, X-Foo, X-Bar` will return `{"Close", "X-Foo", "X-Bar"}` - - Even though "close" is a special directive, let's just treat it as just another - header for simplicity. If people want to check for this directive, they can simply - check for `"Close" in headers`. - - Args: - connection_header_value: The value of the `Connection` header. - - Returns: - The set of header names that should not be copied over from the remote response. - The keys are capitalized in canonical capitalization. - """ - headers = Headers() - extra_headers_to_remove: Set[str] = set() - if connection_header_value: - extra_headers_to_remove = { - headers._canonicalNameCaps(connection_option.strip()).decode("ascii") - for connection_option in connection_header_value.split(b",") - } - - return extra_headers_to_remove - - -class ProxyResource(_AsyncResource): - """ - A stub resource that proxies any requests with a `matrix-federation://` scheme - through the given `federation_agent` to the remote homeserver and ferries back the - info. 
- """ - - isLeaf = True - - def __init__(self, reactor: ISynapseReactor, federation_agent: IAgent): - super().__init__(True) - - self.reactor = reactor - self.agent = federation_agent - - async def _async_render(self, request: "SynapseRequest") -> Tuple[int, Any]: - uri = urllib.parse.urlparse(request.uri) - assert uri.scheme == b"matrix-federation" - - headers = Headers() - for header_name in (b"User-Agent", b"Authorization", b"Content-Type"): - header_value = request.getHeader(header_name) - if header_value: - headers.addRawHeader(header_name, header_value) - - request_deferred = run_in_background( - self.agent.request, - request.method, - request.uri, - headers=headers, - bodyProducer=QuieterFileBodyProducer(request.content), - ) - request_deferred = timeout_deferred( - request_deferred, - # This should be set longer than the timeout in `MatrixFederationHttpClient` - # so that it has enough time to complete and pass us the data before we give - # up. - timeout=90, - reactor=self.reactor, - ) - - response = await make_deferred_yieldable(request_deferred) - - return response.code, response - - def _send_response( - self, - request: "SynapseRequest", - code: int, - response_object: Any, - ) -> None: - response = cast(IResponse, response_object) - response_headers = cast(Headers, response.headers) - - request.setResponseCode(code) - - # The `Connection` header also defines which headers should not be copied over. - connection_header = response_headers.getRawHeaders(b"connection") - extra_headers_to_remove = parse_connection_header_value( - connection_header[0] if connection_header else None - ) - - # Copy headers. - for k, v in response_headers.getAllRawHeaders(): - # Do not copy over any hop-by-hop headers. These are meant to only be - # consumed by the immediate recipient and not be forwarded on. - header_key = k.decode("ascii") - if ( - header_key in HOP_BY_HOP_HEADERS - or header_key in extra_headers_to_remove - ): - continue - - request.responseHeaders.setRawHeaders(k, v) - - response.deliverBody(_ProxyResponseBody(request)) - - def _send_error_response( - self, - f: failure.Failure, - request: "SynapseRequest", - ) -> None: - request.setResponseCode(502) - request.setHeader(b"Content-Type", b"application/json") - request.write( - ( - json.dumps( - { - "errcode": Codes.UNKNOWN, - "err": "ProxyResource: Error when proxying request: %s %s -> %s" - % ( - request.method.decode("ascii"), - request.uri.decode("ascii"), - f, - ), - } - ) - ).encode() - ) - request.finish() - - -class _ProxyResponseBody(protocol.Protocol): - """ - A protocol that proxies the given remote response data back out to the given local - request. - """ - - transport: Optional[ITCPTransport] = None - - def __init__(self, request: "SynapseRequest") -> None: - self._request = request - - def dataReceived(self, data: bytes) -> None: - # Avoid sending response data to the local request that already disconnected - if self._request._disconnected and self.transport is not None: - # Close the connection (forcefully) since all the data will get - # discarded anyway. - self.transport.abortConnection() - return - - self._request.write(data) - - def connectionLost(self, reason: Failure = connectionDone) -> None: - # If the local request is already finished (successfully or failed), don't - # worry about sending anything back. - if self._request.finished: - return - - if reason.check(ResponseDone): - self._request.finish() - else: - # Abort the underlying request since our remote request also failed. 
- self._request.transport.abortConnection() - - -class ProxySite(Site): - """ - Proxies any requests with a `matrix-federation://` scheme through the given - `federation_agent`. Otherwise, behaves like a normal `Site`. - """ - - def __init__( - self, - resource: IResource, - reactor: ISynapseReactor, - federation_agent: IAgent, - ): - super().__init__(resource, reactor=reactor) - - self._proxy_resource = ProxyResource(reactor, federation_agent) - - def getResourceFor(self, request: "SynapseRequest") -> IResource: - uri = urllib.parse.urlparse(request.uri) - if uri.scheme == b"matrix-federation": - return self._proxy_resource - - return super().getResourceFor(request) diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py index 1fa3adbef20c..7bdc4acae7da 100644 --- a/synapse/http/proxyagent.py +++ b/synapse/http/proxyagent.py @@ -12,9 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -import random import re -from typing import Any, Collection, Dict, List, Optional, Sequence, Tuple +from typing import Any, Dict, Optional, Tuple from urllib.parse import urlparse from urllib.request import ( # type: ignore[attr-defined] getproxies_environment, @@ -25,12 +24,7 @@ from twisted.internet import defer from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS -from twisted.internet.interfaces import ( - IProtocol, - IProtocolFactory, - IReactorCore, - IStreamClientEndpoint, -) +from twisted.internet.interfaces import IReactorCore, IStreamClientEndpoint from twisted.python.failure import Failure from twisted.web.client import ( URI, @@ -42,10 +36,8 @@ from twisted.web.http_headers import Headers from twisted.web.iweb import IAgent, IBodyProducer, IPolicyForHTTPS, IResponse -from synapse.config.workers import InstanceLocationConfig from synapse.http import redact_uri from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint, ProxyCredentials -from synapse.logging.context import run_in_background logger = logging.getLogger(__name__) @@ -82,10 +74,6 @@ class ProxyAgent(_AgentBase): use_proxy: Whether proxy settings should be discovered and used from conventional environment variables. - federation_proxies: An optional list of locations to proxy outbound federation - traffic through (only requests that use the `matrix-federation://` scheme - will be proxied). - Raises: ValueError if use_proxy is set and the environment variables contain an invalid proxy specification. 
@@ -101,7 +89,6 @@ def __init__( bindAddress: Optional[bytes] = None, pool: Optional[HTTPConnectionPool] = None, use_proxy: bool = False, - federation_proxies: Collection[InstanceLocationConfig] = (), ): contextFactory = contextFactory or BrowserLikePolicyForHTTPS() @@ -140,27 +127,6 @@ def __init__( self._policy_for_https = contextFactory self._reactor = reactor - self._federation_proxy_endpoint: Optional[IStreamClientEndpoint] = None - if federation_proxies: - endpoints = [] - for federation_proxy in federation_proxies: - endpoint = HostnameEndpoint( - self.proxy_reactor, - federation_proxy.host, - federation_proxy.port, - ) - - if federation_proxy.tls: - tls_connection_creator = self._policy_for_https.creatorForNetloc( - federation_proxy.host, - federation_proxy.port, - ) - endpoint = wrapClientTLS(tls_connection_creator, endpoint) - - endpoints.append(endpoint) - - self._federation_proxy_endpoint = _ProxyEndpoints(endpoints) - def request( self, method: bytes, @@ -248,14 +214,6 @@ def request( parsed_uri.port, self.https_proxy_creds, ) - elif ( - parsed_uri.scheme == b"matrix-federation" - and self._federation_proxy_endpoint - ): - # Cache *all* connections under the same key, since we are only - # connecting to a single destination, the proxy: - endpoint = self._federation_proxy_endpoint - request_path = uri else: # not using a proxy endpoint = HostnameEndpoint( @@ -275,11 +233,6 @@ def request( endpoint = wrapClientTLS(tls_connection_creator, endpoint) elif parsed_uri.scheme == b"http": pass - elif ( - parsed_uri.scheme == b"matrix-federation" - and self._federation_proxy_endpoint - ): - pass else: return defer.fail( Failure( @@ -384,31 +337,3 @@ def parse_proxy( credentials = ProxyCredentials(b"".join([url.username, b":", url.password])) return url.scheme, url.hostname, url.port or default_port, credentials - - -@implementer(IStreamClientEndpoint) -class _ProxyEndpoints: - """An endpoint that randomly iterates through a given list of endpoints at - each connection attempt. 
- """ - - def __init__(self, endpoints: Sequence[IStreamClientEndpoint]) -> None: - assert endpoints - self._endpoints = endpoints - - def connect( - self, protocol_factory: IProtocolFactory - ) -> "defer.Deferred[IProtocol]": - """Implements IStreamClientEndpoint interface""" - - return run_in_background(self._do_connect, protocol_factory) - - async def _do_connect(self, protocol_factory: IProtocolFactory) -> IProtocol: - failures: List[Failure] = [] - for endpoint in random.sample(self._endpoints, k=len(self._endpoints)): - try: - return await endpoint.connect(protocol_factory) - except Exception: - failures.append(Failure()) - - failures.pop().raiseException() diff --git a/synapse/http/server.py b/synapse/http/server.py index ff3153a9d949..933172c87327 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -18,7 +18,6 @@ import logging import types import urllib -import urllib.parse from http import HTTPStatus from http.client import FOUND from inspect import isawaitable @@ -66,6 +65,7 @@ UnrecognizedRequestError, ) from synapse.config.homeserver import HomeServerConfig +from synapse.http.site import SynapseRequest from synapse.logging.context import defer_to_thread, preserve_fn, run_in_background from synapse.logging.opentracing import active_span, start_active_span, trace_servlet from synapse.util import json_encoder @@ -76,7 +76,6 @@ if TYPE_CHECKING: import opentracing - from synapse.http.site import SynapseRequest from synapse.server import HomeServer logger = logging.getLogger(__name__) @@ -103,7 +102,7 @@ def return_json_error( - f: failure.Failure, request: "SynapseRequest", config: Optional[HomeServerConfig] + f: failure.Failure, request: SynapseRequest, config: Optional[HomeServerConfig] ) -> None: """Sends a JSON error response to clients.""" @@ -221,8 +220,8 @@ def return_html_error( def wrap_async_request_handler( - h: Callable[["_AsyncResource", "SynapseRequest"], Awaitable[None]] -) -> Callable[["_AsyncResource", "SynapseRequest"], "defer.Deferred[None]"]: + h: Callable[["_AsyncResource", SynapseRequest], Awaitable[None]] +) -> Callable[["_AsyncResource", SynapseRequest], "defer.Deferred[None]"]: """Wraps an async request handler so that it calls request.processing. This helps ensure that work done by the request handler after the request is completed @@ -236,7 +235,7 @@ def wrap_async_request_handler( """ async def wrapped_async_request_handler( - self: "_AsyncResource", request: "SynapseRequest" + self: "_AsyncResource", request: SynapseRequest ) -> None: with request.processing(): await h(self, request) @@ -301,7 +300,7 @@ def __init__(self, extract_context: bool = False): self._extract_context = extract_context - def render(self, request: "SynapseRequest") -> int: + def render(self, request: SynapseRequest) -> int: """This gets called by twisted every time someone sends us a request.""" request.render_deferred = defer.ensureDeferred( self._async_render_wrapper(request) @@ -309,7 +308,7 @@ def render(self, request: "SynapseRequest") -> int: return NOT_DONE_YET @wrap_async_request_handler - async def _async_render_wrapper(self, request: "SynapseRequest") -> None: + async def _async_render_wrapper(self, request: SynapseRequest) -> None: """This is a wrapper that delegates to `_async_render` and handles exceptions, return values, metrics, etc. """ @@ -327,15 +326,9 @@ async def _async_render_wrapper(self, request: "SynapseRequest") -> None: # of our stack, and thus gives us a sensible stack # trace. 
f = failure.Failure() - logger.exception( - "Error handling request", - exc_info=(f.type, f.value, f.getTracebackObject()), - ) self._send_error_response(f, request) - async def _async_render( - self, request: "SynapseRequest" - ) -> Optional[Tuple[int, Any]]: + async def _async_render(self, request: SynapseRequest) -> Optional[Tuple[int, Any]]: """Delegates to `_async_render_` methods, or returns a 400 if no appropriate method exists. Can be overridden in sub classes for different routing. @@ -365,7 +358,7 @@ async def _async_render( @abc.abstractmethod def _send_response( self, - request: "SynapseRequest", + request: SynapseRequest, code: int, response_object: Any, ) -> None: @@ -375,7 +368,7 @@ def _send_response( def _send_error_response( self, f: failure.Failure, - request: "SynapseRequest", + request: SynapseRequest, ) -> None: raise NotImplementedError() @@ -391,7 +384,7 @@ def __init__(self, canonical_json: bool = False, extract_context: bool = False): def _send_response( self, - request: "SynapseRequest", + request: SynapseRequest, code: int, response_object: Any, ) -> None: @@ -408,7 +401,7 @@ def _send_response( def _send_error_response( self, f: failure.Failure, - request: "SynapseRequest", + request: SynapseRequest, ) -> None: """Implements _AsyncResource._send_error_response""" return_json_error(f, request, None) @@ -480,7 +473,7 @@ def register_paths( ) def _get_handler_for_request( - self, request: "SynapseRequest" + self, request: SynapseRequest ) -> Tuple[ServletCallback, str, Dict[str, str]]: """Finds a callback method to handle the given request. @@ -510,7 +503,7 @@ def _get_handler_for_request( # Huh. No one wanted to handle that? Fiiiiiine. raise UnrecognizedRequestError(code=404) - async def _async_render(self, request: "SynapseRequest") -> Tuple[int, Any]: + async def _async_render(self, request: SynapseRequest) -> Tuple[int, Any]: callback, servlet_classname, group_dict = self._get_handler_for_request(request) request.is_render_cancellable = is_function_cancellable(callback) @@ -542,7 +535,7 @@ async def _async_render(self, request: "SynapseRequest") -> Tuple[int, Any]: def _send_error_response( self, f: failure.Failure, - request: "SynapseRequest", + request: SynapseRequest, ) -> None: """Implements _AsyncResource._send_error_response""" return_json_error(f, request, self.hs.config) @@ -558,7 +551,7 @@ class DirectServeHtmlResource(_AsyncResource): def _send_response( self, - request: "SynapseRequest", + request: SynapseRequest, code: int, response_object: Any, ) -> None: @@ -572,7 +565,7 @@ def _send_response( def _send_error_response( self, f: failure.Failure, - request: "SynapseRequest", + request: SynapseRequest, ) -> None: """Implements _AsyncResource._send_error_response""" return_html_error(f, request, self.ERROR_TEMPLATE) @@ -599,7 +592,7 @@ class UnrecognizedRequestResource(resource.Resource): errcode of M_UNRECOGNIZED. 
""" - def render(self, request: "SynapseRequest") -> int: + def render(self, request: SynapseRequest) -> int: f = failure.Failure(UnrecognizedRequestError(code=404)) return_json_error(f, request, None) # A response has already been sent but Twisted requires either NOT_DONE_YET @@ -629,7 +622,7 @@ def getChild(self, name: str, request: Request) -> resource.Resource: class OptionsResource(resource.Resource): """Responds to OPTION requests for itself and all children.""" - def render_OPTIONS(self, request: "SynapseRequest") -> bytes: + def render_OPTIONS(self, request: SynapseRequest) -> bytes: request.setResponseCode(204) request.setHeader(b"Content-Length", b"0") @@ -744,7 +737,7 @@ def _encode_json_bytes(json_object: object) -> bytes: def respond_with_json( - request: "SynapseRequest", + request: SynapseRequest, code: int, json_object: Any, send_cors: bool = False, @@ -794,7 +787,7 @@ def respond_with_json( def respond_with_json_bytes( - request: "SynapseRequest", + request: SynapseRequest, code: int, json_bytes: bytes, send_cors: bool = False, @@ -832,7 +825,7 @@ def respond_with_json_bytes( async def _async_write_json_to_request_in_thread( - request: "SynapseRequest", + request: SynapseRequest, json_encoder: Callable[[Any], bytes], json_object: Any, ) -> None: @@ -890,7 +883,7 @@ def _write_bytes_to_request(request: Request, bytes_to_write: bytes) -> None: _ByteProducer(request, bytes_generator) -def set_cors_headers(request: "SynapseRequest") -> None: +def set_cors_headers(request: SynapseRequest) -> None: """Set the CORS headers so that javascript running in a web browsers can use this API @@ -988,7 +981,7 @@ def set_clickjacking_protection_headers(request: Request) -> None: def respond_with_redirect( - request: "SynapseRequest", url: bytes, statusCode: int = FOUND, cors: bool = False + request: SynapseRequest, url: bytes, statusCode: int = FOUND, cors: bool = False ) -> None: """ Write a 302 (or other specified status code) response to the request, if it is still alive. diff --git a/synapse/http/site.py b/synapse/http/site.py index 0ee259834559..5b5a7c1e598f 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -21,28 +21,25 @@ from twisted.internet.address import UNIXAddress from twisted.internet.defer import Deferred -from twisted.internet.interfaces import IAddress +from twisted.internet.interfaces import IAddress, IReactorTime from twisted.python.failure import Failure from twisted.web.http import HTTPChannel -from twisted.web.iweb import IAgent from twisted.web.resource import IResource, Resource -from twisted.web.server import Request +from twisted.web.server import Request, Site from synapse.config.server import ListenerConfig from synapse.http import get_request_user_agent, redact_uri -from synapse.http.proxy import ProxySite from synapse.http.request_metrics import RequestMetrics, requests_counter from synapse.logging.context import ( ContextRequest, LoggingContext, PreserveLoggingContext, ) -from synapse.types import ISynapseReactor, Requester +from synapse.types import Requester if TYPE_CHECKING: import opentracing - logger = logging.getLogger(__name__) _next_request_seq = 0 @@ -105,7 +102,7 @@ def __init__( # A boolean indicating whether `render_deferred` should be cancelled if the # client disconnects early. Expected to be set by the coroutine started by # `Resource.render`, if rendering is asynchronous. 
- self.is_render_cancellable: bool = False + self.is_render_cancellable = False global _next_request_seq self.request_seq = _next_request_seq @@ -604,7 +601,7 @@ class _XForwardedForAddress: host: str -class SynapseSite(ProxySite): +class SynapseSite(Site): """ Synapse-specific twisted http Site @@ -626,8 +623,7 @@ def __init__( resource: IResource, server_version_string: str, max_request_body_size: int, - reactor: ISynapseReactor, - federation_agent: IAgent, + reactor: IReactorTime, ): """ @@ -642,11 +638,7 @@ def __init__( dropping the connection reactor: reactor to be used to manage connection timeouts """ - super().__init__( - resource=resource, - reactor=reactor, - federation_agent=federation_agent, - ) + Site.__init__(self, resource, reactor=reactor) self.site_tag = site_tag self.reactor = reactor @@ -657,9 +649,7 @@ def __init__( request_id_header = config.http_options.request_id_header - self.experimental_cors_msc3886: bool = ( - config.http_options.experimental_cors_msc3886 - ) + self.experimental_cors_msc3886 = config.http_options.experimental_cors_msc3886 def request_factory(channel: HTTPChannel, queued: bool) -> Request: return request_class( diff --git a/tests/app/test_openid_listener.py b/tests/app/test_openid_listener.py index 21c530974083..5a965f233b05 100644 --- a/tests/app/test_openid_listener.py +++ b/tests/app/test_openid_listener.py @@ -31,7 +31,9 @@ class FederationReaderOpenIDListenerTests(HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = self.setup_test_homeserver(homeserver_to_use=GenericWorkerServer) + hs = self.setup_test_homeserver( + federation_http_client=None, homeserver_to_use=GenericWorkerServer + ) return hs def default_config(self) -> JsonDict: @@ -89,7 +91,9 @@ def test_openid_listener(self, names: List[str], expectation: str) -> None: @patch("synapse.app.homeserver.KeyResource", new=Mock()) class SynapseHomeserverOpenIDListenerTests(HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = self.setup_test_homeserver(homeserver_to_use=SynapseHomeServer) + hs = self.setup_test_homeserver( + federation_http_client=None, homeserver_to_use=SynapseHomeServer + ) return hs @parameterized.expand( diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py index 66215af2b890..ee48f9e546e8 100644 --- a/tests/handlers/test_device.py +++ b/tests/handlers/test_device.py @@ -41,6 +41,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.appservice_api = mock.Mock() hs = self.setup_test_homeserver( "server", + federation_http_client=None, application_service_api=self.appservice_api, ) handler = hs.get_device_handler() @@ -400,7 +401,7 @@ def test_on_federation_query_user_devices_appservice(self) -> None: class DehydrationTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = self.setup_test_homeserver("server") + hs = self.setup_test_homeserver("server", federation_http_client=None) handler = hs.get_device_handler() assert isinstance(handler, DeviceHandler) self.handler = handler diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py index 5f11d5df11ad..bf0862ed541c 100644 --- a/tests/handlers/test_federation.py +++ b/tests/handlers/test_federation.py @@ -57,7 +57,7 @@ class FederationTestCase(unittest.FederatingHomeserverTestCase): ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = 
self.setup_test_homeserver() + hs = self.setup_test_homeserver(federation_http_client=None) self.handler = hs.get_federation_handler() self.store = hs.get_datastores().main return hs diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index fd66d573d221..19f5322317a1 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -993,6 +993,7 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: hs = self.setup_test_homeserver( "server", + federation_http_client=None, federation_sender=Mock(spec=FederationSender), ) return hs diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index 5da1d95f0b22..94518a7196b4 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -17,8 +17,6 @@ from typing import Dict, List, Set from unittest.mock import ANY, Mock, call -from netaddr import IPSet - from twisted.test.proto_helpers import MemoryReactor from twisted.web.resource import Resource @@ -26,7 +24,6 @@ from synapse.api.errors import AuthError from synapse.federation.transport.server import TransportLayerServer from synapse.handlers.typing import TypingWriterHandler -from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent from synapse.server import HomeServer from synapse.types import JsonDict, Requester, UserID, create_requester from synapse.util import Clock @@ -79,13 +76,6 @@ def make_homeserver( # we mock out the federation client too self.mock_federation_client = Mock(spec=["put_json"]) self.mock_federation_client.put_json.return_value = make_awaitable((200, "OK")) - self.mock_federation_client.agent = MatrixFederationAgent( - reactor, - tls_client_options_factory=None, - user_agent=b"SynapseInTrialTest/0.0.0", - ip_allowlist=None, - ip_blocklist=IPSet(), - ) # the tests assume that we are starting at unix time 1000 reactor.pump((1000,)) diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py index a8b9737d1fb7..b5f4a60fe500 100644 --- a/tests/http/test_matrixfederationclient.py +++ b/tests/http/test_matrixfederationclient.py @@ -11,8 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Dict, Generator -from unittest.mock import ANY, Mock, create_autospec +from typing import Generator +from unittest.mock import Mock from netaddr import IPSet from parameterized import parameterized @@ -21,11 +21,10 @@ from twisted.internet.defer import Deferred, TimeoutError from twisted.internet.error import ConnectingCancelledError, DNSLookupError from twisted.test.proto_helpers import MemoryReactor, StringTransport -from twisted.web.client import Agent, ResponseNeverReceived +from twisted.web.client import ResponseNeverReceived from twisted.web.http import HTTPChannel -from twisted.web.http_headers import Headers -from synapse.api.errors import HttpResponseException, RequestSendFailed +from synapse.api.errors import RequestSendFailed from synapse.http.matrixfederationclient import ( ByteParser, MatrixFederationHttpClient, @@ -40,9 +39,7 @@ from synapse.server import HomeServer from synapse.util import Clock -from tests.replication._base import BaseMultiWorkerStreamTestCase from tests.server import FakeTransport -from tests.test_utils import FakeResponse from tests.unittest import HomeserverTestCase, override_config @@ -661,181 +658,3 @@ def test_configurable_retry_and_delay_values(self) -> None: self.assertEqual(self.cl.max_short_retry_delay_seconds, 7) self.assertEqual(self.cl.max_long_retries, 20) self.assertEqual(self.cl.max_short_retries, 5) - - -class FederationClientProxyTests(BaseMultiWorkerStreamTestCase): - def default_config(self) -> Dict[str, Any]: - conf = super().default_config() - conf["instance_map"] = { - "main": {"host": "testserv", "port": 8765}, - "federation_sender": {"host": "testserv", "port": 1001}, - } - return conf - - @override_config({"outbound_federation_restricted_to": ["federation_sender"]}) - def test_proxy_requests_through_federation_sender_worker(self) -> None: - """ - Test that all outbound federation requests go through the `federation_sender` - worker - """ - # Mock out the `MatrixFederationHttpClient` of the `federation_sender` instance - # so we can act like some remote server responding to requests - mock_client_on_federation_sender = Mock() - mock_agent_on_federation_sender = create_autospec(Agent, spec_set=True) - mock_client_on_federation_sender.agent = mock_agent_on_federation_sender - - # Create the `federation_sender` worker - self.federation_sender = self.make_worker_hs( - "synapse.app.generic_worker", - {"worker_name": "federation_sender"}, - federation_http_client=mock_client_on_federation_sender, - ) - - # Fake `remoteserv:8008` responding to requests - mock_agent_on_federation_sender.request.side_effect = ( - lambda *args, **kwargs: defer.succeed( - FakeResponse.json( - payload={ - "foo": "bar", - } - ) - ) - ) - - # This federation request from the main process should be proxied through the - # `federation_sender` worker off to the remote server - test_request_from_main_process_d = defer.ensureDeferred( - self.hs.get_federation_http_client().get_json("remoteserv:8008", "foo/bar") - ) - - # Pump the reactor so our deferred goes through the motions - self.pump() - - # Make sure that the request was proxied through the `federation_sender` worker - mock_agent_on_federation_sender.request.assert_called_once_with( - b"GET", - b"matrix-federation://remoteserv:8008/foo/bar", - headers=ANY, - bodyProducer=ANY, - ) - - # Make sure the response is as expected back on the main worker - res = self.successResultOf(test_request_from_main_process_d) - self.assertEqual(res, {"foo": "bar"}) - - 
@override_config({"outbound_federation_restricted_to": ["federation_sender"]}) - def test_proxy_request_with_network_error_through_federation_sender_worker( - self, - ) -> None: - """ - Test that when the outbound federation request fails with a network related - error, a sensible error makes its way back to the main process. - """ - # Mock out the `MatrixFederationHttpClient` of the `federation_sender` instance - # so we can act like some remote server responding to requests - mock_client_on_federation_sender = Mock() - mock_agent_on_federation_sender = create_autospec(Agent, spec_set=True) - mock_client_on_federation_sender.agent = mock_agent_on_federation_sender - - # Create the `federation_sender` worker - self.federation_sender = self.make_worker_hs( - "synapse.app.generic_worker", - {"worker_name": "federation_sender"}, - federation_http_client=mock_client_on_federation_sender, - ) - - # Fake `remoteserv:8008` responding to requests - mock_agent_on_federation_sender.request.side_effect = ( - lambda *args, **kwargs: defer.fail(ResponseNeverReceived("fake error")) - ) - - # This federation request from the main process should be proxied through the - # `federation_sender` worker off to the remote server - test_request_from_main_process_d = defer.ensureDeferred( - self.hs.get_federation_http_client().get_json("remoteserv:8008", "foo/bar") - ) - - # Pump the reactor so our deferred goes through the motions. We pump with 10 - # seconds (0.1 * 100) so the `MatrixFederationHttpClient` runs out of retries - # and finally passes along the error response. - self.pump(0.1) - - # Make sure that the request was proxied through the `federation_sender` worker - mock_agent_on_federation_sender.request.assert_called_with( - b"GET", - b"matrix-federation://remoteserv:8008/foo/bar", - headers=ANY, - bodyProducer=ANY, - ) - - # Make sure we get some sort of error back on the main worker - failure_res = self.failureResultOf(test_request_from_main_process_d) - self.assertIsInstance(failure_res.value, RequestSendFailed) - self.assertIsInstance(failure_res.value.inner_exception, HttpResponseException) - - @override_config({"outbound_federation_restricted_to": ["federation_sender"]}) - def test_proxy_requests_and_discards_hop_by_hop_headers(self) -> None: - """ - Test to make sure hop-by-hop headers and addional headers defined in the - `Connection` header are discarded when proxying requests - """ - # Mock out the `MatrixFederationHttpClient` of the `federation_sender` instance - # so we can act like some remote server responding to requests - mock_client_on_federation_sender = Mock() - mock_agent_on_federation_sender = create_autospec(Agent, spec_set=True) - mock_client_on_federation_sender.agent = mock_agent_on_federation_sender - - # Create the `federation_sender` worker - self.federation_sender = self.make_worker_hs( - "synapse.app.generic_worker", - {"worker_name": "federation_sender"}, - federation_http_client=mock_client_on_federation_sender, - ) - - # Fake `remoteserv:8008` responding to requests - mock_agent_on_federation_sender.request.side_effect = lambda *args, **kwargs: defer.succeed( - FakeResponse( - code=200, - body=b'{"foo": "bar"}', - headers=Headers( - { - "Content-Type": ["application/json"], - "Connection": ["close, X-Foo, X-Bar"], - # Should be removed because it's defined in the `Connection` header - "X-Foo": ["foo"], - "X-Bar": ["bar"], - # Should be removed because it's a hop-by-hop header - "Proxy-Authorization": "abcdef", - } - ), - ) - ) - - # This federation request from the 
main process should be proxied through the - # `federation_sender` worker off to the remote server - test_request_from_main_process_d = defer.ensureDeferred( - self.hs.get_federation_http_client().get_json_with_headers( - "remoteserv:8008", "foo/bar" - ) - ) - - # Pump the reactor so our deferred goes through the motions - self.pump() - - # Make sure that the request was proxied through the `federation_sender` worker - mock_agent_on_federation_sender.request.assert_called_once_with( - b"GET", - b"matrix-federation://remoteserv:8008/foo/bar", - headers=ANY, - bodyProducer=ANY, - ) - - res, headers = self.successResultOf(test_request_from_main_process_d) - header_names = set(headers.keys()) - - # Make sure the response does not include the hop-by-hop headers - self.assertNotIn(b"X-Foo", header_names) - self.assertNotIn(b"X-Bar", header_names) - self.assertNotIn(b"Proxy-Authorization", header_names) - # Make sure the response is as expected back on the main worker - self.assertEqual(res, {"foo": "bar"}) diff --git a/tests/http/test_proxy.py b/tests/http/test_proxy.py deleted file mode 100644 index 0dc9ba8e0582..000000000000 --- a/tests/http/test_proxy.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2023 The Matrix.org Foundation C.I.C. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from typing import Set - -from parameterized import parameterized - -from synapse.http.proxy import parse_connection_header_value - -from tests.unittest import TestCase - - -class ProxyTests(TestCase): - @parameterized.expand( - [ - [b"close, X-Foo, X-Bar", {"Close", "X-Foo", "X-Bar"}], - # No whitespace - [b"close,X-Foo,X-Bar", {"Close", "X-Foo", "X-Bar"}], - # More whitespace - [b"close, X-Foo, X-Bar", {"Close", "X-Foo", "X-Bar"}], - # "close" directive in not the first position - [b"X-Foo, X-Bar, close", {"X-Foo", "X-Bar", "Close"}], - # Normalizes header capitalization - [b"keep-alive, x-fOo, x-bAr", {"Keep-Alive", "X-Foo", "X-Bar"}], - # Handles header names with whitespace - [ - b"keep-alive, x foo, x bar", - {"Keep-Alive", "X foo", "X bar"}, - ], - ] - ) - def test_parse_connection_header_value( - self, - connection_header_value: bytes, - expected_extra_headers_to_remove: Set[str], - ) -> None: - """ - Tests that the connection header value is parsed correctly - """ - self.assertEqual( - expected_extra_headers_to_remove, - parse_connection_header_value(connection_header_value), - ) diff --git a/tests/replication/_base.py b/tests/replication/_base.py index 96badc46b04d..eb9b1f1cd9be 100644 --- a/tests/replication/_base.py +++ b/tests/replication/_base.py @@ -69,10 +69,10 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: # Make a new HomeServer object for the worker self.reactor.lookups["testserv"] = "1.2.3.4" self.worker_hs = self.setup_test_homeserver( + federation_http_client=None, homeserver_to_use=GenericWorkerServer, config=self._get_worker_hs_config(), reactor=self.reactor, - federation_http_client=None, ) # Since we use sqlite in memory databases we need to make sure the @@ -380,7 +380,6 @@ def make_worker_hs( server_version_string="1", max_request_body_size=8192, reactor=self.reactor, - federation_agent=worker_hs.get_federation_http_client().agent, ) worker_hs.get_replication_command_handler().start_replication(worker_hs) diff --git a/tests/replication/test_federation_sender_shard.py b/tests/replication/test_federation_sender_shard.py index a324b4d31dde..08703206a9c9 100644 --- a/tests/replication/test_federation_sender_shard.py +++ b/tests/replication/test_federation_sender_shard.py @@ -14,18 +14,14 @@ import logging from unittest.mock import Mock -from netaddr import IPSet - from synapse.api.constants import EventTypes, Membership from synapse.events.builder import EventBuilderFactory from synapse.handlers.typing import TypingWriterHandler -from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent from synapse.rest.admin import register_servlets_for_client_rest_resource from synapse.rest.client import login, room from synapse.types import UserID, create_requester from tests.replication._base import BaseMultiWorkerStreamTestCase -from tests.server import get_clock from tests.test_utils import make_awaitable logger = logging.getLogger(__name__) @@ -45,25 +41,13 @@ class FederationSenderTestCase(BaseMultiWorkerStreamTestCase): room.register_servlets, ] - def setUp(self) -> None: - super().setUp() - - reactor, _ = get_clock() - self.matrix_federation_agent = MatrixFederationAgent( - reactor, - tls_client_options_factory=None, - user_agent=b"SynapseInTrialTest/0.0.0", - ip_allowlist=None, - ip_blocklist=IPSet(), - ) - def test_send_event_single_sender(self) -> None: """Test that using a single federation sender worker correctly sends a new event. 
""" mock_client = Mock(spec=["put_json"]) mock_client.put_json.return_value = make_awaitable({}) - mock_client.agent = self.matrix_federation_agent + self.make_worker_hs( "synapse.app.generic_worker", { @@ -94,7 +78,6 @@ def test_send_event_sharded(self) -> None: """ mock_client1 = Mock(spec=["put_json"]) mock_client1.put_json.return_value = make_awaitable({}) - mock_client1.agent = self.matrix_federation_agent self.make_worker_hs( "synapse.app.generic_worker", { @@ -109,7 +92,6 @@ def test_send_event_sharded(self) -> None: mock_client2 = Mock(spec=["put_json"]) mock_client2.put_json.return_value = make_awaitable({}) - mock_client2.agent = self.matrix_federation_agent self.make_worker_hs( "synapse.app.generic_worker", { @@ -163,7 +145,6 @@ def test_send_typing_sharded(self) -> None: """ mock_client1 = Mock(spec=["put_json"]) mock_client1.put_json.return_value = make_awaitable({}) - mock_client1.agent = self.matrix_federation_agent self.make_worker_hs( "synapse.app.generic_worker", { @@ -178,7 +159,6 @@ def test_send_typing_sharded(self) -> None: mock_client2 = Mock(spec=["put_json"]) mock_client2.put_json.return_value = make_awaitable({}) - mock_client2.agent = self.matrix_federation_agent self.make_worker_hs( "synapse.app.generic_worker", { diff --git a/tests/rest/client/test_presence.py b/tests/rest/client/test_presence.py index e12098102b96..dcbb125a3bba 100644 --- a/tests/rest/client/test_presence.py +++ b/tests/rest/client/test_presence.py @@ -40,6 +40,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: hs = self.setup_test_homeserver( "red", + federation_http_client=None, federation_client=Mock(), presence_handler=self.presence_handler, ) diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index d013e75d55d7..f1b4e1ad2fc1 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -67,6 +67,8 @@ class RoomBase(unittest.HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.hs = self.setup_test_homeserver( "red", + federation_http_client=None, + federation_client=Mock(), ) self.hs.get_federation_handler = Mock() # type: ignore[assignment] diff --git a/tests/storage/test_e2e_room_keys.py b/tests/storage/test_e2e_room_keys.py index f6df31aba4ca..9cb326d90a87 100644 --- a/tests/storage/test_e2e_room_keys.py +++ b/tests/storage/test_e2e_room_keys.py @@ -31,7 +31,7 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = self.setup_test_homeserver("server") + hs = self.setup_test_homeserver("server", federation_http_client=None) self.store = hs.get_datastores().main return hs diff --git a/tests/storage/test_purge.py b/tests/storage/test_purge.py index 02826731679e..857e2caf2e24 100644 --- a/tests/storage/test_purge.py +++ b/tests/storage/test_purge.py @@ -27,7 +27,7 @@ class PurgeTests(HomeserverTestCase): servlets = [room.register_servlets] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = self.setup_test_homeserver("server") + hs = self.setup_test_homeserver("server", federation_http_client=None) return hs def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: diff --git a/tests/storage/test_rollback_worker.py b/tests/storage/test_rollback_worker.py index 809c9f175d42..6861d3a6c9e5 100644 --- a/tests/storage/test_rollback_worker.py +++ b/tests/storage/test_rollback_worker.py @@ -45,7 +45,9 @@ def 
fake_listdir(filepath: str) -> List[str]: class WorkerSchemaTests(HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - hs = self.setup_test_homeserver(homeserver_to_use=GenericWorkerServer) + hs = self.setup_test_homeserver( + federation_http_client=None, homeserver_to_use=GenericWorkerServer + ) return hs def default_config(self) -> JsonDict: diff --git a/tests/test_server.py b/tests/test_server.py index fe5afebdcda4..e266c06a2cbf 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -38,7 +38,7 @@ from tests.server import ( FakeChannel, FakeSite, - get_clock, + ThreadedMemoryReactorClock, make_request, setup_test_homeserver, ) @@ -46,11 +46,12 @@ class JsonResourceTests(unittest.TestCase): def setUp(self) -> None: - reactor, clock = get_clock() - self.reactor = reactor + self.reactor = ThreadedMemoryReactorClock() + self.hs_clock = Clock(self.reactor) self.homeserver = setup_test_homeserver( self.addCleanup, - clock=clock, + federation_http_client=None, + clock=self.hs_clock, reactor=self.reactor, ) @@ -208,13 +209,7 @@ def _callback( class OptionsResourceTests(unittest.TestCase): def setUp(self) -> None: - reactor, clock = get_clock() - self.reactor = reactor - self.homeserver = setup_test_homeserver( - self.addCleanup, - clock=clock, - reactor=self.reactor, - ) + self.reactor = ThreadedMemoryReactorClock() class DummyResource(Resource): isLeaf = True @@ -247,7 +242,6 @@ def _make_request( "1.0", max_request_body_size=4096, reactor=self.reactor, - federation_agent=self.homeserver.get_federation_http_client().agent, ) # render the request and return the channel @@ -350,8 +344,7 @@ async def _async_render_GET(self, request: SynapseRequest) -> None: await self.callback(request) def setUp(self) -> None: - reactor, _ = get_clock() - self.reactor = reactor + self.reactor = ThreadedMemoryReactorClock() def test_good_response(self) -> None: async def callback(request: SynapseRequest) -> None: @@ -469,9 +462,9 @@ class DirectServeJsonResourceCancellationTests(unittest.TestCase): """Tests for `DirectServeJsonResource` cancellation.""" def setUp(self) -> None: - reactor, clock = get_clock() - self.reactor = reactor - self.resource = CancellableDirectServeJsonResource(clock) + self.reactor = ThreadedMemoryReactorClock() + self.clock = Clock(self.reactor) + self.resource = CancellableDirectServeJsonResource(self.clock) self.site = FakeSite(self.resource, self.reactor) def test_cancellable_disconnect(self) -> None: @@ -503,9 +496,9 @@ class DirectServeHtmlResourceCancellationTests(unittest.TestCase): """Tests for `DirectServeHtmlResource` cancellation.""" def setUp(self) -> None: - reactor, clock = get_clock() - self.reactor = reactor - self.resource = CancellableDirectServeHtmlResource(clock) + self.reactor = ThreadedMemoryReactorClock() + self.clock = Clock(self.reactor) + self.resource = CancellableDirectServeHtmlResource(self.clock) self.site = FakeSite(self.resource, self.reactor) def test_cancellable_disconnect(self) -> None: diff --git a/tests/unittest.py b/tests/unittest.py index 334a95a9175d..c73195b32b00 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -358,7 +358,6 @@ def setUp(self) -> None: server_version_string="1", max_request_body_size=4096, reactor=self.reactor, - federation_agent=self.hs.get_federation_http_client().agent, ) from tests.rest.client.utils import RestHelper From 5e82b07d2c79df9623e5567f935e74395c2e5492 Mon Sep 17 00:00:00 2001 From: Shay Date: Mon, 10 Jul 2023 10:39:36 -0700 Subject: [PATCH 146/153] 
Drop debian buster (#15893) --- .github/workflows/twisted_trunk.yml | 6 +++++- changelog.d/15893.misc | 1 + docs/deprecation_policy.md | 2 +- docs/development/contributing_guide.md | 2 +- scripts-dev/build_debian_packages.py | 1 - 5 files changed, 8 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15893.misc diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index 55081f8133b2..f7a4ee7c1375 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -96,7 +96,11 @@ jobs: if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest container: - image: matrixdotorg/sytest-synapse:buster + # We're using ubuntu:focal because it uses Python 3.8 which is our minimum supported Python version. + # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if + # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest + # version, assuming that any incompatibilities on newer versions would also be present on the oldest. + image: matrixdotorg/sytest-synapse:focal volumes: - ${{ github.workspace }}:/src diff --git a/changelog.d/15893.misc b/changelog.d/15893.misc new file mode 100644 index 000000000000..656d73b23120 --- /dev/null +++ b/changelog.d/15893.misc @@ -0,0 +1 @@ +Drop Debian Buster since we no longer support Python 3.7. diff --git a/docs/deprecation_policy.md b/docs/deprecation_policy.md index 46c18d7d3205..840366485072 100644 --- a/docs/deprecation_policy.md +++ b/docs/deprecation_policy.md @@ -23,7 +23,7 @@ people building from source should ensure they can fetch recent versions of Rust (e.g. by using [rustup](https://rustup.rs/)). The oldest supported version of SQLite is the version -[provided](https://packages.debian.org/buster/libsqlite3-0) by +[provided](https://packages.debian.org/bullseye/libsqlite3-0) by [Debian oldstable](https://wiki.debian.org/DebianOldStable). Context diff --git a/docs/development/contributing_guide.md b/docs/development/contributing_guide.md index f5ba55afb7a3..e9210b177695 100644 --- a/docs/development/contributing_guide.md +++ b/docs/development/contributing_guide.md @@ -322,7 +322,7 @@ The following command will let you run the integration test with the most common configuration: ```sh -$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:buster +$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:focal ``` (Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.) diff --git a/scripts-dev/build_debian_packages.py b/scripts-dev/build_debian_packages.py index 4c9f134ddd83..8fe10f2cb57f 100755 --- a/scripts-dev/build_debian_packages.py +++ b/scripts-dev/build_debian_packages.py @@ -23,7 +23,6 @@ # These are expanded inside the dockerfile to be a fully qualified image name. # e.g. 
docker.io/library/debian:bullseye DISTS = ( - "debian:buster", # oldstable: EOL 2022-08 "debian:bullseye", "debian:bookworm", "debian:sid", From 2328e90fbb65216ff84a08834d3cd99573bccdff Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 10 Jul 2023 17:23:11 -0500 Subject: [PATCH 147/153] Make the media `/upload` tracing less ambiguous (#15888) A lot of the functions have the same name in this space like `store_file`, and we also do it multiple times for different reasons (main media repo, other storage providers, thumbnails, etc) so it's good to differentiate them so your head doesn't explode. Follow-up to https://github.com/matrix-org/synapse/pull/15850 Tracing instrumentation to media `/upload` code paths to investigate https://github.com/matrix-org/synapse/issues/15841 --- changelog.d/15888.misc | 1 + synapse/media/media_storage.py | 78 +++++++++++++++++++------------ synapse/media/storage_provider.py | 25 +++++----- 3 files changed, 62 insertions(+), 42 deletions(-) create mode 100644 changelog.d/15888.misc diff --git a/changelog.d/15888.misc b/changelog.d/15888.misc new file mode 100644 index 000000000000..0e49ab23fed3 --- /dev/null +++ b/changelog.d/15888.misc @@ -0,0 +1 @@ +Add tracing to media `/upload` code paths. diff --git a/synapse/media/media_storage.py b/synapse/media/media_storage.py index eebcbc48e8c9..a17ccb3d801c 100644 --- a/synapse/media/media_storage.py +++ b/synapse/media/media_storage.py @@ -38,7 +38,7 @@ from synapse.api.errors import NotFoundError from synapse.logging.context import defer_to_thread, make_deferred_yieldable -from synapse.logging.opentracing import trace +from synapse.logging.opentracing import start_active_span, trace, trace_with_opname from synapse.util import Clock from synapse.util.file_consumer import BackgroundFileConsumer @@ -77,7 +77,7 @@ def __init__( self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker self.clock = hs.get_clock() - @trace + @trace_with_opname("MediaStorage.store_file") async def store_file(self, source: IO, file_info: FileInfo) -> str: """Write `source` to the on disk media store, and also any other configured storage providers @@ -91,18 +91,19 @@ async def store_file(self, source: IO, file_info: FileInfo) -> str: """ with self.store_into_file(file_info) as (f, fname, finish_cb): - # Write to the main repository + # Write to the main media repository await self.write_to_file(source, f) + # Write to the other storage providers await finish_cb() return fname - @trace + @trace_with_opname("MediaStorage.write_to_file") async def write_to_file(self, source: IO, output: IO) -> None: """Asynchronously write the `source` to `output`.""" await defer_to_thread(self.reactor, _write_file_synchronously, source, output) - @trace + @trace_with_opname("MediaStorage.store_into_file") @contextlib.contextmanager def store_into_file( self, file_info: FileInfo @@ -117,9 +118,9 @@ def store_into_file( fname can be used to read the contents from after upload, e.g. to generate thumbnails. - finish_cb must be called and waited on after the file has been - successfully been written to. Should not be called if there was an - error. + finish_cb must be called and waited on after the file has been successfully been + written to. Should not be called if there was an error. Checks for spam and + stores the file into the configured storage providers. 
Args: file_info: Info about the file to store @@ -139,35 +140,48 @@ def store_into_file( finished_called = [False] + main_media_repo_write_trace_scope = start_active_span( + "writing to main media repo" + ) + main_media_repo_write_trace_scope.__enter__() + try: with open(fname, "wb") as f: async def finish() -> None: - # Ensure that all writes have been flushed and close the - # file. - f.flush() - f.close() - - spam_check = await self._spam_checker_module_callbacks.check_media_file_for_spam( - ReadableFileWrapper(self.clock, fname), file_info - ) - if spam_check != self._spam_checker_module_callbacks.NOT_SPAM: - logger.info("Blocking media due to spam checker") - # Note that we'll delete the stored media, due to the - # try/except below. The media also won't be stored in - # the DB. - # We currently ignore any additional field returned by - # the spam-check API. - raise SpamMediaException(errcode=spam_check[0]) - - for provider in self.storage_providers: - await provider.store_file(path, file_info) - - finished_called[0] = True + # When someone calls finish, we assume they are done writing to the main media repo + main_media_repo_write_trace_scope.__exit__(None, None, None) + + with start_active_span("writing to other storage providers"): + # Ensure that all writes have been flushed and close the + # file. + f.flush() + f.close() + + spam_check = await self._spam_checker_module_callbacks.check_media_file_for_spam( + ReadableFileWrapper(self.clock, fname), file_info + ) + if spam_check != self._spam_checker_module_callbacks.NOT_SPAM: + logger.info("Blocking media due to spam checker") + # Note that we'll delete the stored media, due to the + # try/except below. The media also won't be stored in + # the DB. + # We currently ignore any additional field returned by + # the spam-check API. 
+ raise SpamMediaException(errcode=spam_check[0]) + + for provider in self.storage_providers: + with start_active_span(str(provider)): + await provider.store_file(path, file_info) + + finished_called[0] = True yield f, fname, finish except Exception as e: try: + main_media_repo_write_trace_scope.__exit__( + type(e), None, e.__traceback__ + ) os.remove(fname) except Exception: pass @@ -175,7 +189,11 @@ async def finish() -> None: raise e from None if not finished_called: - raise Exception("Finished callback not called") + exc = Exception("Finished callback not called") + main_media_repo_write_trace_scope.__exit__( + type(exc), None, exc.__traceback__ + ) + raise exc async def fetch_media(self, file_info: FileInfo) -> Optional[Responder]: """Attempts to fetch media described by file_info from the local cache diff --git a/synapse/media/storage_provider.py b/synapse/media/storage_provider.py index 0aea3a7a0d87..70a45cfd5b78 100644 --- a/synapse/media/storage_provider.py +++ b/synapse/media/storage_provider.py @@ -20,7 +20,7 @@ from synapse.config._base import Config from synapse.logging.context import defer_to_thread, run_in_background -from synapse.logging.opentracing import trace +from synapse.logging.opentracing import start_active_span, trace_with_opname from synapse.util.async_helpers import maybe_awaitable from ._base import FileInfo, Responder @@ -87,7 +87,7 @@ def __init__( def __str__(self) -> str: return "StorageProviderWrapper[%s]" % (self.backend,) - @trace + @trace_with_opname("StorageProviderWrapper.store_file") async def store_file(self, path: str, file_info: FileInfo) -> None: if not file_info.server_name and not self.store_local: return None @@ -116,7 +116,7 @@ async def store() -> None: run_in_background(store) - @trace + @trace_with_opname("StorageProviderWrapper.fetch") async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]: if file_info.url_cache: # Files in the URL preview cache definitely aren't stored here, @@ -144,7 +144,7 @@ def __init__(self, hs: "HomeServer", config: str): def __str__(self) -> str: return "FileStorageProviderBackend[%s]" % (self.base_directory,) - @trace + @trace_with_opname("FileStorageProviderBackend.store_file") async def store_file(self, path: str, file_info: FileInfo) -> None: """See StorageProvider.store_file""" @@ -156,14 +156,15 @@ async def store_file(self, path: str, file_info: FileInfo) -> None: # mypy needs help inferring the type of the second parameter, which is generic shutil_copyfile: Callable[[str, str], str] = shutil.copyfile - await defer_to_thread( - self.hs.get_reactor(), - shutil_copyfile, - primary_fname, - backup_fname, - ) - - @trace + with start_active_span("shutil_copyfile"): + await defer_to_thread( + self.hs.get_reactor(), + shutil_copyfile, + primary_fname, + backup_fname, + ) + + @trace_with_opname("FileStorageProviderBackend.fetch") async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]: """See StorageProvider.fetch""" From b516d919995f3bf36045263376628ff0aa298095 Mon Sep 17 00:00:00 2001 From: Michael Telatynski <7t3chguy@gmail.com> Date: Tue, 11 Jul 2023 09:18:50 +0100 Subject: [PATCH 148/153] Add `Server` to Access-Control-Expose-Headers header (#15908) --- changelog.d/15908.misc | 1 + synapse/http/server.py | 2 +- tests/test_server.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/15908.misc diff --git a/changelog.d/15908.misc b/changelog.d/15908.misc new file mode 100644 index 000000000000..3ab8674e03e1 --- /dev/null +++ 
b/changelog.d/15908.misc @@ -0,0 +1 @@ +Add `Server` to Access-Control-Expose-Headers header. diff --git a/synapse/http/server.py b/synapse/http/server.py index 933172c87327..e411ac7e6287 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -910,7 +910,7 @@ def set_cors_headers(request: SynapseRequest) -> None: ) request.setHeader( b"Access-Control-Expose-Headers", - b"Synapse-Trace-Id", + b"Synapse-Trace-Id, Server", ) diff --git a/tests/test_server.py b/tests/test_server.py index e266c06a2cbf..dc491e06edcb 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -268,7 +268,7 @@ def _check_cors_standard_headers(self, channel: FakeChannel) -> None: ) self.assertEqual( channel.headers.getRawHeaders(b"Access-Control-Expose-Headers"), - [b"Synapse-Trace-Id"], + [b"Synapse-Trace-Id, Server"], ) def _check_cors_msc3886_headers(self, channel: FakeChannel) -> None: From 7c7bd9898b071385400ef2e083ae0b85c4935242 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Tue, 11 Jul 2023 10:28:11 +0100 Subject: [PATCH 149/153] 1.88.0rc1 --- CHANGES.md | 49 +++++++++++++++++++++++++++++++++++++++ changelog.d/15751.misc | 1 - changelog.d/15782.misc | 1 - changelog.d/15787.misc | 1 - changelog.d/15826.misc | 1 - changelog.d/15844.feature | 1 - changelog.d/15850.misc | 1 - changelog.d/15851.removal | 1 - changelog.d/15852.doc | 1 - changelog.d/15853.misc | 1 - changelog.d/15854.misc | 1 - changelog.d/15860.removal | 1 - changelog.d/15861.misc | 1 - changelog.d/15862.bugfix | 3 --- changelog.d/15872.doc | 1 - changelog.d/15874.misc | 1 - changelog.d/15876.bugfix | 1 - changelog.d/15888.misc | 1 - changelog.d/15892.misc | 1 - changelog.d/15893.misc | 1 - changelog.d/15906.misc | 1 - changelog.d/15907.misc | 1 - changelog.d/15908.misc | 1 - debian/changelog | 6 +++++ pyproject.toml | 2 +- 25 files changed, 56 insertions(+), 25 deletions(-) delete mode 100644 changelog.d/15751.misc delete mode 100644 changelog.d/15782.misc delete mode 100644 changelog.d/15787.misc delete mode 100644 changelog.d/15826.misc delete mode 100644 changelog.d/15844.feature delete mode 100644 changelog.d/15850.misc delete mode 100644 changelog.d/15851.removal delete mode 100644 changelog.d/15852.doc delete mode 100644 changelog.d/15853.misc delete mode 100644 changelog.d/15854.misc delete mode 100644 changelog.d/15860.removal delete mode 100644 changelog.d/15861.misc delete mode 100644 changelog.d/15862.bugfix delete mode 100644 changelog.d/15872.doc delete mode 100644 changelog.d/15874.misc delete mode 100644 changelog.d/15876.bugfix delete mode 100644 changelog.d/15888.misc delete mode 100644 changelog.d/15892.misc delete mode 100644 changelog.d/15893.misc delete mode 100644 changelog.d/15906.misc delete mode 100644 changelog.d/15907.misc delete mode 100644 changelog.d/15908.misc diff --git a/CHANGES.md b/CHANGES.md index 860e89ed99ff..f3ba4a454f38 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,52 @@ +# Synapse 1.88.0rc1 (2023-07-11) + +Note that the minimum supported version of Python is now 3.8, as Python 3.7 is now [end-of-life](https://devguide.python.org/versions/). + +### Features + +- Add `not_user_type` param to the [list accounts admin API](https://matrix-org.github.io/synapse/v1.88/admin_api/user_admin_api.html#list-accounts). ([\#15844](https://github.com/matrix-org/synapse/issues/15844)) + +### Bugfixes + +- Pin `pydantic` to `^=1.7.4` to avoid backwards-incompatible API changes from the 2.0.0 release. + Contributed by @PaarthShah. 
([\#15862](https://github.com/matrix-org/synapse/issues/15862)) +- Correctly resize thumbnails with pillow version >=10. ([\#15876](https://github.com/matrix-org/synapse/issues/15876)) + +### Improved Documentation + +- Fixed header levels on the [Admin API "Users"](https://matrix-org.github.io/synapse/v1.87/admin_api/user_admin_api.html) documentation page. Contributed by @sumnerevans at @beeper. ([\#15852](https://github.com/matrix-org/synapse/issues/15852)) +- Remove deprecated `worker_replication_host`, `worker_replication_http_port` and `worker_replication_http_tls` configuration options. ([\#15872](https://github.com/matrix-org/synapse/issues/15872)) + +### Deprecations and Removals + +- Remove support for Python 3.7 and hence for Debian Buster. ([\#15851](https://github.com/matrix-org/synapse/issues/15851), [\#15892](https://github.com/matrix-org/synapse/issues/15892), [\#15893](https://github.com/matrix-org/synapse/issues/15893)) +- Remove deprecated `worker_replication_host`, `worker_replication_http_port` and `worker_replication_http_tls` configuration options. ([\#15860](https://github.com/matrix-org/synapse/issues/15860)) + +### Internal Changes + +- Add foreign key constraint to `event_forward_extremities`. ([\#15751](https://github.com/matrix-org/synapse/issues/15751), [\#15907](https://github.com/matrix-org/synapse/issues/15907)) +- Add read/write style cross-worker locks. ([\#15782](https://github.com/matrix-org/synapse/issues/15782)) +- Stop writing to column `user_id` of tables `profiles` and `user_filters`. ([\#15787](https://github.com/matrix-org/synapse/issues/15787)) +- Use lower isolation level when cleaning old presence stream data to avoid serialization errors. ([\#15826](https://github.com/matrix-org/synapse/issues/15826)) +- Add tracing to media `/upload` code paths. ([\#15850](https://github.com/matrix-org/synapse/issues/15850), [\#15888](https://github.com/matrix-org/synapse/issues/15888)) +- Add a timeout that aborts any Postgres statement taking more than 1 hour. ([\#15853](https://github.com/matrix-org/synapse/issues/15853)) +- Fix the `devenv up` configuration which was ignoring the config overrides. ([\#15854](https://github.com/matrix-org/synapse/issues/15854)) +- Optimised cleanup of old entries in `device_lists_stream`. ([\#15861](https://github.com/matrix-org/synapse/issues/15861)) +- Update the Matrix clients link in the _It works! Synapse is running_ landing page. ([\#15874](https://github.com/matrix-org/synapse/issues/15874)) +- Fix building Synapse with the nightly Rust compiler. ([\#15906](https://github.com/matrix-org/synapse/issues/15906)) +- Add `Server` to Access-Control-Expose-Headers header. ([\#15908](https://github.com/matrix-org/synapse/issues/15908)) + +### Updates to locked dependencies + +* Bump authlib from 1.2.0 to 1.2.1. ([\#15864](https://github.com/matrix-org/synapse/issues/15864)) +* Bump importlib-metadata from 6.6.0 to 6.7.0. ([\#15865](https://github.com/matrix-org/synapse/issues/15865)) +* Bump lxml from 4.9.2 to 4.9.3. ([\#15897](https://github.com/matrix-org/synapse/issues/15897)) +* Bump regex from 1.8.4 to 1.9.1. ([\#15902](https://github.com/matrix-org/synapse/issues/15902)) +* Bump ruff from 0.0.275 to 0.0.277. ([\#15900](https://github.com/matrix-org/synapse/issues/15900)) +* Bump sentry-sdk from 1.25.1 to 1.26.0. ([\#15867](https://github.com/matrix-org/synapse/issues/15867)) +* Bump serde_json from 1.0.99 to 1.0.100. 
([\#15901](https://github.com/matrix-org/synapse/issues/15901)) +* Bump types-pyopenssl from 23.2.0.0 to 23.2.0.1. ([\#15866](https://github.com/matrix-org/synapse/issues/15866)) + # Synapse 1.87.0 (2023-07-04) Please note that this will be the last release of Synapse that is compatible with diff --git a/changelog.d/15751.misc b/changelog.d/15751.misc deleted file mode 100644 index e0ecea6c2fdb..000000000000 --- a/changelog.d/15751.misc +++ /dev/null @@ -1 +0,0 @@ -Add foreign key constraint to `event_forward_extremities`. diff --git a/changelog.d/15782.misc b/changelog.d/15782.misc deleted file mode 100644 index aae493b97378..000000000000 --- a/changelog.d/15782.misc +++ /dev/null @@ -1 +0,0 @@ -Add read/write style cross-worker locks. diff --git a/changelog.d/15787.misc b/changelog.d/15787.misc deleted file mode 100644 index bd7536d36ec1..000000000000 --- a/changelog.d/15787.misc +++ /dev/null @@ -1 +0,0 @@ -Stop writing to column `user_id` of tables `profiles` and `user_filters`. diff --git a/changelog.d/15826.misc b/changelog.d/15826.misc deleted file mode 100644 index 88903f3f7c7a..000000000000 --- a/changelog.d/15826.misc +++ /dev/null @@ -1 +0,0 @@ -Use lower isolation level when cleaning old presence stream data to avoid serialization errors. diff --git a/changelog.d/15844.feature b/changelog.d/15844.feature deleted file mode 100644 index c220055d41c9..000000000000 --- a/changelog.d/15844.feature +++ /dev/null @@ -1 +0,0 @@ -Add `not_user_type` param to the list accounts admin API. diff --git a/changelog.d/15850.misc b/changelog.d/15850.misc deleted file mode 100644 index 0e49ab23fed3..000000000000 --- a/changelog.d/15850.misc +++ /dev/null @@ -1 +0,0 @@ -Add tracing to media `/upload` code paths. diff --git a/changelog.d/15851.removal b/changelog.d/15851.removal deleted file mode 100644 index e08df4c1366a..000000000000 --- a/changelog.d/15851.removal +++ /dev/null @@ -1 +0,0 @@ -Remove support for Python 3.7. diff --git a/changelog.d/15852.doc b/changelog.d/15852.doc deleted file mode 100644 index 060b55d106de..000000000000 --- a/changelog.d/15852.doc +++ /dev/null @@ -1 +0,0 @@ -Fixed header levels on the Admin API "Users" documentation page. Contributed by @sumnerevans at @beeper. diff --git a/changelog.d/15853.misc b/changelog.d/15853.misc deleted file mode 100644 index 3e9516b1ad82..000000000000 --- a/changelog.d/15853.misc +++ /dev/null @@ -1 +0,0 @@ -Add a timeout that aborts any Postgres statement taking more than 1 hour. \ No newline at end of file diff --git a/changelog.d/15854.misc b/changelog.d/15854.misc deleted file mode 100644 index 8c940dd9c5aa..000000000000 --- a/changelog.d/15854.misc +++ /dev/null @@ -1 +0,0 @@ -Fix the `devenv up` configuration which was ignoring the config overrides. \ No newline at end of file diff --git a/changelog.d/15860.removal b/changelog.d/15860.removal deleted file mode 100644 index 1993bf02997b..000000000000 --- a/changelog.d/15860.removal +++ /dev/null @@ -1 +0,0 @@ -Remove deprecated `worker_replication_host`, `worker_replication_http_port` and `worker_replication_http_tls` configuration options. diff --git a/changelog.d/15861.misc b/changelog.d/15861.misc deleted file mode 100644 index 6f320eab81c9..000000000000 --- a/changelog.d/15861.misc +++ /dev/null @@ -1 +0,0 @@ -Optimised cleanup of old entries in device_lists_stream. 
diff --git a/changelog.d/15862.bugfix b/changelog.d/15862.bugfix
deleted file mode 100644
index 8eb6aa9a7f8c..000000000000
--- a/changelog.d/15862.bugfix
+++ /dev/null
@@ -1,3 +0,0 @@
-Pin `pydantic` to ^=1.7.4 to avoid backwards-incompatible API changes from the 2.0.0 release.
-Resolves https://github.com/matrix-org/synapse/issues/15858.
-Contributed by @PaarthShah.
diff --git a/changelog.d/15872.doc b/changelog.d/15872.doc
deleted file mode 100644
index 1993bf02997b..000000000000
--- a/changelog.d/15872.doc
+++ /dev/null
@@ -1 +0,0 @@
-Remove deprecated `worker_replication_host`, `worker_replication_http_port` and `worker_replication_http_tls` configuration options.
diff --git a/changelog.d/15874.misc b/changelog.d/15874.misc
deleted file mode 100644
index 0d434bef5d92..000000000000
--- a/changelog.d/15874.misc
+++ /dev/null
@@ -1 +0,0 @@
-Updated the link in home page from https://matrix.org/docs/projects/try-matrix-now.html#clients to https://matrix.org/ecosystem/clients/.
diff --git a/changelog.d/15876.bugfix b/changelog.d/15876.bugfix
deleted file mode 100644
index 9dbae04c4f40..000000000000
--- a/changelog.d/15876.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Correctly resize thumbnails with pillow version >=10.
diff --git a/changelog.d/15888.misc b/changelog.d/15888.misc
deleted file mode 100644
index 0e49ab23fed3..000000000000
--- a/changelog.d/15888.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add tracing to media `/upload` code paths.
diff --git a/changelog.d/15892.misc b/changelog.d/15892.misc
deleted file mode 100644
index e5a123d21864..000000000000
--- a/changelog.d/15892.misc
+++ /dev/null
@@ -1 +0,0 @@
-Stop running sytest on buster/python3.7.
diff --git a/changelog.d/15893.misc b/changelog.d/15893.misc
deleted file mode 100644
index 656d73b23120..000000000000
--- a/changelog.d/15893.misc
+++ /dev/null
@@ -1 +0,0 @@
-Drop Debian Buster since we no longer support Python 3.7.
diff --git a/changelog.d/15906.misc b/changelog.d/15906.misc
deleted file mode 100644
index b721b88d568d..000000000000
--- a/changelog.d/15906.misc
+++ /dev/null
@@ -1 +0,0 @@
-Fix building rust with nightly rust compiler.
diff --git a/changelog.d/15907.misc b/changelog.d/15907.misc
deleted file mode 100644
index e0ecea6c2fdb..000000000000
--- a/changelog.d/15907.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add foreign key constraint to `event_forward_extremities`.
diff --git a/changelog.d/15908.misc b/changelog.d/15908.misc
deleted file mode 100644
index 3ab8674e03e1..000000000000
--- a/changelog.d/15908.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add `Server` to Access-Control-Expose-Headers header.
diff --git a/debian/changelog b/debian/changelog
index 0d9216bee8ad..763edb8ec256 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.88.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.88.0rc1.
+
+ -- Synapse Packaging team Tue, 11 Jul 2023 10:20:19 +0100
+
 matrix-synapse-py3 (1.87.0) stable; urgency=medium
 
   * New Synapse release 1.87.0.
diff --git a/pyproject.toml b/pyproject.toml index fc1b8c0dadd2..b61b8b17acf9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.87.0" +version = "1.88.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0" From 4ccfa16081c042b935b58e63e06995af5d3e08f8 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Tue, 11 Jul 2023 10:34:09 +0100 Subject: [PATCH 150/153] Call out upgrade notes in README --- CHANGES.md | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index f3ba4a454f38..d71d4124fa46 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,6 +1,10 @@ # Synapse 1.88.0rc1 (2023-07-11) -Note that the minimum supported version of Python is now 3.8, as Python 3.7 is now [end-of-life](https://devguide.python.org/versions/). +This release + - raises the minimum supported version of Python to 3.8, as Python 3.7 is now [end-of-life](https://devguide.python.org/versions/), and + - removes deprecated config options related to worker deployment. + +See [the upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.88/docs/upgrade.md#upgrading-to-v1880) for more information. ### Features @@ -19,8 +23,8 @@ Note that the minimum supported version of Python is now 3.8, as Python 3.7 is n ### Deprecations and Removals +- **Remove deprecated `worker_replication_host`, `worker_replication_http_port` and `worker_replication_http_tls` configuration options.** See the [upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.88/docs/upgrade.md#removal-of-worker_replication_-settings) for more details. ([\#15860](https://github.com/matrix-org/synapse/issues/15860)) - Remove support for Python 3.7 and hence for Debian Buster. ([\#15851](https://github.com/matrix-org/synapse/issues/15851), [\#15892](https://github.com/matrix-org/synapse/issues/15892), [\#15893](https://github.com/matrix-org/synapse/issues/15893)) -- Remove deprecated `worker_replication_host`, `worker_replication_http_port` and `worker_replication_http_tls` configuration options. ([\#15860](https://github.com/matrix-org/synapse/issues/15860)) ### Internal Changes From 92014fbf7286afa9099d162b22feda868af820f8 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Tue, 11 Jul 2023 15:16:19 +0100 Subject: [PATCH 151/153] Don't build wheels for Python 3.7 (#15917) * Don't build wheels for CPython or PyPy 3.7 * Update pyproject.toml comments * Manually update the changelog --- CHANGES.md | 2 +- pyproject.toml | 10 +++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index d71d4124fa46..22d56a9a01ac 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -24,7 +24,7 @@ See [the upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.88 ### Deprecations and Removals - **Remove deprecated `worker_replication_host`, `worker_replication_http_port` and `worker_replication_http_tls` configuration options.** See the [upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.88/docs/upgrade.md#removal-of-worker_replication_-settings) for more details. ([\#15860](https://github.com/matrix-org/synapse/issues/15860)) -- Remove support for Python 3.7 and hence for Debian Buster. 
([\#15851](https://github.com/matrix-org/synapse/issues/15851), [\#15892](https://github.com/matrix-org/synapse/issues/15892), [\#15893](https://github.com/matrix-org/synapse/issues/15893)) +- Remove support for Python 3.7 and hence for Debian Buster. ([\#15851](https://github.com/matrix-org/synapse/issues/15851), [\#15892](https://github.com/matrix-org/synapse/issues/15892), [\#15893](https://github.com/matrix-org/synapse/issues/15893), [\#15917](https://github.com/matrix-org/synapse/pull/15917)) ### Internal Changes diff --git a/pyproject.toml b/pyproject.toml index b61b8b17acf9..d56602b2df14 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -373,7 +373,15 @@ build-backend = "poetry.core.masonry.api" [tool.cibuildwheel] # Skip unsupported platforms (by us or by Rust). -skip = "cp36* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64" +# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets. +# We skip: +# - CPython 3.6 and 3.7: EOLed +# - PyPy 3.7: we only support Python 3.8+ +# - musllinux i686: excluded to reduce number of wheels we build. +# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677 +# - PyPy on Aarch64 and musllinux on aarch64: too slow to build. +# c.f. https://github.com/matrix-org/synapse/pull/14259 +skip = "cp36* cp37* pp37* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64" # We need a rust compiler before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y --profile minimal" From e625c3dca0b8147a204fe812af36b784473a6b50 Mon Sep 17 00:00:00 2001 From: Shay Date: Tue, 18 Jul 2023 03:44:09 -0700 Subject: [PATCH 152/153] Revert "Stop writing to column `user_id` of tables `profiles` and `user_filters`. (#15953) * Revert "Stop writing to column `user_id` of tables `profiles` and `user_filters` (#15787)" This reverts commit f25b0f88081bb436bef914983cff7087b54eba5f. * newsfragement --- changelog.d/15953.misc | 1 + synapse/storage/database.py | 2 - synapse/storage/databases/main/__init__.py | 6 +- synapse/storage/databases/main/filtering.py | 5 +- synapse/storage/databases/main/profile.py | 12 ++- synapse/storage/schema/__init__.py | 9 +- .../79/01_drop_user_id_constraint_profiles.py | 50 ---------- ...02_drop_user_id_constraint_user_filters.py | 54 ----------- tests/storage/test_profile.py | 63 +++++++++++++ tests/storage/test_user_filters.py | 94 +++++++++++++++++++ 10 files changed, 174 insertions(+), 122 deletions(-) create mode 100644 changelog.d/15953.misc delete mode 100644 synapse/storage/schema/main/delta/79/01_drop_user_id_constraint_profiles.py delete mode 100644 synapse/storage/schema/main/delta/79/02_drop_user_id_constraint_user_filters.py create mode 100644 tests/storage/test_user_filters.py diff --git a/changelog.d/15953.misc b/changelog.d/15953.misc new file mode 100644 index 000000000000..a20e78178fff --- /dev/null +++ b/changelog.d/15953.misc @@ -0,0 +1 @@ +Revert "Stop writing to column user_id of tables profiles and user_filters`. 
diff --git a/synapse/storage/database.py b/synapse/storage/database.py index c9d687fb2faf..a1c8fb0f46a4 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -98,8 +98,6 @@ "event_push_summary": "event_push_summary_unique_index2", "receipts_linearized": "receipts_linearized_unique_index", "receipts_graph": "receipts_graph_unique_index", - "profiles": "profiles_full_user_id_key_idx", - "user_filters": "full_users_filters_unique_idx", } diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py index b6028853c939..80c0304b1917 100644 --- a/synapse/storage/databases/main/__init__.py +++ b/synapse/storage/databases/main/__init__.py @@ -15,7 +15,7 @@ # limitations under the License. import logging -from typing import TYPE_CHECKING, List, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, List, Optional, Tuple, cast from synapse.api.constants import Direction from synapse.config.homeserver import HomeServerConfig @@ -196,7 +196,7 @@ def get_users_paginate_txn( txn: LoggingTransaction, ) -> Tuple[List[JsonDict], int]: filters = [] - args: List[Union[str, int]] = [] + args = [self.hs.config.server.server_name] # Set ordering order_by_column = UserSortOrder(order_by).value @@ -263,7 +263,7 @@ def get_users_paginate_txn( sql_base = f""" FROM users as u - LEFT JOIN profiles AS p ON u.name = p.full_user_id + LEFT JOIN profiles AS p ON u.name = '@' || p.user_id || ':' || ? LEFT JOIN erased_users AS eu ON u.name = eu.user_id {where_clause} """ diff --git a/synapse/storage/databases/main/filtering.py b/synapse/storage/databases/main/filtering.py index 75f7fe875651..fff417f9e3bb 100644 --- a/synapse/storage/databases/main/filtering.py +++ b/synapse/storage/databases/main/filtering.py @@ -188,13 +188,14 @@ def _do_txn(txn: LoggingTransaction) -> int: filter_id = max_id + 1 sql = ( - "INSERT INTO user_filters (full_user_id, filter_id, filter_json)" - "VALUES(?, ?, ?)" + "INSERT INTO user_filters (full_user_id, user_id, filter_id, filter_json)" + "VALUES(?, ?, ?, ?)" ) txn.execute( sql, ( user_id.to_string(), + user_id.localpart, filter_id, bytearray(def_json), ), diff --git a/synapse/storage/databases/main/profile.py b/synapse/storage/databases/main/profile.py index 660a5507b75a..3ba9cc88537a 100644 --- a/synapse/storage/databases/main/profile.py +++ b/synapse/storage/databases/main/profile.py @@ -173,9 +173,10 @@ async def get_profile_avatar_url(self, user_id: UserID) -> Optional[str]: ) async def create_profile(self, user_id: UserID) -> None: + user_localpart = user_id.localpart await self.db_pool.simple_insert( table="profiles", - values={"full_user_id": user_id.to_string()}, + values={"user_id": user_localpart, "full_user_id": user_id.to_string()}, desc="create_profile", ) @@ -190,11 +191,13 @@ async def set_profile_displayname( new_displayname: The new display name. If this is None, the user's display name is removed. """ + user_localpart = user_id.localpart await self.db_pool.simple_upsert( table="profiles", - keyvalues={"full_user_id": user_id.to_string()}, + keyvalues={"user_id": user_localpart}, values={ "displayname": new_displayname, + "full_user_id": user_id.to_string(), }, desc="set_profile_displayname", ) @@ -210,10 +213,11 @@ async def set_profile_avatar_url( new_avatar_url: The new avatar URL. If this is None, the user's avatar is removed. 
""" + user_localpart = user_id.localpart await self.db_pool.simple_upsert( table="profiles", - keyvalues={"full_user_id": user_id.to_string()}, - values={"avatar_url": new_avatar_url}, + keyvalues={"user_id": user_localpart}, + values={"avatar_url": new_avatar_url, "full_user_id": user_id.to_string()}, desc="set_profile_avatar_url", ) diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index 6d14963c0abe..fc190a8b13cf 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -SCHEMA_VERSION = 79 # remember to update the list below when updating +SCHEMA_VERSION = 78 # remember to update the list below when updating """Represents the expectations made by the codebase about the database schema This should be incremented whenever the codebase changes its requirements on the @@ -106,9 +106,6 @@ Changes in SCHEMA_VERSION = 78 - Validate check (full_user_id IS NOT NULL) on tables profiles and user_filters - -Changes in SCHEMA_VERSION = 79 - - We no longer write to column user_id of tables profiles and user_filters """ @@ -121,9 +118,7 @@ # # insertions to the column `full_user_id` of tables profiles and user_filters can no # longer be null - # - # we no longer write to column `full_user_id` of tables profiles and user_filters - 78 + 76 ) """Limit on how far the synapse codebase can be rolled back without breaking db compat diff --git a/synapse/storage/schema/main/delta/79/01_drop_user_id_constraint_profiles.py b/synapse/storage/schema/main/delta/79/01_drop_user_id_constraint_profiles.py deleted file mode 100644 index 3541266f7db4..000000000000 --- a/synapse/storage/schema/main/delta/79/01_drop_user_id_constraint_profiles.py +++ /dev/null @@ -1,50 +0,0 @@ -from synapse.storage.database import LoggingTransaction -from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine - - -def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: - """ - Update to drop the NOT NULL constraint on column user_id so that we can cease to - write to it without inserts to other columns triggering the constraint - """ - - if isinstance(database_engine, PostgresEngine): - drop_sql = """ - ALTER TABLE profiles ALTER COLUMN user_id DROP NOT NULL - """ - cur.execute(drop_sql) - else: - # irritatingly in SQLite we need to rewrite the table to drop the constraint. 
- cur.execute("DROP TABLE IF EXISTS temp_profiles") - - create_sql = """ - CREATE TABLE temp_profiles ( - full_user_id text NOT NULL, - user_id text, - displayname text, - avatar_url text, - UNIQUE (full_user_id), - UNIQUE (user_id) - ) - """ - cur.execute(create_sql) - - copy_sql = """ - INSERT INTO temp_profiles ( - user_id, - displayname, - avatar_url, - full_user_id) - SELECT user_id, displayname, avatar_url, full_user_id FROM profiles - """ - cur.execute(copy_sql) - - drop_sql = """ - DROP TABLE profiles - """ - cur.execute(drop_sql) - - rename_sql = """ - ALTER TABLE temp_profiles RENAME to profiles - """ - cur.execute(rename_sql) diff --git a/synapse/storage/schema/main/delta/79/02_drop_user_id_constraint_user_filters.py b/synapse/storage/schema/main/delta/79/02_drop_user_id_constraint_user_filters.py deleted file mode 100644 index 8e7569c47073..000000000000 --- a/synapse/storage/schema/main/delta/79/02_drop_user_id_constraint_user_filters.py +++ /dev/null @@ -1,54 +0,0 @@ -from synapse.storage.database import LoggingTransaction -from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine - - -def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: - """ - Update to drop the NOT NULL constraint on column user_id so that we can cease to - write to it without inserts to other columns triggering the constraint - """ - if isinstance(database_engine, PostgresEngine): - drop_sql = """ - ALTER TABLE user_filters ALTER COLUMN user_id DROP NOT NULL - """ - cur.execute(drop_sql) - - else: - # irritatingly in SQLite we need to rewrite the table to drop the constraint. - cur.execute("DROP TABLE IF EXISTS temp_user_filters") - - create_sql = """ - CREATE TABLE temp_user_filters ( - full_user_id text NOT NULL, - user_id text, - filter_id bigint NOT NULL, - filter_json bytea NOT NULL - ) - """ - cur.execute(create_sql) - - index_sql = """ - CREATE UNIQUE INDEX IF NOT EXISTS user_filters_full_user_id_unique ON - temp_user_filters (full_user_id, filter_id) - """ - cur.execute(index_sql) - - copy_sql = """ - INSERT INTO temp_user_filters ( - user_id, - filter_id, - filter_json, - full_user_id) - SELECT user_id, filter_id, filter_json, full_user_id FROM user_filters - """ - cur.execute(copy_sql) - - drop_sql = """ - DROP TABLE user_filters - """ - cur.execute(drop_sql) - - rename_sql = """ - ALTER TABLE temp_user_filters RENAME to user_filters - """ - cur.execute(rename_sql) diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py index bbe8bd88bc5b..fe5bb7791336 100644 --- a/tests/storage/test_profile.py +++ b/tests/storage/test_profile.py @@ -15,6 +15,8 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.server import HomeServer +from synapse.storage.database import LoggingTransaction +from synapse.storage.engines import PostgresEngine from synapse.types import UserID from synapse.util import Clock @@ -62,3 +64,64 @@ def test_avatar_url(self) -> None: self.assertIsNone( self.get_success(self.store.get_profile_avatar_url(self.u_frank)) ) + + def test_profiles_bg_migration(self) -> None: + """ + Test background job that copies entries from column user_id to full_user_id, adding + the hostname in the process. 
+ """ + updater = self.hs.get_datastores().main.db_pool.updates + + # drop the constraint so we can insert nulls in full_user_id to populate the test + if isinstance(self.store.database_engine, PostgresEngine): + + def f(txn: LoggingTransaction) -> None: + txn.execute( + "ALTER TABLE profiles DROP CONSTRAINT full_user_id_not_null" + ) + + self.get_success(self.store.db_pool.runInteraction("", f)) + + for i in range(0, 70): + self.get_success( + self.store.db_pool.simple_insert( + "profiles", + {"user_id": f"hello{i:02}"}, + ) + ) + + # re-add the constraint so that when it's validated it actually exists + if isinstance(self.store.database_engine, PostgresEngine): + + def f(txn: LoggingTransaction) -> None: + txn.execute( + "ALTER TABLE profiles ADD CONSTRAINT full_user_id_not_null CHECK (full_user_id IS NOT NULL) NOT VALID" + ) + + self.get_success(self.store.db_pool.runInteraction("", f)) + + self.get_success( + self.store.db_pool.simple_insert( + "background_updates", + values={ + "update_name": "populate_full_user_id_profiles", + "progress_json": "{}", + }, + ) + ) + + self.get_success( + updater.run_background_updates(False), + ) + + expected_values = [] + for i in range(0, 70): + expected_values.append((f"@hello{i:02}:{self.hs.hostname}",)) + + res = self.get_success( + self.store.db_pool.execute( + "", None, "SELECT full_user_id from profiles ORDER BY full_user_id" + ) + ) + self.assertEqual(len(res), len(expected_values)) + self.assertEqual(res, expected_values) diff --git a/tests/storage/test_user_filters.py b/tests/storage/test_user_filters.py new file mode 100644 index 000000000000..bab802f56ec6 --- /dev/null +++ b/tests/storage/test_user_filters.py @@ -0,0 +1,94 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from twisted.test.proto_helpers import MemoryReactor + +from synapse.server import HomeServer +from synapse.storage.database import LoggingTransaction +from synapse.storage.engines import PostgresEngine +from synapse.util import Clock + +from tests import unittest + + +class UserFiltersStoreTestCase(unittest.HomeserverTestCase): + """ + Test background migration that copies entries from column user_id to full_user_id, adding + the hostname in the process. 
+ """ + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + + def test_bg_migration(self) -> None: + updater = self.hs.get_datastores().main.db_pool.updates + + # drop the constraint so we can insert nulls in full_user_id to populate the test + if isinstance(self.store.database_engine, PostgresEngine): + + def f(txn: LoggingTransaction) -> None: + txn.execute( + "ALTER TABLE user_filters DROP CONSTRAINT full_user_id_not_null" + ) + + self.get_success(self.store.db_pool.runInteraction("", f)) + + for i in range(0, 70): + self.get_success( + self.store.db_pool.simple_insert( + "user_filters", + { + "user_id": f"hello{i:02}", + "filter_id": i, + "filter_json": bytearray(i), + }, + ) + ) + + # re-add the constraint so that when it's validated it actually exists + if isinstance(self.store.database_engine, PostgresEngine): + + def f(txn: LoggingTransaction) -> None: + txn.execute( + "ALTER TABLE user_filters ADD CONSTRAINT full_user_id_not_null CHECK (full_user_id IS NOT NULL) NOT VALID" + ) + + self.get_success(self.store.db_pool.runInteraction("", f)) + + self.get_success( + self.store.db_pool.simple_insert( + "background_updates", + values={ + "update_name": "populate_full_user_id_user_filters", + "progress_json": "{}", + }, + ) + ) + + self.get_success( + updater.run_background_updates(False), + ) + + expected_values = [] + for i in range(0, 70): + expected_values.append((f"@hello{i:02}:{self.hs.hostname}",)) + + res = self.get_success( + self.store.db_pool.execute( + "", None, "SELECT full_user_id from user_filters ORDER BY full_user_id" + ) + ) + self.assertEqual(len(res), len(expected_values)) + self.assertEqual(res, expected_values) From 69699a9bd11822635fcf2166f5dec2ce2365219c Mon Sep 17 00:00:00 2001 From: "Olivier Wilkinson (reivilibre)" Date: Tue, 18 Jul 2023 14:06:00 +0100 Subject: [PATCH 153/153] 1.88.0 --- CHANGES.md | 10 +++++++++- changelog.d/15953.misc | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 4 files changed, 16 insertions(+), 3 deletions(-) delete mode 100644 changelog.d/15953.misc diff --git a/CHANGES.md b/CHANGES.md index 22d56a9a01ac..f379c994f03f 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,4 +1,4 @@ -# Synapse 1.88.0rc1 (2023-07-11) +# Synapse 1.88.0 (2023-07-18) This release - raises the minimum supported version of Python to 3.8, as Python 3.7 is now [end-of-life](https://devguide.python.org/versions/), and @@ -6,6 +6,14 @@ This release See [the upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.88/docs/upgrade.md#upgrading-to-v1880) for more information. + +### Bugfixes + +- Revert "Stop writing to column `user_id` of tables `profiles` and `user_filters`", which was introduced in Synapse 1.88.0rc1. ([\#15953](https://github.com/matrix-org/synapse/issues/15953)) + + +# Synapse 1.88.0rc1 (2023-07-11) + ### Features - Add `not_user_type` param to the [list accounts admin API](https://matrix-org.github.io/synapse/v1.88/admin_api/user_admin_api.html#list-accounts). ([\#15844](https://github.com/matrix-org/synapse/issues/15844)) diff --git a/changelog.d/15953.misc b/changelog.d/15953.misc deleted file mode 100644 index a20e78178fff..000000000000 --- a/changelog.d/15953.misc +++ /dev/null @@ -1 +0,0 @@ -Revert "Stop writing to column user_id of tables profiles and user_filters`. 
diff --git a/debian/changelog b/debian/changelog
index 763edb8ec256..a369e0e5c27f 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.88.0) stable; urgency=medium
+
+  * New Synapse release 1.88.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Jul 2023 13:59:28 +0100
+
 matrix-synapse-py3 (1.88.0~rc1) stable; urgency=medium
 
   * New Synapse release 1.88.0rc1.
diff --git a/pyproject.toml b/pyproject.toml
index d56602b2df14..4382ff38e53d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"
 
 [tool.poetry]
 name = "matrix-synapse"
-version = "1.88.0rc1"
+version = "1.88.0"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "Apache-2.0"
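
A note on the background updates exercised by the two new tests above: both test_profiles_bg_migration and UserFiltersStoreTestCase.test_bg_migration assert that the populate_full_user_id_profiles and populate_full_user_id_user_filters updates turn each bare local part stored in user_id into a fully-qualified Matrix ID in full_user_id, using the homeserver's hostname. The snippet below is a minimal standalone sketch of that expected mapping only; the helper name to_full_user_id is illustrative and is not part of these patches or of Synapse's API.

    # Illustrative sketch: mirrors the expectation the tests assert, not the
    # actual implementation of the background updates.
    def to_full_user_id(local_part: str, hostname: str) -> str:
        # A bare local part such as "hello03" is stored in full_user_id as
        # the fully-qualified MXID "@hello03:<hostname>".
        return f"@{local_part}:{hostname}"


    if __name__ == "__main__":
        hostname = "test"  # stand-in for self.hs.hostname in the unit tests
        expected = [to_full_user_id(f"hello{i:02}", hostname) for i in range(70)]
        # Matches the expected_values lists built in both tests, e.g.:
        assert expected[0] == "@hello00:test"
        assert expected[69] == "@hello69:test"

On PostgreSQL the tests also have to drop and later re-add the full_user_id_not_null CHECK constraint around their inserts, since rows with a NULL full_user_id would otherwise be rejected before the background update has had a chance to run.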