Mirror of https://github.com/element-hq/synapse.git, synced 2025-03-31 03:45:13 +00:00

Merge branch 'develop' into register-email-3pid-race

Commit 0066ba43c0
32 changed files with 199 additions and 150 deletions
Changed paths:
  .ci/scripts
  .github/workflows
  Cargo.lock
  changelog.d
  contrib/cmdclient
  docker/conf-workers
  docs
  poetry.lock
  synapse/api
  synapse/handlers
  synapse/logging
  synapse/module_api
  synapse/rest/client
  synapse/storage
  synapse/util
  tests/rest/admin
@@ -60,7 +60,7 @@ trial_postgres_tests = [
     {
         "python-version": "3.9",
         "database": "postgres",
-        "postgres-version": "11",
+        "postgres-version": "13",
         "extras": "all",
     }
 ]
.github/workflows/tests.yml (vendored, 2 lines changed)
@@ -581,7 +581,7 @@ jobs:
       matrix:
         include:
           - python-version: "3.9"
-            postgres-version: "11"
+            postgres-version: "13"

           - python-version: "3.13"
             postgres-version: "17"
Cargo.lock (generated, 8 lines changed)
@@ -13,9 +13,9 @@ dependencies = [

 [[package]]
 name = "anyhow"
-version = "1.0.94"
+version = "1.0.95"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7"
+checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"

 [[package]]
 name = "arc-swap"

@@ -451,9 +451,9 @@ dependencies = [

 [[package]]
 name = "serde_json"
-version = "1.0.133"
+version = "1.0.134"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377"
+checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d"
 dependencies = [
  "itoa",
  "memchr",
changelog.d/17994.doc (new file)
@@ -0,0 +1 @@
+Fix example in reverse proxy docs to include server port.

changelog.d/18017.misc (new file)
@@ -0,0 +1 @@
+Disable DB statement timeout when doing a purge room since it can be quite long.

changelog.d/18020.misc (new file)
@@ -0,0 +1 @@
+Remove some remaining uses of `twisted.internet.defer.returnValue`. Contributed by Colin Watson.

changelog.d/18024.bugfix (new file)
@@ -0,0 +1 @@
+Properly purge state groups tables when purging a room with the admin API.

changelog.d/18029.bugfix (new file)
@@ -0,0 +1 @@
+Fix a bug preventing the admin redaction endpoint from working on messages from remote users.

changelog.d/18034.removal (new file)
@@ -0,0 +1 @@
+Remove support for PostgreSQL 11 and 12. Contributed by @clokep.

changelog.d/18043.bugfix (new file)
@@ -0,0 +1 @@
+Fix a bug preventing the admin redaction endpoint from working on messages from remote users.

changelog.d/18063.misc (new file)
@@ -0,0 +1 @@
+Refactor `get_profile` to no longer include fields with a value of `None`.
@@ -245,7 +245,7 @@ class SynapseCmd(cmd.Cmd):

         if "flows" not in json_res:
             print("Failed to find any login flows.")
-            defer.returnValue(False)
+            return False

         flow = json_res["flows"][0]  # assume first is the one we want.
         if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:

@@ -254,8 +254,8 @@ class SynapseCmd(cmd.Cmd):
                 "Unable to login via the command line client. Please visit "
                 "%s to login." % fallback_url
             )
-            defer.returnValue(False)
-        defer.returnValue(True)
+            return False
+        return True

     def do_emailrequest(self, line):
         """Requests the association of a third party identifier
@@ -78,7 +78,7 @@ class TwistedHttpClient(HttpClient):
             url, data, headers_dict={"Content-Type": ["application/json"]}
         )
         body = yield readBody(response)
-        defer.returnValue((response.code, body))
+        return response.code, body

     @defer.inlineCallbacks
     def get_json(self, url, args=None):

@@ -88,7 +88,7 @@ class TwistedHttpClient(HttpClient):
             url = "%s?%s" % (url, qs)
         response = yield self._create_get_request(url)
         body = yield readBody(response)
-        defer.returnValue(json.loads(body))
+        return json.loads(body)

     def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
         """Wrapper of _create_request to issue a PUT request"""

@@ -134,7 +134,7 @@ class TwistedHttpClient(HttpClient):
         response = yield self._create_request(method, url)

         body = yield readBody(response)
-        defer.returnValue(json.loads(body))
+        return json.loads(body)

     @defer.inlineCallbacks
     def _create_request(

@@ -173,7 +173,7 @@ class TwistedHttpClient(HttpClient):
         if self.verbose:
            print("Status %s %s" % (response.code, response.phrase))
            print(pformat(list(response.headers.getAllRawHeaders())))
-        defer.returnValue(response)
+        return response

     def sleep(self, seconds):
         d = defer.Deferred()
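Aside: the cmdclient hunks above are all the same mechanical change. `defer.returnValue(value)` was the Python 2 era way to return a value from a `@defer.inlineCallbacks` generator (it works by raising `_DefGen_Return`); on Python 3 a plain `return value` does the same job. A minimal standalone sketch of the before/after, not taken from this commit:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def old_style():
        result = yield defer.succeed(42)
        defer.returnValue(result)  # legacy idiom: raises _DefGen_Return internally

    @defer.inlineCallbacks
    def new_style():
        result = yield defer.succeed(42)
        return result  # equivalent on Python 3; what this commit switches to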
@@ -38,6 +38,9 @@ server {
 {% if using_unix_sockets %}
         proxy_pass http://unix:/run/main_public.sock;
 {% else %}
+        # note: do not add a path (even a single /) after the port in `proxy_pass`,
+        # otherwise nginx will canonicalise the URI and cause signature verification
+        # errors.
         proxy_pass http://localhost:8080;
 {% endif %}
         proxy_set_header X-Forwarded-For $remote_addr;
@@ -74,7 +74,7 @@ server {
     proxy_pass http://localhost:8008;
     proxy_set_header X-Forwarded-For $remote_addr;
     proxy_set_header X-Forwarded-Proto $scheme;
-    proxy_set_header Host $host;
+    proxy_set_header Host $host:$server_port;

     # Nginx by default only allows file uploads up to 1M in size
     # Increase client_max_body_size to match max_upload_size defined in homeserver.yaml
@@ -117,6 +117,14 @@ each upgrade are complete before moving on to the next upgrade, to avoid
 stacking them up. You can monitor the currently running background updates with
 [the Admin API](usage/administration/admin_api/background_updates.html#status).

+# Upgrading to v1.122.0
+
+## Dropping support for PostgreSQL 11 and 12
+
+In line with our [deprecation policy](deprecation_policy.md), we've dropped
+support for PostgreSQL 11 and 12, as they are no longer supported upstream.
+This release of Synapse requires PostgreSQL 13+.
+
 # Upgrading to v1.120.0

 ## Removal of experimental MSC3886 feature
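Aside: if you want to confirm which PostgreSQL version Synapse is talking to before upgrading, a hedged sketch using psycopg2 (the driver Synapse uses); the DSN is a placeholder for your own settings:

    import psycopg2

    # Placeholder connection settings -- substitute your own.
    conn = psycopg2.connect("dbname=synapse user=synapse_user host=localhost")

    # psycopg2 reports the server version as an integer, e.g. 130004 for 13.4.
    print("server_version:", conn.server_version)
    if conn.server_version < 130000:
        print("Upgrade PostgreSQL to 13+ before moving to Synapse v1.122.0")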
poetry.lock (generated, 36 lines changed)
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.

 [[package]]
 name = "annotated-types"

@@ -32,13 +32,13 @@ tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]

 [[package]]
 name = "authlib"
-version = "1.3.2"
+version = "1.4.0"
 description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
 optional = true
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 files = [
-    {file = "Authlib-1.3.2-py2.py3-none-any.whl", hash = "sha256:ede026a95e9f5cdc2d4364a52103f5405e75aa156357e831ef2bfd0bc5094dfc"},
-    {file = "authlib-1.3.2.tar.gz", hash = "sha256:4b16130117f9eb82aa6eec97f6dd4673c3f960ac0283ccdae2897ee4bc030ba2"},
+    {file = "Authlib-1.4.0-py2.py3-none-any.whl", hash = "sha256:4bb20b978c8b636222b549317c1815e1fe62234fc1c5efe8855d84aebf3a74e3"},
+    {file = "authlib-1.4.0.tar.gz", hash = "sha256:1c1e6608b5ed3624aeeee136ca7f8c120d6f51f731aa152b153d54741840e1f2"},
 ]

 [package.dependencies]

@@ -1377,17 +1377,17 @@ files = [

 [[package]]
 name = "mypy-zope"
-version = "1.0.8"
+version = "1.0.9"
 description = "Plugin for mypy to support zope interfaces"
 optional = false
 python-versions = "*"
 files = [
-    {file = "mypy_zope-1.0.8-py3-none-any.whl", hash = "sha256:8794a77dae0c7e2f28b8ac48569091310b3ee45bb9d6cd4797dcb837c40f9976"},
-    {file = "mypy_zope-1.0.8.tar.gz", hash = "sha256:854303a95aefc4289e8a0796808e002c2c7ecde0a10a8f7b8f48092f94ef9b9f"},
+    {file = "mypy_zope-1.0.9-py3-none-any.whl", hash = "sha256:6666c1556891a3cb186137519dbd7a58cb30fb72b2504798cad47b35391921ba"},
+    {file = "mypy_zope-1.0.9.tar.gz", hash = "sha256:37d6985dfb05a4c27b35cff47577fd5bad878db4893ddedf54d165f7389a1cdb"},
 ]

 [package.dependencies]
-mypy = ">=1.0.0,<1.13.0"
+mypy = ">=1.0.0,<1.14.0"
 "zope.interface" = "*"
 "zope.schema" = "*"

@@ -2627,19 +2627,20 @@ docs = ["sphinx (<7.0.0)"]

 [[package]]
 name = "twine"
-version = "5.1.1"
+version = "6.0.1"
 description = "Collection of utilities for publishing packages on PyPI"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "twine-5.1.1-py3-none-any.whl", hash = "sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997"},
-    {file = "twine-5.1.1.tar.gz", hash = "sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db"},
+    {file = "twine-6.0.1-py3-none-any.whl", hash = "sha256:9c6025b203b51521d53e200f4a08b116dee7500a38591668c6a6033117bdc218"},
+    {file = "twine-6.0.1.tar.gz", hash = "sha256:36158b09df5406e1c9c1fb8edb24fc2be387709443e7376689b938531582ee27"},
 ]

 [package.dependencies]
-importlib-metadata = ">=3.6"
-keyring = ">=15.1"
-pkginfo = ">=1.8.1,<1.11"
+importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""}
+keyring = {version = ">=15.1", markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\""}
 packaging = "*"
+pkginfo = ">=1.8.1"
 readme-renderer = ">=35.0"
 requests = ">=2.20"
 requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0"

@@ -2647,6 +2648,9 @@ rfc3986 = ">=1.4.0"
 rich = ">=12.0.0"
 urllib3 = ">=1.26.0"

+[package.extras]
+keyring = ["keyring (>=15.1)"]
+
 [[package]]
 name = "twisted"
 version = "24.7.0"

@@ -3138,4 +3142,4 @@ user-search = ["pyicu"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.9.0"
-content-hash = "170c8bc97d4cd16a38a98ed9d48bef0ef09c994d8b129160ef262306cf689db7"
+content-hash = "d71159b19349fdc0b7cd8e06e8c8778b603fc37b941c6df34ddc31746783d94d"
@@ -320,3 +320,8 @@ class ApprovalNoticeMedium:
 class Direction(enum.Enum):
     BACKWARDS = "b"
     FORWARDS = "f"
+
+
+class ProfileFields:
+    DISPLAYNAME: Final = "displayname"
+    AVATAR_URL: Final = "avatar_url"
@@ -473,7 +473,7 @@ class AdminHandler:
             "type": EventTypes.Redaction,
             "content": {"reason": reason} if reason else {},
             "room_id": room,
-            "sender": user_id,
+            "sender": requester.user.to_string(),
         }
         if room_version.updated_redaction_rules:
             event_dict["content"]["redacts"] = event.event_id
@@ -22,6 +22,7 @@ import logging
 import random
 from typing import TYPE_CHECKING, List, Optional, Union

+from synapse.api.constants import ProfileFields
 from synapse.api.errors import (
     AuthError,
     Codes,

@@ -83,7 +84,7 @@ class ProfileHandler:

         Returns:
             A JSON dictionary. For local queries this will include the displayname and avatar_url
-            fields. For remote queries it may contain arbitrary information.
+            fields, if set. For remote queries it may contain arbitrary information.
         """
         target_user = UserID.from_string(user_id)

@@ -92,10 +93,13 @@ class ProfileHandler:
             if profileinfo.display_name is None and profileinfo.avatar_url is None:
                 raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND)

-            return {
-                "displayname": profileinfo.display_name,
-                "avatar_url": profileinfo.avatar_url,
-            }
+            # Do not include display name or avatar if unset.
+            ret = {}
+            if profileinfo.display_name is not None:
+                ret[ProfileFields.DISPLAYNAME] = profileinfo.display_name
+            if profileinfo.avatar_url is not None:
+                ret[ProfileFields.AVATAR_URL] = profileinfo.avatar_url
+            return ret
         else:
             try:
                 result = await self.federation.make_query(
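Aside: the practical consequence of the `get_profile` change, shown with hypothetical values -- callers can no longer assume both keys exist, so they must use `in` or `.get()`, which is exactly what the SsoHandler and user-directory hunks below switch to:

    # Hypothetical return value: avatar_url was unset, so the key is
    # absent entirely rather than mapped to None.
    profile = {"displayname": "Alice"}

    avatar_url = profile.get("avatar_url")  # None when unset, no KeyError
    if "avatar_url" in profile:
        print("avatar is", profile["avatar_url"])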
@@ -43,7 +43,7 @@ from typing_extensions import Protocol
 from twisted.web.iweb import IRequest
 from twisted.web.server import Request

-from synapse.api.constants import LoginType
+from synapse.api.constants import LoginType, ProfileFields
 from synapse.api.errors import Codes, NotFoundError, RedirectException, SynapseError
 from synapse.config.sso import SsoAttributeRequirement
 from synapse.handlers.device import DeviceHandler

@@ -813,9 +813,10 @@ class SsoHandler:

         # bail if user already has the same avatar
         profile = await self._profile_handler.get_profile(user_id)
-        if profile["avatar_url"] is not None:
-            server_name = profile["avatar_url"].split("/")[-2]
-            media_id = profile["avatar_url"].split("/")[-1]
+        if ProfileFields.AVATAR_URL in profile:
+            avatar_url_parts = profile[ProfileFields.AVATAR_URL].split("/")
+            server_name = avatar_url_parts[-2]
+            media_id = avatar_url_parts[-1]
             if self._is_mine_server_name(server_name):
                 media = await self._media_repo.store.get_local_media(media_id)  # type: ignore[has-type]
                 if media is not None and upload_name == media.upload_name:
@@ -26,7 +26,13 @@ from typing import TYPE_CHECKING, List, Optional, Set, Tuple
 from twisted.internet.interfaces import IDelayedCall

 import synapse.metrics
-from synapse.api.constants import EventTypes, HistoryVisibility, JoinRules, Membership
+from synapse.api.constants import (
+    EventTypes,
+    HistoryVisibility,
+    JoinRules,
+    Membership,
+    ProfileFields,
+)
 from synapse.api.errors import Codes, SynapseError
 from synapse.handlers.state_deltas import MatchChange, StateDeltasHandler
 from synapse.metrics.background_process_metrics import run_as_background_process

@@ -756,6 +762,10 @@ class UserDirectoryHandler(StateDeltasHandler):

         await self.store.update_profile_in_user_dir(
             user_id,
-            display_name=non_null_str_or_none(profile.get("displayname")),
-            avatar_url=non_null_str_or_none(profile.get("avatar_url")),
+            display_name=non_null_str_or_none(
+                profile.get(ProfileFields.DISPLAYNAME)
+            ),
+            avatar_url=non_null_str_or_none(
+                profile.get(ProfileFields.AVATAR_URL)
+            ),
         )
@@ -20,13 +20,10 @@
 #

 import logging
-from types import TracebackType
-from typing import Optional, Type
+from typing import Optional

 from opentracing import Scope, ScopeManager, Span

-import twisted
-
 from synapse.logging.context import (
     LoggingContext,
     current_context,

@@ -112,9 +109,6 @@ class _LogContextScope(Scope):
     """
     A custom opentracing scope, associated with a LogContext

-    * filters out _DefGen_Return exceptions which arise from calling
-      `defer.returnValue` in Twisted code
-
     * When the scope is closed, the logcontext's active scope is reset to None.
       and - if enter_logcontext was set - the logcontext is finished too.
     """

@@ -146,17 +140,6 @@ class _LogContextScope(Scope):
         self._finish_on_close = finish_on_close
         self._enter_logcontext = enter_logcontext

-    def __exit__(
-        self,
-        exc_type: Optional[Type[BaseException]],
-        value: Optional[BaseException],
-        traceback: Optional[TracebackType],
-    ) -> None:
-        if exc_type == twisted.internet.defer._DefGen_Return:
-            # filter out defer.returnValue() calls
-            exc_type = value = traceback = None
-        super().__exit__(exc_type, value, traceback)
-
     def __str__(self) -> str:
         return f"Scope<{self.span}>"
@@ -45,6 +45,7 @@ from twisted.internet.interfaces import IDelayedCall
 from twisted.web.resource import Resource

 from synapse.api import errors
+from synapse.api.constants import ProfileFields
 from synapse.api.errors import SynapseError
 from synapse.api.presence import UserPresenceState
 from synapse.config import ConfigError

@@ -1086,7 +1087,10 @@ class ModuleApi:
             content = {}

             # Set the profile if not already done by the module.
-            if "avatar_url" not in content or "displayname" not in content:
+            if (
+                ProfileFields.AVATAR_URL not in content
+                or ProfileFields.DISPLAYNAME not in content
+            ):
                 try:
                     # Try to fetch the user's profile.
                     profile = await self._hs.get_profile_handler().get_profile(

@@ -1095,8 +1099,8 @@ class ModuleApi:
                 except SynapseError as e:
                     # If the profile couldn't be found, use default values.
                     profile = {
-                        "displayname": target_user_id.localpart,
-                        "avatar_url": None,
+                        ProfileFields.DISPLAYNAME: target_user_id.localpart,
+                        ProfileFields.AVATAR_URL: None,
                     }

                     if e.code != 404:

@@ -1109,11 +1113,9 @@ class ModuleApi:
                     )

                 # Set the profile where it needs to be set.
-                if "avatar_url" not in content:
-                    content["avatar_url"] = profile["avatar_url"]
-
-                if "displayname" not in content:
-                    content["displayname"] = profile["displayname"]
+                for field_name in [ProfileFields.AVATAR_URL, ProfileFields.DISPLAYNAME]:
+                    if field_name not in content and field_name in profile:
+                        content[field_name] = profile[field_name]

             event_id, _ = await self._hs.get_room_member_handler().update_membership(
                 requester=requester,
@@ -227,14 +227,7 @@ class ProfileRestServlet(RestServlet):
         user = UserID.from_string(user_id)
         await self.profile_handler.check_profile_query_allowed(user, requester_user)

-        displayname = await self.profile_handler.get_displayname(user)
-        avatar_url = await self.profile_handler.get_avatar_url(user)
-
-        ret = {}
-        if displayname is not None:
-            ret["displayname"] = displayname
-        if avatar_url is not None:
-            ret["avatar_url"] = avatar_url
+        ret = await self.profile_handler.get_profile(user_id)

         return 200, ret
@@ -42,8 +42,8 @@ class PurgeEventsStorageController:
         """Deletes all record of a room"""

         with nested_logging_context(room_id):
-            state_groups_to_delete = await self.stores.main.purge_room(room_id)
-            await self.stores.state.purge_room_state(room_id, state_groups_to_delete)
+            await self.stores.main.purge_room(room_id)
+            await self.stores.state.purge_room_state(room_id)

     async def purge_history(
         self, room_id: str, token: str, delete_local_events: bool
@@ -20,7 +20,7 @@
 #

 import logging
-from typing import Any, List, Set, Tuple, cast
+from typing import Any, Set, Tuple, cast

 from synapse.api.errors import SynapseError
 from synapse.storage.database import LoggingTransaction

@@ -332,7 +332,7 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore):

         return referenced_state_groups

-    async def purge_room(self, room_id: str) -> List[int]:
+    async def purge_room(self, room_id: str) -> None:
         """Deletes all record of a room

         Args:

@@ -348,7 +348,7 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore):
         # purge any of those rows which were added during the first.

         logger.info("[purge] Starting initial main purge of [1/2]")
-        state_groups_to_delete = await self.db_pool.runInteraction(
+        await self.db_pool.runInteraction(
             "purge_room",
             self._purge_room_txn,
             room_id=room_id,

@@ -356,18 +356,15 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore):
         )

         logger.info("[purge] Starting secondary main purge of [2/2]")
-        state_groups_to_delete.extend(
-            await self.db_pool.runInteraction(
-                "purge_room",
-                self._purge_room_txn,
-                room_id=room_id,
-            ),
+        await self.db_pool.runInteraction(
+            "purge_room",
+            self._purge_room_txn,
+            room_id=room_id,
         )

         logger.info("[purge] Done with main purge")

-        return state_groups_to_delete
-
-    def _purge_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[int]:
+    def _purge_room_txn(self, txn: LoggingTransaction, room_id: str) -> None:
         # This collides with event persistence so we cannot write new events and metadata into
         # a room while deleting it or this transaction will fail.
         if isinstance(self.database_engine, PostgresEngine):

@@ -376,18 +373,10 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore):
             (room_id,),
         )

-        # First, fetch all the state groups that should be deleted, before
-        # we delete that information.
-        txn.execute(
-            """
-            SELECT DISTINCT state_group FROM events
-            INNER JOIN event_to_state_groups USING(event_id)
-            WHERE events.room_id = ?
-            """,
-            (room_id,),
-        )
-
-        state_groups = [row[0] for row in txn]
+        if isinstance(self.database_engine, PostgresEngine):
+            # Disable statement timeouts for this transaction; purging rooms can
+            # take a while!
+            txn.execute("SET LOCAL statement_timeout = 0")

         # Get all the auth chains that are referenced by events that are to be
         # deleted.

@@ -508,5 +497,3 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore):
         # periodically anyway (https://github.com/matrix-org/synapse/issues/5888)

         self._invalidate_caches_for_room_and_stream(txn, room_id)
-
-        return state_groups
@@ -840,60 +840,42 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):

         return dict(rows)

-    async def purge_room_state(
-        self, room_id: str, state_groups_to_delete: Collection[int]
-    ) -> None:
-        """Deletes all record of a room from state tables
-
-        Args:
-            room_id:
-            state_groups_to_delete: State groups to delete
-        """
-
-        logger.info("[purge] Starting state purge")
-        await self.db_pool.runInteraction(
+    async def purge_room_state(self, room_id: str) -> None:
+        return await self.db_pool.runInteraction(
             "purge_room_state",
             self._purge_room_state_txn,
             room_id,
-            state_groups_to_delete,
         )
-        logger.info("[purge] Done with state purge")

     def _purge_room_state_txn(
         self,
         txn: LoggingTransaction,
         room_id: str,
-        state_groups_to_delete: Collection[int],
     ) -> None:
-        # first we have to delete the state groups states
-        logger.info("[purge] removing %s from state_groups_state", room_id)
-
-        self.db_pool.simple_delete_many_txn(
-            txn,
-            table="state_groups_state",
-            column="state_group",
-            values=state_groups_to_delete,
-            keyvalues={},
-        )
-
-        # ... and the state group edges
+        # Delete all edges that reference a state group linked to room_id
         logger.info("[purge] removing %s from state_group_edges", room_id)
-
-        self.db_pool.simple_delete_many_txn(
-            txn,
-            table="state_group_edges",
-            column="state_group",
-            values=state_groups_to_delete,
-            keyvalues={},
+        txn.execute(
+            """
+            DELETE FROM state_group_edges AS sge WHERE sge.state_group IN (
+                SELECT id FROM state_groups AS sg WHERE sg.room_id = ?
+            )""",
+            (room_id,),
         )

-        # ... and the state groups
-        logger.info("[purge] removing %s from state_groups", room_id)
+        # state_groups_state table has a room_id column but no index on it, unlike state_groups,
+        # so we delete them by matching the room_id through the state_groups table.
+        logger.info("[purge] removing %s from state_groups_state", room_id)
+        txn.execute(
+            """
+            DELETE FROM state_groups_state AS sgs WHERE sgs.state_group IN (
+                SELECT id FROM state_groups AS sg WHERE sg.room_id = ?
+            )""",
+            (room_id,),
+        )

-        self.db_pool.simple_delete_many_txn(
+        logger.info("[purge] removing %s from state_groups", room_id)
+        self.db_pool.simple_delete_txn(
             txn,
             table="state_groups",
-            column="id",
-            values=state_groups_to_delete,
-            keyvalues={},
+            keyvalues={"room_id": room_id},
         )
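Aside: both raw DELETEs above use the standard delete-via-subquery pattern, needed because (per the new comment) `state_groups_state` has no index on a room column, so rows are matched through `state_groups` instead. A self-contained sqlite3 sketch of the pattern, with a toy schema and hypothetical room IDs:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript("""
        CREATE TABLE state_groups (id INTEGER PRIMARY KEY, room_id TEXT);
        CREATE TABLE state_groups_state (state_group INTEGER, key TEXT);
        INSERT INTO state_groups VALUES (1, '!doomed:x'), (2, '!keep:x');
        INSERT INTO state_groups_state VALUES (1, 'a'), (1, 'b'), (2, 'c');
    """)

    # Match rows via the indexed room_id on state_groups, mirroring the
    # DELETE ... WHERE state_group IN (SELECT id ...) statements above.
    conn.execute(
        """
        DELETE FROM state_groups_state WHERE state_group IN (
            SELECT id FROM state_groups WHERE room_id = ?
        )""",
        ("!doomed:x",),
    )
    conn.execute("DELETE FROM state_groups WHERE room_id = ?", ("!doomed:x",))

    # Only the row belonging to the surviving room remains.
    assert conn.execute("SELECT count(*) FROM state_groups_state").fetchone()[0] == 1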
@@ -99,8 +99,8 @@ class PostgresEngine(
         allow_unsafe_locale = self.config.get("allow_unsafe_locale", False)

         # Are we on a supported PostgreSQL version?
-        if not allow_outdated_version and self._version < 110000:
-            raise RuntimeError("Synapse requires PostgreSQL 11 or above.")
+        if not allow_outdated_version and self._version < 130000:
+            raise RuntimeError("Synapse requires PostgreSQL 13 or above.")

         with db_conn.cursor() as txn:
             txn.execute("SHOW SERVER_ENCODING")
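Aside: the thresholds compare against PostgreSQL's numeric version format, major * 10000 + minor (for PostgreSQL 10 and later), so 130000 means 13.0. A tiny illustration:

    def decode_pg_version(num: int) -> str:
        # server_version_num is major * 10000 + minor on PostgreSQL 10+.
        return f"{num // 10000}.{num % 10000}"

    assert decode_pg_version(130000) == "13.0"
    assert decode_pg_version(170004) == "17.4"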
@@ -162,7 +162,7 @@ def _check_yield_points(
                     d = result.throwExceptionIntoGenerator(gen)
                 else:
                     d = gen.send(result)
-            except (StopIteration, defer._DefGen_Return) as e:
+            except StopIteration as e:
                 if current_context() != expected_context:
                     # This happens when the context is lost sometime *after* the
                     # final yield and returning. E.g. we forgot to yield on a

@@ -183,7 +183,7 @@ def _check_yield_points(
                     )
                 )
                 changes.append(err)
-            # The `StopIteration` or `_DefGen_Return` contains the return value from the
+            # The `StopIteration` contains the return value from the
             # generator.
             return cast(T, e.value)
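Aside: catching bare `StopIteration` suffices because a `return value` inside a generator raises `StopIteration` with the value attached -- `_DefGen_Return` only existed to emulate this on Python 2. A minimal demonstration:

    def gen():
        yield 1
        return "done"  # raises StopIteration("done") under the hood

    g = gen()
    next(g)  # -> 1
    try:
        next(g)
    except StopIteration as e:
        assert e.value == "done"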
@@ -3050,7 +3050,7 @@ PURGE_TABLES = [
     "pusher_throttle",
     "room_account_data",
     "room_tags",
-    # "state_groups",  # Current impl leaves orphaned state groups around.
+    "state_groups",
     "state_groups_state",
     "federation_inbound_events_staging",
 ]
@@ -60,6 +60,7 @@ from synapse.util import Clock
 from tests import unittest
 from tests.replication._base import BaseMultiWorkerStreamTestCase
 from tests.test_utils import SMALL_PNG
+from tests.test_utils.event_injection import inject_event
 from tests.unittest import override_config
@ -5408,6 +5409,64 @@ class UserRedactionTestCase(unittest.HomeserverTestCase):
|
|||
# we redacted 6 messages
|
||||
self.assertEqual(len(matches), 6)
|
||||
|
||||
def test_redactions_for_remote_user_succeed_with_admin_priv_in_room(self) -> None:
|
||||
"""
|
||||
Test that if the admin requester has privileges in a room, redaction requests
|
||||
succeed for a remote user
|
||||
"""
|
||||
|
||||
# inject some messages from remote user and collect event ids
|
||||
original_message_ids = []
|
||||
for i in range(5):
|
||||
event = self.get_success(
|
||||
inject_event(
|
||||
self.hs,
|
||||
room_id=self.rm1,
|
||||
type="m.room.message",
|
||||
sender="@remote:remote_server",
|
||||
content={"msgtype": "m.text", "body": f"nefarious_chatter{i}"},
|
||||
)
|
||||
)
|
||||
original_message_ids.append(event.event_id)
|
||||
|
||||
# send a request to redact a remote user's messages in a room.
|
||||
# the server admin created this room and has admin privilege in room
|
||||
channel = self.make_request(
|
||||
"POST",
|
||||
"/_synapse/admin/v1/user/@remote:remote_server/redact",
|
||||
content={"rooms": [self.rm1]},
|
||||
access_token=self.admin_tok,
|
||||
)
|
||||
self.assertEqual(channel.code, 200)
|
||||
id = channel.json_body.get("redact_id")
|
||||
|
||||
# check that there were no failed redactions
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
f"/_synapse/admin/v1/user/redact_status/{id}",
|
||||
access_token=self.admin_tok,
|
||||
)
|
||||
self.assertEqual(channel.code, 200)
|
||||
self.assertEqual(channel.json_body.get("status"), "complete")
|
||||
failed_redactions = channel.json_body.get("failed_redactions")
|
||||
self.assertEqual(failed_redactions, {})
|
||||
|
||||
filter = json.dumps({"types": [EventTypes.Redaction]})
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
f"rooms/{self.rm1}/messages?filter={filter}&limit=50",
|
||||
access_token=self.admin_tok,
|
||||
)
|
||||
self.assertEqual(channel.code, 200)
|
||||
|
||||
for event in channel.json_body["chunk"]:
|
||||
for event_id in original_message_ids:
|
||||
if event["type"] == "m.room.redaction" and event["redacts"] == event_id:
|
||||
original_message_ids.remove(event_id)
|
||||
break
|
||||
# we originally sent 5 messages so 5 should be redacted
|
||||
self.assertEqual(len(original_message_ids), 0)
|
||||
|
||||
|
||||
class UserRedactionBackgroundTaskTestCase(BaseMultiWorkerStreamTestCase):
|
||||
servlets = [
|
||||
|
|