1
0
Fork 0
mirror of https://github.com/element-hq/synapse.git synced 2025-03-31 03:45:13 +00:00

Merge branch 'develop' into hs/quarantined-media-by-hash

This commit is contained in:
Will Hunt 2025-03-27 14:50:03 +00:00 committed by GitHub
commit 9edf6dce31
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 100 additions and 28 deletions

View file

@ -1,3 +1,10 @@
# Synapse 1.127.1 (2025-03-26)
## Security
- Fix [CVE-2025-30355](https://www.cve.org/CVERecord?id=CVE-2025-30355) / [GHSA-v56r-hwv5-mxg6](https://github.com/element-hq/synapse/security/advisories/GHSA-v56r-hwv5-mxg6). **High severity vulnerability affecting federation. The vulnerability has been exploited in the wild.**
# Synapse 1.127.0 (2025-03-25)
No significant changes since 1.127.0rc1.

1
changelog.d/18283.doc Normal file
View file

@ -0,0 +1 @@
Add docs for how to clear out the Poetry wheel cache.

6
debian/changelog vendored
View file

@ -4,6 +4,12 @@ matrix-synapse-py3 (1.128.0~rc1+nmu1) UNRELEASED; urgency=medium
-- Synapse Packaging team <packages@matrix.org> Wed, 19 Mar 2025 17:38:49 +0000
matrix-synapse-py3 (1.127.1) stable; urgency=medium
* New Synapse release 1.127.1.
-- Synapse Packaging team <packages@matrix.org> Wed, 26 Mar 2025 21:07:31 +0000
matrix-synapse-py3 (1.127.0) stable; urgency=medium
* New Synapse release 1.127.0.

View file

@ -150,6 +150,28 @@ $ poetry shell
$ poetry install --extras all
```
If you want to go even further and remove the Poetry caches:
```shell
# Find your Poetry cache directory
# Docs: https://github.com/python-poetry/poetry/blob/main/docs/configuration.md#cache-directory
$ poetry config cache-dir
# Remove packages from all cached repositories
$ poetry cache clear --all .
# Go completely nuclear and clear out everything Poetry cache related
# including the wheel artifacts, which are not covered by the above command
# (see https://github.com/python-poetry/poetry/issues/10304)
#
# This is necessary in order to rebuild or fetch new wheels. For example, if you update
# the `icu` library on your system, you will need to rebuild the PyICU Python package
# in order to incorporate the correct dynamically linked library locations; otherwise you
# will run into errors like: `ImportError: libicui18n.so.75: cannot open shared object file: No such file or directory`
$ rm -rf $(poetry config cache-dir)
```
## ...run a command in the `poetry` virtualenv?
Use `poetry run cmd args` when you need the python virtualenv context.

View file

@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust"
[tool.poetry]
name = "matrix-synapse"
version = "1.127.0"
version = "1.127.1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"

View file

@ -29,8 +29,13 @@ from typing import Final
# the max size of a (canonical-json-encoded) event
MAX_PDU_SIZE = 65536
# the "depth" field on events is limited to 2**63 - 1
MAX_DEPTH = 2**63 - 1
# Max/min size of ints in canonical JSON
CANONICALJSON_MAX_INT = (2**53) - 1
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
# the "depth" field on events is limited to the same as what
# canonicaljson accepts
MAX_DEPTH = CANONICALJSON_MAX_INT
# the maximum length for a room alias is 255 characters
MAX_ALIAS_LENGTH = 255

View file

@ -40,6 +40,8 @@ import attr
from canonicaljson import encode_canonical_json
from synapse.api.constants import (
CANONICALJSON_MAX_INT,
CANONICALJSON_MIN_INT,
MAX_PDU_SIZE,
EventContentFields,
EventTypes,
@ -61,9 +63,6 @@ SPLIT_FIELD_REGEX = re.compile(r"\\*\.")
# Find escaped characters, e.g. those with a \ in front of them.
ESCAPE_SEQUENCE_PATTERN = re.compile(r"\\(.)")
CANONICALJSON_MAX_INT = (2**53) - 1
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
# Module API callback that allows adding fields to the unsigned section of
# events that are sent to clients.

View file

@ -86,9 +86,7 @@ class EventValidator:
# Depending on the room version, ensure the data is spec compliant JSON.
if event.room_version.strict_canonicaljson:
# Note that only the client controlled portion of the event is
# checked, since we trust the portions of the event we created.
validate_canonicaljson(event.content)
validate_canonicaljson(event.get_pdu_json())
if event.type == EventTypes.Aliases:
if "aliases" in event.content:

View file

@ -20,7 +20,7 @@
#
#
import logging
from typing import TYPE_CHECKING, Awaitable, Callable, Optional
from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional, Sequence
from synapse.api.constants import MAX_DEPTH, EventContentFields, EventTypes, Membership
from synapse.api.errors import Codes, SynapseError
@ -29,6 +29,7 @@ from synapse.crypto.event_signing import check_event_content_hash
from synapse.crypto.keyring import Keyring
from synapse.events import EventBase, make_event_from_dict
from synapse.events.utils import prune_event, validate_canonicaljson
from synapse.federation.units import filter_pdus_for_valid_depth
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.opentracing import log_kv, trace
from synapse.types import JsonDict, get_domain_from_id
@ -267,6 +268,15 @@ def _is_invite_via_3pid(event: EventBase) -> bool:
)
def parse_events_from_pdu_json(
    pdus_json: Sequence[JsonDict], room_version: RoomVersion
) -> List[EventBase]:
    """Construct events from a batch of PDU JSON received over federation.

    Only PDUs that pass `filter_pdus_for_valid_depth` (i.e. carry a
    "depth" within the canonical-JSON integer range) are deserialized;
    the rest are silently dropped.
    """
    events: List[EventBase] = []
    for valid_pdu_json in filter_pdus_for_valid_depth(pdus_json):
        events.append(event_from_pdu_json(valid_pdu_json, room_version))
    return events
def event_from_pdu_json(pdu_json: JsonDict, room_version: RoomVersion) -> EventBase:
"""Construct an EventBase from an event json received over federation

View file

@ -68,6 +68,7 @@ from synapse.federation.federation_base import (
FederationBase,
InvalidEventSignatureError,
event_from_pdu_json,
parse_events_from_pdu_json,
)
from synapse.federation.transport.client import SendJoinResponse
from synapse.http.client import is_unknown_endpoint
@ -349,7 +350,7 @@ class FederationClient(FederationBase):
room_version = await self.store.get_room_version(room_id)
pdus = [event_from_pdu_json(p, room_version) for p in transaction_data_pdus]
pdus = parse_events_from_pdu_json(transaction_data_pdus, room_version)
# Check signatures and hash of pdus, removing any from the list that fail checks
pdus[:] = await self._check_sigs_and_hash_for_pulled_events_and_fetch(
@ -393,9 +394,7 @@ class FederationClient(FederationBase):
transaction_data,
)
pdu_list: List[EventBase] = [
event_from_pdu_json(p, room_version) for p in transaction_data["pdus"]
]
pdu_list = parse_events_from_pdu_json(transaction_data["pdus"], room_version)
if pdu_list and pdu_list[0]:
pdu = pdu_list[0]
@ -809,7 +808,7 @@ class FederationClient(FederationBase):
room_version = await self.store.get_room_version(room_id)
auth_chain = [event_from_pdu_json(p, room_version) for p in res["auth_chain"]]
auth_chain = parse_events_from_pdu_json(res["auth_chain"], room_version)
signed_auth = await self._check_sigs_and_hash_for_pulled_events_and_fetch(
destination, auth_chain, room_version=room_version
@ -1529,9 +1528,7 @@ class FederationClient(FederationBase):
room_version = await self.store.get_room_version(room_id)
events = [
event_from_pdu_json(e, room_version) for e in content.get("events", [])
]
events = parse_events_from_pdu_json(content.get("events", []), room_version)
signed_events = await self._check_sigs_and_hash_for_pulled_events_and_fetch(
destination, events, room_version=room_version

View file

@ -66,7 +66,7 @@ from synapse.federation.federation_base import (
event_from_pdu_json,
)
from synapse.federation.persistence import TransactionActions
from synapse.federation.units import Edu, Transaction
from synapse.federation.units import Edu, Transaction, serialize_and_filter_pdus
from synapse.handlers.worker_lock import NEW_EVENT_DURING_PURGE_LOCK_NAME
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.context import (
@ -469,7 +469,12 @@ class FederationServer(FederationBase):
logger.info("Ignoring PDU: %s", e)
continue
event = event_from_pdu_json(p, room_version)
try:
event = event_from_pdu_json(p, room_version)
except SynapseError as e:
logger.info("Ignoring PDU for failing to deserialize: %s", e)
continue
pdus_by_room.setdefault(room_id, []).append(event)
if event.origin_server_ts > newest_pdu_ts:
@ -636,8 +641,8 @@ class FederationServer(FederationBase):
)
return {
"pdus": [pdu.get_pdu_json() for pdu in pdus],
"auth_chain": [pdu.get_pdu_json() for pdu in auth_chain],
"pdus": serialize_and_filter_pdus(pdus),
"auth_chain": serialize_and_filter_pdus(auth_chain),
}
async def on_pdu_request(
@ -761,8 +766,8 @@ class FederationServer(FederationBase):
event_json = event.get_pdu_json(time_now)
resp = {
"event": event_json,
"state": [p.get_pdu_json(time_now) for p in state_events],
"auth_chain": [p.get_pdu_json(time_now) for p in auth_chain_events],
"state": serialize_and_filter_pdus(state_events, time_now),
"auth_chain": serialize_and_filter_pdus(auth_chain_events, time_now),
"members_omitted": caller_supports_partial_state,
}
@ -1005,7 +1010,7 @@ class FederationServer(FederationBase):
time_now = self._clock.time_msec()
auth_pdus = await self.handler.on_event_auth(event_id)
res = {"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus]}
res = {"auth_chain": serialize_and_filter_pdus(auth_pdus, time_now)}
return 200, res
async def on_query_client_keys(
@ -1090,7 +1095,7 @@ class FederationServer(FederationBase):
time_now = self._clock.time_msec()
return {"events": [ev.get_pdu_json(time_now) for ev in missing_events]}
return {"events": serialize_and_filter_pdus(missing_events, time_now)}
async def on_openid_userinfo(self, token: str) -> Optional[str]:
ts_now_ms = self._clock.time_msec()

View file

@ -24,10 +24,12 @@ server protocol.
"""
import logging
from typing import List, Optional
from typing import List, Optional, Sequence
import attr
from synapse.api.constants import CANONICALJSON_MAX_INT, CANONICALJSON_MIN_INT
from synapse.events import EventBase
from synapse.types import JsonDict
logger = logging.getLogger(__name__)
@ -104,8 +106,28 @@ class Transaction:
result = {
"origin": self.origin,
"origin_server_ts": self.origin_server_ts,
"pdus": self.pdus,
"pdus": filter_pdus_for_valid_depth(self.pdus),
}
if self.edus:
result["edus"] = self.edus
return result
def filter_pdus_for_valid_depth(pdus: Sequence[JsonDict]) -> List[JsonDict]:
    """Return only the PDUs whose "depth" is representable in canonical JSON.

    A PDU is retained iff it contains a "depth" key whose value lies in
    the inclusive range [CANONICALJSON_MIN_INT, CANONICALJSON_MAX_INT];
    PDUs without a "depth" field are dropped as well.
    """
    return [
        pdu
        for pdu in pdus
        if "depth" in pdu
        and CANONICALJSON_MIN_INT <= pdu["depth"] <= CANONICALJSON_MAX_INT
    ]
def serialize_and_filter_pdus(
    pdus: Sequence[EventBase], time_now: Optional[int] = None
) -> List[JsonDict]:
    """Serialize events to PDU JSON, dropping any with an invalid depth.

    `time_now` is forwarded to `EventBase.get_pdu_json`; the serialized
    dicts are then filtered through `filter_pdus_for_valid_depth`.
    """
    serialized_pdus = [event.get_pdu_json(time_now) for event in pdus]
    return filter_pdus_for_valid_depth(serialized_pdus)