
Merge branch 'develop' into johannes/msc4277

Johannes Marbach 2025-03-27 13:47:17 +01:00
commit 25781a1130
22 changed files with 133 additions and 40 deletions

@ -1,3 +1,17 @@
# Synapse 1.127.1 (2025-03-26)
## Security
- Fix [CVE-2025-30355](https://www.cve.org/CVERecord?id=CVE-2025-30355) / [GHSA-v56r-hwv5-mxg6](https://github.com/element-hq/synapse/security/advisories/GHSA-v56r-hwv5-mxg6). **High severity vulnerability affecting federation. The vulnerability has been exploited in the wild.**
# Synapse 1.127.0 (2025-03-25)
No significant changes since 1.127.0rc1.
# Synapse 1.127.0rc1 (2025-03-18)
### Features

Cargo.lock

@ -223,9 +223,9 @@ checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346"
[[package]]
name = "log"
version = "0.4.26"
version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
[[package]]
name = "memchr"

changelog.d/18271.docker (new file)

@ -0,0 +1 @@
Specify the architecture of installed packages via an APT config option, which is more reliable than appending ":{arch}" to package names.

changelog.d/18272.docker (new file)

@ -0,0 +1 @@
Always specify base image debian versions with a build argument.

changelog.d/18273.docker (new file)

@ -0,0 +1 @@
Allow passing arguments to start_for_complement.sh (to be sent to configure_workers_and_start.py).

changelog.d/18276.doc (new file)

@ -0,0 +1 @@
Correct a small typo in the SSO mapping providers documentation.

changelog.d/18283.doc (new file)

@ -0,0 +1 @@
Add docs for how to clear out the Poetry wheel cache.

debian/changelog

@ -1,9 +1,21 @@
matrix-synapse-py3 (1.127.0~rc1+nmu1) UNRELEASED; urgency=medium
matrix-synapse-py3 (1.128.0~rc1+nmu1) UNRELEASED; urgency=medium
* Update Poetry to 2.1.1.
-- Synapse Packaging team <packages@matrix.org> Wed, 19 Mar 2025 17:38:49 +0000
matrix-synapse-py3 (1.127.1) stable; urgency=medium
* New Synapse release 1.127.1.
-- Synapse Packaging team <packages@matrix.org> Wed, 26 Mar 2025 21:07:31 +0000
matrix-synapse-py3 (1.127.0) stable; urgency=medium
* New Synapse release 1.127.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Mar 2025 12:04:15 +0000
matrix-synapse-py3 (1.127.0~rc1) stable; urgency=medium
* New Synapse release 1.127.0rc1.

@ -148,7 +148,7 @@ RUN \
for arch in arm64 amd64; do \
mkdir -p /tmp/debs-${arch} && \
cd /tmp/debs-${arch} && \
apt-get download $(sed "s/$/:${arch}/" /tmp/pkg-list); \
apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
done
# Extract the debs for each architecture

@ -2,12 +2,13 @@
ARG SYNAPSE_VERSION=latest
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=bookworm
# first of all, we create a base image with an nginx which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing
# each time.
FROM docker.io/library/debian:bookworm-slim AS deps_base
FROM docker.io/library/debian:${DEBIAN_VERSION}-slim AS deps_base
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
@ -21,7 +22,7 @@ FROM docker.io/library/debian:bookworm-slim AS deps_base
# which makes it much easier to copy (but we need to make sure we use an image
# based on the same debian version as the synapse image, to make sure we get
# the expected version of libc.
FROM docker.io/library/redis:7-bookworm AS redis_base
FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base
# now build the final image, based on the regular Synapse docker image
FROM $FROM

@ -9,6 +9,9 @@
ARG SYNAPSE_VERSION=latest
# This is an intermediate image, to be built locally (not pulled from a registry).
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=bookworm
FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base
FROM $FROM
# First of all, we copy postgres server from the official postgres image,
@ -20,8 +23,8 @@ FROM $FROM
# the same debian version as Synapse's docker image (so the versions of the
# shared libraries match).
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=docker.io/library/postgres:13-bookworm /usr/lib/postgresql /usr/lib/postgresql
COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data

@ -5,12 +5,12 @@
set -e
echo "Complement Synapse launcher"
echo " Args: $@"
echo " Args: $*"
echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR"
function log {
d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
echo "$d $@"
echo "$d $*"
}
# Set the server name of the homeserver
@ -131,4 +131,4 @@ export SYNAPSE_TLS_KEY=/conf/server.tls.key
# Run the script that writes the necessary config files and starts supervisord, which in turn
# starts everything else
exec /configure_workers_and_start.py
exec /configure_workers_and_start.py "$@"

@ -150,6 +150,28 @@ $ poetry shell
$ poetry install --extras all
```
If you want to go even further and remove the Poetry caches:
```shell
# Find your Poetry cache directory
# Docs: https://github.com/python-poetry/poetry/blob/main/docs/configuration.md#cache-directory
$ poetry config cache-dir
# Remove packages from all cached repositories
$ poetry cache clear --all .
# Go completely nuclear and clear out everything Poetry cache related,
# including the wheel artifacts, which are not covered by the above command
# (see https://github.com/python-poetry/poetry/issues/10304)
#
# This is necessary in order to rebuild or fetch new wheels. For example, if you update
# the `icu` library on your system, you will need to rebuild the PyICU Python package
# in order to incorporate the correct dynamically linked library locations; otherwise you
# will run into errors like: `ImportError: libicui18n.so.75: cannot open shared object file: No such file or directory`
$ rm -rf $(poetry config cache-dir)
```
## ...run a command in the `poetry` virtualenv?
Use `poetry run cmd args` when you need the python virtualenv context.

@ -10,7 +10,7 @@ As an example, a SSO service may return the email address
to turn that into a displayname when creating a Matrix user for this individual.
It may choose `John Smith`, or `Smith, John [Example.com]` or any number of
variations. As each Synapse configuration may want something different, this is
where SAML mapping providers come into play.
where SSO mapping providers come into play.
SSO mapping providers are currently supported for OpenID and SAML SSO
configurations. Please see the details below for how to implement your own.

@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust"
[tool.poetry]
name = "matrix-synapse"
version = "1.127.0rc1"
version = "1.127.1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"

@ -29,8 +29,13 @@ from typing import Final
# the max size of a (canonical-json-encoded) event
MAX_PDU_SIZE = 65536
# the "depth" field on events is limited to 2**63 - 1
MAX_DEPTH = 2**63 - 1
# Max/min size of ints in canonical JSON
CANONICALJSON_MAX_INT = (2**53) - 1
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
# the "depth" field on events is limited to the same as what
# canonicaljson accepts
MAX_DEPTH = CANONICALJSON_MAX_INT
# the maximum length for a room alias is 255 characters
MAX_ALIAS_LENGTH = 255
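
For context on the bound used above: canonical JSON restricts integers to the range an IEEE 754 double can represent exactly, which is why the new limit is (2**53) - 1 rather than 2**63 - 1. A standalone Python check (illustrative only, not part of this commit):

```python
# Illustrative check of the canonical-json integer bound.
CANONICALJSON_MAX_INT = (2**53) - 1
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT

# Every integer up to 2**53 survives a round trip through a double...
assert float(CANONICALJSON_MAX_INT) == CANONICALJSON_MAX_INT
# ...but precision is lost just above that: 2**53 and 2**53 + 1 map to the same float.
assert float(2**53) == float(2**53 + 1)
```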

@ -40,6 +40,8 @@ import attr
from canonicaljson import encode_canonical_json
from synapse.api.constants import (
CANONICALJSON_MAX_INT,
CANONICALJSON_MIN_INT,
MAX_PDU_SIZE,
EventContentFields,
EventTypes,
@ -61,9 +63,6 @@ SPLIT_FIELD_REGEX = re.compile(r"\\*\.")
# Find escaped characters, e.g. those with a \ in front of them.
ESCAPE_SEQUENCE_PATTERN = re.compile(r"\\(.)")
CANONICALJSON_MAX_INT = (2**53) - 1
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
# Module API callback that allows adding fields to the unsigned section of
# events that are sent to clients.

@ -86,9 +86,7 @@ class EventValidator:
# Depending on the room version, ensure the data is spec compliant JSON.
if event.room_version.strict_canonicaljson:
# Note that only the client controlled portion of the event is
# checked, since we trust the portions of the event we created.
validate_canonicaljson(event.content)
validate_canonicaljson(event.get_pdu_json())
if event.type == EventTypes.Aliases:
if "aliases" in event.content:

@ -20,7 +20,7 @@
#
#
import logging
from typing import TYPE_CHECKING, Awaitable, Callable, Optional
from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional, Sequence
from synapse.api.constants import MAX_DEPTH, EventContentFields, EventTypes, Membership
from synapse.api.errors import Codes, SynapseError
@ -29,6 +29,7 @@ from synapse.crypto.event_signing import check_event_content_hash
from synapse.crypto.keyring import Keyring
from synapse.events import EventBase, make_event_from_dict
from synapse.events.utils import prune_event, validate_canonicaljson
from synapse.federation.units import filter_pdus_for_valid_depth
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.opentracing import log_kv, trace
from synapse.types import JsonDict, get_domain_from_id
@ -267,6 +268,15 @@ def _is_invite_via_3pid(event: EventBase) -> bool:
)
def parse_events_from_pdu_json(
pdus_json: Sequence[JsonDict], room_version: RoomVersion
) -> List[EventBase]:
return [
event_from_pdu_json(pdu_json, room_version)
for pdu_json in filter_pdus_for_valid_depth(pdus_json)
]
def event_from_pdu_json(pdu_json: JsonDict, room_version: RoomVersion) -> EventBase:
"""Construct an EventBase from an event json received over federation

@ -68,6 +68,7 @@ from synapse.federation.federation_base import (
FederationBase,
InvalidEventSignatureError,
event_from_pdu_json,
parse_events_from_pdu_json,
)
from synapse.federation.transport.client import SendJoinResponse
from synapse.http.client import is_unknown_endpoint
@ -349,7 +350,7 @@ class FederationClient(FederationBase):
room_version = await self.store.get_room_version(room_id)
pdus = [event_from_pdu_json(p, room_version) for p in transaction_data_pdus]
pdus = parse_events_from_pdu_json(transaction_data_pdus, room_version)
# Check signatures and hash of pdus, removing any from the list that fail checks
pdus[:] = await self._check_sigs_and_hash_for_pulled_events_and_fetch(
@ -393,9 +394,7 @@ class FederationClient(FederationBase):
transaction_data,
)
pdu_list: List[EventBase] = [
event_from_pdu_json(p, room_version) for p in transaction_data["pdus"]
]
pdu_list = parse_events_from_pdu_json(transaction_data["pdus"], room_version)
if pdu_list and pdu_list[0]:
pdu = pdu_list[0]
@ -809,7 +808,7 @@ class FederationClient(FederationBase):
room_version = await self.store.get_room_version(room_id)
auth_chain = [event_from_pdu_json(p, room_version) for p in res["auth_chain"]]
auth_chain = parse_events_from_pdu_json(res["auth_chain"], room_version)
signed_auth = await self._check_sigs_and_hash_for_pulled_events_and_fetch(
destination, auth_chain, room_version=room_version
@ -1529,9 +1528,7 @@ class FederationClient(FederationBase):
room_version = await self.store.get_room_version(room_id)
events = [
event_from_pdu_json(e, room_version) for e in content.get("events", [])
]
events = parse_events_from_pdu_json(content.get("events", []), room_version)
signed_events = await self._check_sigs_and_hash_for_pulled_events_and_fetch(
destination, events, room_version=room_version

@ -66,7 +66,7 @@ from synapse.federation.federation_base import (
event_from_pdu_json,
)
from synapse.federation.persistence import TransactionActions
from synapse.federation.units import Edu, Transaction
from synapse.federation.units import Edu, Transaction, serialize_and_filter_pdus
from synapse.handlers.worker_lock import NEW_EVENT_DURING_PURGE_LOCK_NAME
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.context import (
@ -469,7 +469,12 @@ class FederationServer(FederationBase):
logger.info("Ignoring PDU: %s", e)
continue
event = event_from_pdu_json(p, room_version)
try:
event = event_from_pdu_json(p, room_version)
except SynapseError as e:
logger.info("Ignoring PDU for failing to deserialize: %s", e)
continue
pdus_by_room.setdefault(room_id, []).append(event)
if event.origin_server_ts > newest_pdu_ts:
@ -636,8 +641,8 @@ class FederationServer(FederationBase):
)
return {
"pdus": [pdu.get_pdu_json() for pdu in pdus],
"auth_chain": [pdu.get_pdu_json() for pdu in auth_chain],
"pdus": serialize_and_filter_pdus(pdus),
"auth_chain": serialize_and_filter_pdus(auth_chain),
}
async def on_pdu_request(
@ -761,8 +766,8 @@ class FederationServer(FederationBase):
event_json = event.get_pdu_json(time_now)
resp = {
"event": event_json,
"state": [p.get_pdu_json(time_now) for p in state_events],
"auth_chain": [p.get_pdu_json(time_now) for p in auth_chain_events],
"state": serialize_and_filter_pdus(state_events, time_now),
"auth_chain": serialize_and_filter_pdus(auth_chain_events, time_now),
"members_omitted": caller_supports_partial_state,
}
@ -1005,7 +1010,7 @@ class FederationServer(FederationBase):
time_now = self._clock.time_msec()
auth_pdus = await self.handler.on_event_auth(event_id)
res = {"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus]}
res = {"auth_chain": serialize_and_filter_pdus(auth_pdus, time_now)}
return 200, res
async def on_query_client_keys(
@ -1090,7 +1095,7 @@ class FederationServer(FederationBase):
time_now = self._clock.time_msec()
return {"events": [ev.get_pdu_json(time_now) for ev in missing_events]}
return {"events": serialize_and_filter_pdus(missing_events, time_now)}
async def on_openid_userinfo(self, token: str) -> Optional[str]:
ts_now_ms = self._clock.time_msec()

@ -24,10 +24,12 @@ server protocol.
"""
import logging
from typing import List, Optional
from typing import List, Optional, Sequence
import attr
from synapse.api.constants import CANONICALJSON_MAX_INT, CANONICALJSON_MIN_INT
from synapse.events import EventBase
from synapse.types import JsonDict
logger = logging.getLogger(__name__)
@ -104,8 +106,28 @@ class Transaction:
result = {
"origin": self.origin,
"origin_server_ts": self.origin_server_ts,
"pdus": self.pdus,
"pdus": filter_pdus_for_valid_depth(self.pdus),
}
if self.edus:
result["edus"] = self.edus
return result
def filter_pdus_for_valid_depth(pdus: Sequence[JsonDict]) -> List[JsonDict]:
filtered_pdus = []
for pdu in pdus:
# Drop PDUs that have a depth that is outside of the range allowed
# by canonical json.
if (
"depth" in pdu
and CANONICALJSON_MIN_INT <= pdu["depth"] <= CANONICALJSON_MAX_INT
):
filtered_pdus.append(pdu)
return filtered_pdus
def serialize_and_filter_pdus(
pdus: Sequence[EventBase], time_now: Optional[int] = None
) -> List[JsonDict]:
return filter_pdus_for_valid_depth([pdu.get_pdu_json(time_now) for pdu in pdus])
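
As a rough illustration of the new helpers (inputs are made up; assumes a Synapse checkout is importable): `filter_pdus_for_valid_depth` drops any PDU whose `depth` is missing or falls outside the canonical-json integer range, and `serialize_and_filter_pdus` applies the same filter after serialising each event with `get_pdu_json`.

```python
from synapse.federation.units import filter_pdus_for_valid_depth

pdus = [
    {"type": "m.room.message", "depth": 42},         # kept
    {"type": "m.room.message", "depth": 2**63 - 1},  # dropped: exceeds CANONICALJSON_MAX_INT
    {"type": "m.room.message"},                       # dropped: no "depth" field at all
]

assert filter_pdus_for_valid_depth(pdus) == [{"type": "m.room.message", "depth": 42}]
```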