# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import atexit
import hashlib
import os
import time
import uuid
import warnings
from inspect import getcallargs

from mock import Mock, patch
from six.moves.urllib import parse as urlparse

from twisted.internet import defer, reactor

from synapse.api.constants import EventTypes
from synapse.api.errors import CodeMessageException, cs_error
from synapse.api.room_versions import RoomVersions
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import DEFAULT_ROOM_VERSION
from synapse.federation.transport import server as federation_server
from synapse.http.server import HttpServer
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.storage.engines import PostgresEngine, create_engine
from synapse.storage.prepare_database import (
    _get_or_create_schema_state,
    _setup_new_database,
    prepare_database,
)
from synapse.util.logcontext import LoggingContext
from synapse.util.ratelimitutils import FederationRateLimiter

# set this to True to run the tests against postgres instead of sqlite.
#
# When running under postgres, we first create a base database with the name
# POSTGRES_BASE_DB and update it to the current schema. Then, for each test case, we
# create another unique database, using the base database as a template.
USE_POSTGRES_FOR_TESTS = os.environ.get("SYNAPSE_POSTGRES", False)
LEAVE_DB = os.environ.get("SYNAPSE_LEAVE_DB", False)
POSTGRES_USER = os.environ.get("SYNAPSE_POSTGRES_USER", None)
POSTGRES_HOST = os.environ.get("SYNAPSE_POSTGRES_HOST", None)
POSTGRES_PASSWORD = os.environ.get("SYNAPSE_POSTGRES_PASSWORD", None)
POSTGRES_BASE_DB = "_synapse_unit_tests_base_%s" % (os.getpid(),)

# the dbname we will connect to in order to create the base database.
POSTGRES_DBNAME_FOR_INITIAL_CREATE = "postgres"


def setupdb():
    # If we're using PostgreSQL, set up the db once
    if USE_POSTGRES_FOR_TESTS:
        # create a PostgresEngine
        db_engine = create_engine({"name": "psycopg2", "args": {}})

        # connect to postgres to create the base database.
        db_conn = db_engine.module.connect(
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
            dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
        )
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
        cur.execute("CREATE DATABASE %s;" % (POSTGRES_BASE_DB,))
        cur.close()
        db_conn.close()

        # Set up the base database with the full, up-to-date schema.
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
        )
        cur = db_conn.cursor()
        _get_or_create_schema_state(cur, db_engine)
        _setup_new_database(cur, db_engine)
        db_conn.commit()
        cur.close()
        db_conn.close()

        # Drop the base database again when the test process exits.
        def _cleanup():
            db_conn = db_engine.module.connect(
                user=POSTGRES_USER,
                host=POSTGRES_HOST,
                password=POSTGRES_PASSWORD,
                dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
            )
            db_conn.autocommit = True
            cur = db_conn.cursor()
            cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
            cur.close()
            db_conn.close()

        atexit.register(_cleanup)
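
# Usage sketch: the postgres path above is driven entirely by environment
# variables, so a run against postgres looks something like the following
# (assuming trial, Synapse's usual test runner):
#
#     SYNAPSE_POSTGRES=1 SYNAPSE_POSTGRES_USER=postgres trial tests
#
# with setupdb() expected to be invoked once, before any test case runs.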


def default_config(name, parse=False):
    """
    Create a reasonable test config.
    """
    config_dict = {
        "server_name": name,
        "media_store_path": "media",
        "uploads_path": "uploads",
        # the test signing key is just an arbitrary ed25519 key to keep the config
        # parser happy
        "signing_key": "ed25519 a_lPym qvioDNmfExFBRPgdTU+wtFYKq4JfwFRv7sYVgWvmgJg",
        "event_cache_size": 1,
        "enable_registration": True,
        "enable_registration_captcha": False,
        "macaroon_secret_key": "not even a little secret",
        "expire_access_token": False,
        "trusted_third_party_id_servers": [],
        "room_invite_state_types": [],
        "password_providers": [],
        "worker_replication_url": "",
        "worker_app": None,
        "block_non_admin_invites": False,
        "federation_domain_whitelist": None,
        "filter_timeline_limit": 5000,
        "user_directory_search_all_users": False,
        "user_consent_server_notice_content": None,
        "block_events_without_consent_error": None,
        "user_consent_at_registration": False,
        "user_consent_policy_name": "Privacy Policy",
        "media_storage_providers": [],
        "autocreate_auto_join_rooms": True,
        "auto_join_rooms": [],
        "limit_usage_by_mau": False,
        "hs_disabled": False,
        "hs_disabled_message": "",
        "hs_disabled_limit_type": "",
        "max_mau_value": 50,
        "mau_trial_days": 0,
        "mau_stats_only": False,
        "mau_limits_reserved_threepids": [],
        "admin_contact": None,
        "rc_federation": {
            "reject_limit": 10,
            "sleep_limit": 10,
            "sleep_delay": 10,
            "concurrent": 10,
        },
        "rc_message": {"per_second": 10000, "burst_count": 10000},
        "rc_registration": {"per_second": 10000, "burst_count": 10000},
        "rc_login": {
            "address": {"per_second": 10000, "burst_count": 10000},
            "account": {"per_second": 10000, "burst_count": 10000},
            "failed_attempts": {"per_second": 10000, "burst_count": 10000},
        },
        "saml2_enabled": False,
        "public_baseurl": None,
        "default_identity_server": None,
        "key_refresh_interval": 24 * 60 * 60 * 1000,
        "old_signing_keys": {},
        "tls_fingerprints": [],
        "use_frozen_dicts": False,
        # We need a sane default_room_version, otherwise attempts to create
        # rooms will fail.
        "default_room_version": DEFAULT_ROOM_VERSION,
        # disable user directory updates, because they get done in the
        # background, which upsets the test runner.
        "update_user_directory": False,
    }

    if parse:
        config = HomeServerConfig()
        config.parse_config_dict(config_dict)
        return config

    return config_dict
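
# Usage sketch: ``default_config("test")`` returns the raw dict, which tests
# can tweak before parsing; ``parse=True`` runs it through HomeServerConfig so
# the result can be handed straight to setup_test_homeserver() below:
#
#     config = default_config("test", parse=True)
#     hs = yield setup_test_homeserver(self.addCleanup, config=config)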


class TestHomeServer(HomeServer):
    DATASTORE_CLASS = DataStore


@defer.inlineCallbacks
def setup_test_homeserver(
    cleanup_func,
    name="test",
    datastore=None,
    config=None,
    reactor=None,
    homeserverToUse=TestHomeServer,
    **kargs
):
    """
    Set up a homeserver suitable for running tests against. Keyword arguments
    are passed to the HomeServer constructor.

    If no datastore is supplied, one is created and given to the homeserver.

    Args:
        cleanup_func : The function used to register a cleanup routine for
                       after the test.

    Calling this method directly is deprecated: you should instead derive from
    HomeserverTestCase.
    """
    if reactor is None:
        from twisted.internet import reactor

    if config is None:
        config = default_config(name, parse=True)

    config.ldap_enabled = False

    if "clock" not in kargs:
        kargs["clock"] = MockClock()

    if USE_POSTGRES_FOR_TESTS:
        test_db = "synapse_test_%s" % uuid.uuid4().hex

        config.database_config = {
            "name": "psycopg2",
            "args": {
                "database": test_db,
                "host": POSTGRES_HOST,
                "password": POSTGRES_PASSWORD,
                "user": POSTGRES_USER,
                "cp_min": 1,
                "cp_max": 5,
            },
        }
    else:
        config.database_config = {
            "name": "sqlite3",
            "args": {"database": ":memory:", "cp_min": 1, "cp_max": 1},
        }

    db_engine = create_engine(config.database_config)

    # Create the database before we actually try and connect to it, based off
    # the template database we generate in setupdb()
    if datastore is None and isinstance(db_engine, PostgresEngine):
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
        )
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
        cur.execute(
            "CREATE DATABASE %s WITH TEMPLATE %s;" % (test_db, POSTGRES_BASE_DB)
        )
        cur.close()
        db_conn.close()

    # we need to configure the connection pool to run the on_new_connection
    # function, so that we can test code that uses custom sqlite functions
    # (like rank).
    config.database_config["args"]["cp_openfun"] = db_engine.on_new_connection

    if datastore is None:
        hs = homeserverToUse(
            name,
            config=config,
            db_config=config.database_config,
            version_string="Synapse/tests",
            database_engine=db_engine,
            tls_server_context_factory=Mock(),
            tls_client_options_factory=Mock(),
            reactor=reactor,
            **kargs
        )

        # Prepare the DB on SQLite -- PostgreSQL is a copy of an already up to
        # date db
        if not isinstance(db_engine, PostgresEngine):
            db_conn = hs.get_db_conn()
            yield prepare_database(db_conn, db_engine, config)
            db_conn.commit()
            db_conn.close()

        else:
            # We need to do cleanup on PostgreSQL
            def cleanup():
                import psycopg2

                # Close all the db pools
                hs.get_db_pool().close()

                dropped = False

                # Drop the test database
                db_conn = db_engine.module.connect(
                    database=POSTGRES_BASE_DB,
                    user=POSTGRES_USER,
                    host=POSTGRES_HOST,
                    password=POSTGRES_PASSWORD,
                )
                db_conn.autocommit = True
                cur = db_conn.cursor()

                # Try a few times to drop the DB. Some things may hold on to the
                # database for a few more seconds due to flakiness, preventing
                # us from dropping it when the test is over. If we can't drop
                # it, warn and move on.
                for x in range(5):
                    try:
                        cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
                        db_conn.commit()
                        dropped = True
                        # stop retrying once the drop has succeeded
                        break
                    except psycopg2.OperationalError as e:
                        warnings.warn(
                            "Couldn't drop old db: " + str(e), category=UserWarning
                        )
                        time.sleep(0.5)

                cur.close()
                db_conn.close()

                if not dropped:
                    warnings.warn("Failed to drop old DB.", category=UserWarning)

            if not LEAVE_DB:
                # Register the cleanup hook
                cleanup_func(cleanup)

        hs.setup()
        if homeserverToUse.__name__ == "TestHomeServer":
            hs.setup_master()
    else:
        hs = homeserverToUse(
            name,
            db_pool=None,
            datastore=datastore,
            config=config,
            version_string="Synapse/tests",
            database_engine=db_engine,
            tls_server_context_factory=Mock(),
            tls_client_options_factory=Mock(),
            reactor=reactor,
            **kargs
        )

    # bcrypt is far too slow to be doing in unit tests
    # Need to let the HS build an auth handler and then mess with it
    # because AuthHandler's constructor requires the HS, so we can't make one
    # beforehand and pass it in to the HS's constructor (chicken / egg)
    hs.get_auth_handler().hash = lambda p: hashlib.md5(p.encode("utf8")).hexdigest()
    hs.get_auth_handler().validate_hash = (
        lambda p, h: hashlib.md5(p.encode("utf8")).hexdigest() == h
    )

    fed = kargs.get("resource_for_federation", None)
    if fed:
        register_federation_servlets(hs, fed)

    defer.returnValue(hs)
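
# Usage sketch (deprecated in favour of HomeserverTestCase, as the docstring
# above notes; ``self.addCleanup`` is unittest's standard cleanup hook):
#
#     @defer.inlineCallbacks
#     def setUp(self):
#         self.hs = yield setup_test_homeserver(self.addCleanup)
#         self.store = self.hs.get_datastore()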


def register_federation_servlets(hs, resource):
    federation_server.register_servlets(
        hs,
        resource=resource,
        authenticator=federation_server.Authenticator(hs),
        ratelimiter=FederationRateLimiter(
            hs.get_clock(), config=hs.config.rc_federation
        ),
    )


def get_mock_call_args(pattern_func, mock_func):
    """Return the arguments the mock function was called with, interpreted
    against the pattern function's argument list.
    """
    invoked_args, invoked_kargs = mock_func.call_args
    return getcallargs(pattern_func, *invoked_args, **invoked_kargs)
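
# Sketch of the behaviour: if the pattern function is ``def f(a, b=2)`` and a
# Mock standing in for it was last called as ``mock(1, b=3)``, this returns
# {"a": 1, "b": 3}, so tests can assert on arguments by name regardless of
# whether they were passed positionally or as keywords.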


def mock_getRawHeaders(headers=None):
    headers = headers if headers is not None else {}

    def getRawHeaders(name, default=None):
        return headers.get(name, default)

    return getRawHeaders


# This is a mock /resource/ not an entire server
class MockHttpResource(HttpServer):
    def __init__(self, prefix=""):
        self.callbacks = []  # 3-tuple of method/pattern/function
        self.prefix = prefix

    def trigger_get(self, path):
        return self.trigger(b"GET", path, None)

    @patch("twisted.web.http.Request")
    @defer.inlineCallbacks
    def trigger(
        self, http_method, path, content, mock_request, federation_auth_origin=None
    ):
        """ Fire an HTTP event.

        Args:
            http_method : The HTTP method
            path : The HTTP path
            content : The HTTP body
            mock_request : Mocked request to pass to the event so it can get
                           content.
            federation_auth_origin (bytes|None): domain to authenticate as, for federation

        Returns:
            A tuple of (code, response)
        Raises:
            KeyError: if no registered handler matches the path.
        """
        path = self.prefix + path

        # annoyingly we return a twisted http request which has chained calls
        # to get at the http content, hence mock it here.
        mock_content = Mock()
        config = {"read.return_value": content}
        mock_content.configure_mock(**config)
        mock_request.content = mock_content

        mock_request.method = http_method.encode("ascii")
        mock_request.uri = path.encode("ascii")

        mock_request.getClientIP.return_value = "-"

        headers = {}
        if federation_auth_origin is not None:
            headers[b"Authorization"] = [
                b"X-Matrix origin=%s,key=,sig=" % (federation_auth_origin,)
            ]
        mock_request.requestHeaders.getRawHeaders = mock_getRawHeaders(headers)

        # return the right path if the event requires it
        mock_request.path = path

        # add in query params to the right place
        try:
            mock_request.args = urlparse.parse_qs(path.split("?")[1])
            mock_request.path = path.split("?")[0]
            path = mock_request.path
        except Exception:
            pass

        if isinstance(path, bytes):
            path = path.decode("utf8")

        for (method, pattern, func) in self.callbacks:
            if http_method != method:
                continue

            matcher = pattern.match(path)
            if matcher:
                try:
                    args = [urlparse.unquote(u) for u in matcher.groups()]

                    (code, response) = yield func(mock_request, *args)
                    defer.returnValue((code, response))
                except CodeMessageException as e:
                    defer.returnValue((e.code, cs_error(e.msg, code=e.errcode)))

        raise KeyError("No event can handle %s" % path)

    def register_paths(self, method, path_patterns, callback):
        for path_pattern in path_patterns:
            self.callbacks.append((method, path_pattern, callback))
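
# Usage sketch (``servlet`` is a stand-in for anything that registers handlers
# via register_paths, e.g. a REST servlet):
#
#     res = MockHttpResource(prefix="/_matrix/client/api/v1")
#     servlet.register(res)
#     code, response = yield res.trigger(b"GET", "/some/path", None)
#
# The @patch decorator on trigger() supplies the mock_request argument, so
# callers only pass the method, path and body.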


class MockKey(object):
    alg = "mock_alg"
    version = "mock_version"
    signature = b"\x9a\x87$"

    @property
    def verify_key(self):
        return self

    def sign(self, message):
        return self

    def verify(self, message, sig):
        assert sig == b"\x9a\x87$"

    def encode(self):
        return b"<fake_encoded_key>"


class MockClock(object):
    now = 1000

    def __init__(self):
        # list of lists of [absolute_time, callback, expired] in no particular
        # order
        self.timers = []
        self.loopers = []

    def time(self):
        return self.now

    def time_msec(self):
        return self.time() * 1000

    def call_later(self, delay, callback, *args, **kwargs):
        current_context = LoggingContext.current_context()

        def wrapped_callback():
            LoggingContext.thread_local.current_context = current_context
            callback(*args, **kwargs)

        t = [self.now + delay, wrapped_callback, False]
        self.timers.append(t)

        return t

    def looping_call(self, function, interval):
        self.loopers.append([function, interval / 1000.0, self.now])

    def cancel_call_later(self, timer, ignore_errs=False):
        if timer[2]:
            if not ignore_errs:
                raise Exception("Cannot cancel an expired timer")

        timer[2] = True
        self.timers = [t for t in self.timers if t != timer]

    # For unit testing
    def advance_time(self, secs):
        self.now += secs

        timers = self.timers
        self.timers = []

        for t in timers:
            time, callback, expired = t

            if expired:
                raise Exception("Timer already expired")

            if self.now >= time:
                t[2] = True
                callback()
            else:
                self.timers.append(t)

        for looped in self.loopers:
            func, interval, last = looped
            if last + interval < self.now:
                func()
                looped[2] = self.now

    def advance_time_msec(self, ms):
        self.advance_time(ms / 1000.0)

    def time_bound_deferred(self, d, *args, **kwargs):
        # We don't bother timing things out for now.
        return d
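
# Usage sketch: tests drive the clock forward deterministically instead of
# sleeping:
#
#     clock = MockClock()
#     fired = []
#     clock.call_later(10, fired.append, "now")
#     clock.advance_time(11)  # the callback fires once 11 "seconds" have passed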


def _format_call(args, kwargs):
    # note: (a,) rather than (a), so that tuple-valued args format correctly
    return ", ".join(
        ["%r" % (a,) for a in args] + ["%s=%r" % (k, v) for k, v in kwargs.items()]
    )


class DeferredMockCallable(object):
    """A callable instance that stores a set of pending call expectations and
    return values for them. It allows a unit test to assert that the given set
    of function calls are eventually made, by awaiting on them to be called.
    """

    def __init__(self):
        self.expectations = []
        self.calls = []

    def __call__(self, *args, **kwargs):
        self.calls.append((args, kwargs))

        if not self.expectations:
            raise ValueError(
                "%r has no pending calls to handle call(%s)"
                % (self, _format_call(args, kwargs))
            )

        for (call, result, d) in self.expectations:
            if args == call[1] and kwargs == call[2]:
                d.callback(None)
                return result

        failure = AssertionError(
            "Was not expecting call(%s)" % (_format_call(args, kwargs))
        )

        for _, _, d in self.expectations:
            try:
                d.errback(failure)
            except Exception:
                pass

        raise failure

    def expect_call_and_return(self, call, result):
        self.expectations.append((call, result, defer.Deferred()))

    @defer.inlineCallbacks
    def await_calls(self, timeout=1000):
        deferred = defer.DeferredList(
            [d for _, _, d in self.expectations], fireOnOneErrback=True
        )

        timer = reactor.callLater(
            timeout / 1000,
            deferred.errback,
            AssertionError(
                "%d pending calls left: %s"
                % (
                    len([e for e in self.expectations if not e[2].called]),
                    [e for e in self.expectations if not e[2].called],
                )
            ),
        )

        yield deferred

        timer.cancel()

        self.calls = []

    def assert_had_no_calls(self):
        if self.calls:
            calls = self.calls
            self.calls = []

            raise AssertionError(
                "Expected not to receive any calls, got:\n"
                + "\n".join(["call(%s)" % _format_call(c[0], c[1]) for c in calls])
            )
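
# Usage sketch (``call`` is mock.call, which records the expected positional
# and keyword arguments that __call__ above matches against):
#
#     from mock import call
#
#     mock_fn = DeferredMockCallable()
#     mock_fn.expect_call_and_return(call("foo", x=1), "result")
#     # ... exercise code that should eventually invoke mock_fn("foo", x=1) ...
#     yield mock_fn.await_calls()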


@defer.inlineCallbacks
def create_room(hs, room_id, creator_id):
    """Creates and persists a creation event for the given room

    Args:
        hs
        room_id (str)
        creator_id (str)
    """

    store = hs.get_datastore()
    event_builder_factory = hs.get_event_builder_factory()
    event_creation_handler = hs.get_event_creation_handler()

    builder = event_builder_factory.for_room_version(
        RoomVersions.V1,
        {
            "type": EventTypes.Create,
            "state_key": "",
            "sender": creator_id,
            "room_id": room_id,
            "content": {},
        },
    )

    event, context = yield event_creation_handler.create_new_client_event(builder)

    yield store.persist_event(event, context)
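
# Usage sketch, from within an inlineCallbacks test that already has a
# homeserver from setup_test_homeserver() (the room and user IDs here are
# arbitrary):
#
#     yield create_room(hs, "!room:test", "@creator:test")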