
Commit 898461a

Run ruff .

1 parent 404fc7b


51 files changed (+389, -170 lines)

synapse/_scripts/synapse_port_db.py

Lines changed: 1 addition & 1 deletion
@@ -1065,7 +1065,7 @@ def get_start_id(txn: LoggingTransaction) -> int:
 
         def get_sent_table_size(txn: LoggingTransaction) -> int:
             txn.execute(
-                "SELECT count(*) FROM sent_transactions" " WHERE ts >= ?", (yesterday,)
+                "SELECT count(*) FROM sent_transactions WHERE ts >= ?", (yesterday,)
             )
             result = txn.fetchone()
             assert result is not None
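
The change above is an instance of the first pattern ruff applies throughout this commit: adjacent string literals, which Python concatenates implicitly at compile time, are merged into a single literal once they fit on one line. A minimal sketch of why this is behaviour-preserving, reusing the query text from the diff (variable names are illustrative only):

```python
# Python joins adjacent string literals at compile time, so both layouts
# produce exactly the same SQL string; only the source formatting differs.
query_before = "SELECT count(*) FROM sent_transactions" " WHERE ts >= ?"
query_after = "SELECT count(*) FROM sent_transactions WHERE ts >= ?"

assert query_before == query_after  # the reformat cannot change the query
```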

synapse/_scripts/synctl.py

Lines changed: 3 additions & 3 deletions
@@ -292,9 +292,9 @@ def main() -> None:
             for key in worker_config:
                 if key == "worker_app":  # But we allow worker_app
                     continue
-                assert not key.startswith(
-                    "worker_"
-                ), "Main process cannot use worker_* config"
+                assert not key.startswith("worker_"), (
+                    "Main process cannot use worker_* config"
+                )
         else:
             worker_pidfile = worker_config["worker_pid_file"]
             worker_cache_factor = worker_config.get("synctl_cache_factor")
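
This file shows the second recurring pattern: the asserted condition now stays on a single line and only the assertion message is wrapped in parentheses, rather than the condition itself being split across parenthesised lines. A small runnable sketch of the same check, with the config reduced to illustrative in-line data (a real synctl run loads it from a YAML file):

```python
# Illustrative stand-in for a loaded main-process config.
worker_config = {"worker_app": "synapse.app.homeserver", "daemonize": True}

for key in worker_config:
    if key == "worker_app":  # But we allow worker_app
        continue
    # Old layout:  assert not key.startswith(
    #                  "worker_"
    #              ), "Main process cannot use worker_* config"
    # New layout produced by the formatter:
    assert not key.startswith("worker_"), (
        "Main process cannot use worker_* config"
    )
```

Note that the parentheses wrap only the message, not the whole condition/message pair: writing assert (cond, "msg") would build a two-element tuple, which is always truthy and would never fail.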

synapse/app/generic_worker.py

Lines changed: 1 addition & 2 deletions
@@ -287,8 +287,7 @@ def start_listening(self) -> None:
             elif listener.type == "metrics":
                 if not self.config.metrics.enable_metrics:
                     logger.warning(
-                        "Metrics listener configured, but "
-                        "enable_metrics is not True!"
+                        "Metrics listener configured, but enable_metrics is not True!"
                     )
                 else:
                     if isinstance(listener, TCPListenerConfig):

synapse/app/homeserver.py

Lines changed: 1 addition & 2 deletions
@@ -289,8 +289,7 @@ def start_listening(self) -> None:
             elif listener.type == "metrics":
                 if not self.config.metrics.enable_metrics:
                     logger.warning(
-                        "Metrics listener configured, but "
-                        "enable_metrics is not True!"
+                        "Metrics listener configured, but enable_metrics is not True!"
                     )
                 else:
                     if isinstance(listener, TCPListenerConfig):

synapse/config/tls.py

Lines changed: 1 addition & 2 deletions
@@ -108,8 +108,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None:
                 # Raise an error if this option has been specified without any
                 # corresponding certificates.
                 raise ConfigError(
-                    "federation_custom_ca_list specified without "
-                    "any certificate files"
+                    "federation_custom_ca_list specified without any certificate files"
                 )
 
             certs = []

synapse/event_auth.py

Lines changed: 1 addition & 2 deletions
@@ -986,8 +986,7 @@ def _check_power_levels(
             if old_level == user_level:
                 raise AuthError(
                     403,
-                    "You don't have permission to remove ops level equal "
-                    "to your own",
+                    "You don't have permission to remove ops level equal to your own",
                 )
 
         # Check if the old and new levels are greater than the user level

synapse/handlers/federation.py

Lines changed: 3 additions & 3 deletions
@@ -1312,9 +1312,9 @@ async def get_state_ids_for_pdu(self, room_id: str, event_id: str) -> List[str]:
         if state_key is not None:
             # the event was not rejected (get_event raises a NotFoundError for rejected
             # events) so the state at the event should include the event itself.
-            assert (
-                state_map.get((event.type, state_key)) == event.event_id
-            ), "State at event did not include event itself"
+            assert state_map.get((event.type, state_key)) == event.event_id, (
+                "State at event did not include event itself"
+            )
 
             # ... but we need the state *before* that event
             if "replaces_state" in event.unsigned:

synapse/handlers/message.py

Lines changed: 12 additions & 10 deletions
@@ -143,9 +143,9 @@ async def get_room_data(
         elif membership == Membership.LEAVE:
             key = (event_type, state_key)
             # If the membership is not JOIN, then the event ID should exist.
-            assert (
-                membership_event_id is not None
-            ), "check_user_in_room_or_world_readable returned invalid data"
+            assert membership_event_id is not None, (
+                "check_user_in_room_or_world_readable returned invalid data"
+            )
             room_state = await self._state_storage_controller.get_state_for_events(
                 [membership_event_id], StateFilter.from_types([key])
             )
@@ -242,9 +242,9 @@ async def get_state_events(
             room_state = await self.store.get_events(state_ids.values())
         elif membership == Membership.LEAVE:
             # If the membership is not JOIN, then the event ID should exist.
-            assert (
-                membership_event_id is not None
-            ), "check_user_in_room_or_world_readable returned invalid data"
+            assert membership_event_id is not None, (
+                "check_user_in_room_or_world_readable returned invalid data"
+            )
             room_state_events = (
                 await self._state_storage_controller.get_state_for_events(
                     [membership_event_id], state_filter=state_filter
@@ -1266,12 +1266,14 @@ async def create_new_client_event(
                 # Allow an event to have empty list of prev_event_ids
                 # only if it has auth_event_ids.
                 or auth_event_ids
-            ), "Attempting to create a non-m.room.create event with no prev_events or auth_event_ids"
+            ), (
+                "Attempting to create a non-m.room.create event with no prev_events or auth_event_ids"
+            )
         else:
             # we now ought to have some prev_events (unless it's a create event).
-            assert (
-                builder.type == EventTypes.Create or prev_event_ids
-            ), "Attempting to create a non-m.room.create event with no prev_events"
+            assert builder.type == EventTypes.Create or prev_event_ids, (
+                "Attempting to create a non-m.room.create event with no prev_events"
+            )
 
         if for_batch:
             assert prev_event_ids is not None

synapse/handlers/sso.py

Lines changed: 3 additions & 3 deletions
@@ -1192,9 +1192,9 @@ async def revoke_sessions_for_provider_session_id(
         """
 
         # It is expected that this is the main process.
-        assert isinstance(
-            self._device_handler, DeviceHandler
-        ), "revoking SSO sessions can only be called on the main process"
+        assert isinstance(self._device_handler, DeviceHandler), (
+            "revoking SSO sessions can only be called on the main process"
+        )
 
         # Invalidate any running user-mapping sessions
         to_delete = []

synapse/http/matrixfederationclient.py

Lines changed: 3 additions & 3 deletions
@@ -425,9 +425,9 @@ def __init__(
             )
         else:
             proxy_authorization_secret = hs.config.worker.worker_replication_secret
-            assert (
-                proxy_authorization_secret is not None
-            ), "`worker_replication_secret` must be set when using `outbound_federation_restricted_to` (used to authenticate requests across workers)"
+            assert proxy_authorization_secret is not None, (
+                "`worker_replication_secret` must be set when using `outbound_federation_restricted_to` (used to authenticate requests across workers)"
+            )
             federation_proxy_credentials = BearerProxyCredentials(
                 proxy_authorization_secret.encode("ascii")
             )

synapse/http/proxyagent.py

Lines changed: 6 additions & 6 deletions
@@ -173,9 +173,9 @@ def __init__(
         self._federation_proxy_endpoint: Optional[IStreamClientEndpoint] = None
         self._federation_proxy_credentials: Optional[ProxyCredentials] = None
         if federation_proxy_locations:
-            assert (
-                federation_proxy_credentials is not None
-            ), "`federation_proxy_credentials` are required when using `federation_proxy_locations`"
+            assert federation_proxy_credentials is not None, (
+                "`federation_proxy_credentials` are required when using `federation_proxy_locations`"
+            )
 
             endpoints: List[IStreamClientEndpoint] = []
             for federation_proxy_location in federation_proxy_locations:
@@ -302,9 +302,9 @@ def request(
             parsed_uri.scheme == b"matrix-federation"
             and self._federation_proxy_endpoint
         ):
-            assert (
-                self._federation_proxy_credentials is not None
-            ), "`federation_proxy_credentials` are required when using `federation_proxy_locations`"
+            assert self._federation_proxy_credentials is not None, (
+                "`federation_proxy_credentials` are required when using `federation_proxy_locations`"
+            )
 
             # Set a Proxy-Authorization header
             if headers is None:

synapse/http/servlet.py

Lines changed: 3 additions & 3 deletions
@@ -582,9 +582,9 @@ def parse_enum(
             is not one of those allowed values.
     """
     # Assert the enum values are strings.
-    assert all(
-        isinstance(e.value, str) for e in E
-    ), "parse_enum only works with string values"
+    assert all(isinstance(e.value, str) for e in E), (
+        "parse_enum only works with string values"
+    )
     str_value = parse_string(
         request,
         name,

synapse/module_api/__init__.py

Lines changed: 3 additions & 3 deletions
@@ -894,9 +894,9 @@ def invalidate_access_token(
         Raises:
             synapse.api.errors.AuthError: the access token is invalid
         """
-        assert isinstance(
-            self._device_handler, DeviceHandler
-        ), "invalidate_access_token can only be called on the main process"
+        assert isinstance(self._device_handler, DeviceHandler), (
+            "invalidate_access_token can only be called on the main process"
+        )
 
         # see if the access token corresponds to a device
         user_info = yield defer.ensureDeferred(

synapse/replication/http/_base.py

Lines changed: 3 additions & 3 deletions
@@ -128,9 +128,9 @@ def __init__(self, hs: "HomeServer"):
 
         # We reserve `instance_name` as a parameter to sending requests, so we
        # assert here that sub classes don't try and use the name.
-        assert (
-            "instance_name" not in self.PATH_ARGS
-        ), "`instance_name` is a reserved parameter name"
+        assert "instance_name" not in self.PATH_ARGS, (
+            "`instance_name` is a reserved parameter name"
+        )
         assert (
             "instance_name"
             not in signature(self.__class__._serialize_payload).parameters

synapse/replication/tcp/streams/events.py

Lines changed: 3 additions & 3 deletions
@@ -200,9 +200,9 @@ async def _update_function(
 
         # we rely on get_all_new_forward_event_rows strictly honouring the limit, so
         # that we know it is safe to just take upper_limit = event_rows[-1][0].
-        assert (
-            len(event_rows) <= target_row_count
-        ), "get_all_new_forward_event_rows did not honour row limit"
+        assert len(event_rows) <= target_row_count, (
+            "get_all_new_forward_event_rows did not honour row limit"
+        )
 
         # if we hit the limit on event_updates, there's no point in going beyond the
         # last stream_id in the batch for the other sources.

synapse/rest/admin/__init__.py

Lines changed: 1 addition & 2 deletions
@@ -207,8 +207,7 @@ async def on_POST(
             (stream, topo, _event_id) = r
             token = "t%d-%d" % (topo, stream)
             logger.info(
-                "[purge] purging up to token %s (received_ts %i => "
-                "stream_ordering %i)",
+                "[purge] purging up to token %s (received_ts %i => stream_ordering %i)",
                 token,
                 ts,
                 stream_ordering,

synapse/rest/client/receipts.py

Lines changed: 1 addition & 3 deletions
@@ -39,9 +39,7 @@
 
 class ReceiptRestServlet(RestServlet):
     PATTERNS = client_patterns(
-        "/rooms/(?P<room_id>[^/]*)"
-        "/receipt/(?P<receipt_type>[^/]*)"
-        "/(?P<event_id>[^/]*)$"
+        "/rooms/(?P<room_id>[^/]*)/receipt/(?P<receipt_type>[^/]*)/(?P<event_id>[^/]*)$"
     )
     CATEGORY = "Receipts requests"
 

synapse/rest/client/rendezvous.py

Lines changed: 3 additions & 3 deletions
@@ -44,9 +44,9 @@ def __init__(self, hs: "HomeServer"):
         redirection_target: Optional[str] = (
             hs.config.experimental.msc4108_delegation_endpoint
         )
-        assert (
-            redirection_target is not None
-        ), "Servlet is only registered if there is a delegation target"
+        assert redirection_target is not None, (
+            "Servlet is only registered if there is a delegation target"
+        )
         self.endpoint = redirection_target.encode("utf-8")
 
     async def on_POST(self, request: SynapseRequest) -> None:

synapse/rest/client/transactions.py

Lines changed: 3 additions & 3 deletions
@@ -94,9 +94,9 @@ def _get_transaction_key(self, request: IRequest, requester: Requester) -> Hasha
         # (appservice and guest users), but does not cover access tokens minted
         # by the admin API. Use the access token ID instead.
         else:
-            assert (
-                requester.access_token_id is not None
-            ), "Requester must have an access_token_id"
+            assert requester.access_token_id is not None, (
+                "Requester must have an access_token_id"
+            )
             return (path, "user_admin", requester.access_token_id)
 
     def fetch_or_execute_request(

synapse/storage/background_updates.py

Lines changed: 6 additions & 6 deletions
@@ -739,9 +739,9 @@ def runner(conn: Connection) -> None:
             c.execute(sql)
 
         async def updater(progress: JsonDict, batch_size: int) -> int:
-            assert isinstance(
-                self.db_pool.engine, engines.PostgresEngine
-            ), "validate constraint background update registered for non-Postres database"
+            assert isinstance(self.db_pool.engine, engines.PostgresEngine), (
+                "validate constraint background update registered for non-Postres database"
+            )
 
             logger.info("Validating constraint %s to %s", constraint_name, table)
             await self.db_pool.runWithConnection(runner)
@@ -900,9 +900,9 @@ def register_background_validate_constraint_and_delete_rows(
                 on the table. Used to iterate over the table.
         """
 
-        assert isinstance(
-            self.db_pool.engine, engines.PostgresEngine
-        ), "validate constraint background update registered for non-Postres database"
+        assert isinstance(self.db_pool.engine, engines.PostgresEngine), (
+            "validate constraint background update registered for non-Postres database"
+        )
 
         async def updater(progress: JsonDict, batch_size: int) -> int:
             return await self.validate_constraint_and_delete_in_background(

synapse/storage/controllers/persist_events.py

Lines changed: 1 addition & 2 deletions
@@ -870,8 +870,7 @@ async def _get_new_state_after_events(
                 # This should only happen for outlier events.
                 if not ev.internal_metadata.is_outlier():
                     raise Exception(
-                        "Context for new event %s has no state "
-                        "group" % (ev.event_id,)
+                        "Context for new event %s has no state group" % (ev.event_id,)
                     )
                 continue
             if ctx.state_group_deltas:

synapse/storage/databases/main/client_ips.py

Lines changed: 6 additions & 6 deletions
@@ -650,9 +650,9 @@ async def insert_client_ip(
 
     @wrap_as_background_process("update_client_ips")
     async def _update_client_ips_batch(self) -> None:
-        assert (
-            self._update_on_this_worker
-        ), "This worker is not designated to update client IPs"
+        assert self._update_on_this_worker, (
+            "This worker is not designated to update client IPs"
+        )
 
         # If the DB pool has already terminated, don't try updating
         if not self.db_pool.is_running():
@@ -671,9 +671,9 @@ def _update_client_ips_batch_txn(
         txn: LoggingTransaction,
         to_update: Mapping[Tuple[str, str, str], Tuple[str, Optional[str], int]],
     ) -> None:
-        assert (
-            self._update_on_this_worker
-        ), "This worker is not designated to update client IPs"
+        assert self._update_on_this_worker, (
+            "This worker is not designated to update client IPs"
+        )
 
         # Keys and values for the `user_ips` upsert.
         user_ips_keys = []

synapse/storage/databases/main/deviceinbox.py

Lines changed: 3 additions & 3 deletions
@@ -200,9 +200,9 @@ async def get_messages_for_user_devices(
             to_stream_id=to_stream_id,
         )
 
-        assert (
-            last_processed_stream_id == to_stream_id
-        ), "Expected _get_device_messages to process all to-device messages up to `to_stream_id`"
+        assert last_processed_stream_id == to_stream_id, (
+            "Expected _get_device_messages to process all to-device messages up to `to_stream_id`"
+        )
 
         return user_id_device_id_to_messages
 