Commit 9d43bec

Bump ruff from 0.7.3 to 0.11.10 (#18451)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Andrew Morgan <andrew@amorgan.xyz>
Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
1 parent a6cb353 commit 9d43bec


60 files changed: +178 / -206 lines

changelog.d/18451.misc

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+Bump ruff from 0.7.3 to 0.11.10.

poetry.lock

Lines changed: 20 additions & 20 deletions
Generated file; diff not rendered.

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -320,7 +320,7 @@ all = [
 # failing on new releases. Keeping lower bounds loose here means that dependabot
 # can bump versions without having to update the content-hash in the lockfile.
 # This helps prevents merge conflicts when running a batch of dependabot updates.
-ruff = "0.7.3"
+ruff = "0.11.10"
 # Type checking only works with the pydantic.v1 compat module from pydantic v2
 pydantic = "^2"

synapse/_scripts/synapse_port_db.py

Lines changed: 1 addition & 1 deletion
@@ -1065,7 +1065,7 @@ def get_start_id(txn: LoggingTransaction) -> int:
 
         def get_sent_table_size(txn: LoggingTransaction) -> int:
             txn.execute(
-                "SELECT count(*) FROM sent_transactions" " WHERE ts >= ?", (yesterday,)
+                "SELECT count(*) FROM sent_transactions WHERE ts >= ?", (yesterday,)
             )
             result = txn.fetchone()
             assert result is not None
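
Note: several hunks in this commit look like the newer ruff merging implicitly concatenated string literals that fit on one line. Adjacent literals are joined at compile time, so the runtime value is unchanged; a minimal sketch using the SQL from the hunk above:

    # Two adjacent literals (old style) produce exactly the same string as one literal (new style).
    old = "SELECT count(*) FROM sent_transactions" " WHERE ts >= ?"
    new = "SELECT count(*) FROM sent_transactions WHERE ts >= ?"
    assert old == new  # identical values; only the source formatting differs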

synapse/_scripts/synctl.py

Lines changed: 3 additions & 3 deletions
@@ -292,9 +292,9 @@ def main() -> None:
             for key in worker_config:
                 if key == "worker_app":  # But we allow worker_app
                     continue
-                assert not key.startswith(
-                    "worker_"
-                ), "Main process cannot use worker_* config"
+                assert not key.startswith("worker_"), (
+                    "Main process cannot use worker_* config"
+                )
         else:
             worker_pidfile = worker_config["worker_pid_file"]
             worker_cache_factor = worker_config.get("synctl_cache_factor")
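
Note: the bulk of this commit appears to be the updated ruff formatter keeping an assert condition on one line and wrapping only the failure message in parentheses. The parentheses merely group the message expression after the comma (this is not the always-true "assert on a tuple" pitfall), so the assertion behaves exactly as before. A minimal runnable sketch reusing the check from the hunk above, with a stand-in key:

    key = "database"  # any key that does not start with "worker_"

    # Old layout: the condition is split across lines.
    assert not key.startswith(
        "worker_"
    ), "Main process cannot use worker_* config"

    # New layout: the condition stays on one line; only the message is parenthesized.
    assert not key.startswith("worker_"), (
        "Main process cannot use worker_* config"
    )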

synapse/app/generic_worker.py

Lines changed: 1 addition & 2 deletions
@@ -287,8 +287,7 @@ def start_listening(self) -> None:
             elif listener.type == "metrics":
                 if not self.config.metrics.enable_metrics:
                     logger.warning(
-                        "Metrics listener configured, but "
-                        "enable_metrics is not True!"
+                        "Metrics listener configured, but enable_metrics is not True!"
                     )
             else:
                 if isinstance(listener, TCPListenerConfig):

synapse/app/homeserver.py

Lines changed: 1 addition & 2 deletions
@@ -289,8 +289,7 @@ def start_listening(self) -> None:
             elif listener.type == "metrics":
                 if not self.config.metrics.enable_metrics:
                     logger.warning(
-                        "Metrics listener configured, but "
-                        "enable_metrics is not True!"
+                        "Metrics listener configured, but enable_metrics is not True!"
                     )
             else:
                 if isinstance(listener, TCPListenerConfig):

synapse/config/tls.py

Lines changed: 1 addition & 2 deletions
@@ -108,8 +108,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None:
                 # Raise an error if this option has been specified without any
                 # corresponding certificates.
                 raise ConfigError(
-                    "federation_custom_ca_list specified without "
-                    "any certificate files"
+                    "federation_custom_ca_list specified without any certificate files"
                 )
 
             certs = []

synapse/event_auth.py

Lines changed: 1 addition & 2 deletions
@@ -986,8 +986,7 @@ def _check_power_levels(
             if old_level == user_level:
                 raise AuthError(
                     403,
-                    "You don't have permission to remove ops level equal "
-                    "to your own",
+                    "You don't have permission to remove ops level equal to your own",
                 )
 
             # Check if the old and new levels are greater than the user level

synapse/handlers/e2e_keys.py

Lines changed: 4 additions & 8 deletions
@@ -1163,7 +1163,7 @@ async def _process_self_signatures(
             devices = devices[user_id]
         except SynapseError as e:
             failure = _exception_to_failure(e)
-            failures[user_id] = {device: failure for device in signatures.keys()}
+            failures[user_id] = dict.fromkeys(signatures.keys(), failure)
             return signature_list, failures
 
         for device_id, device in signatures.items():
@@ -1303,7 +1303,7 @@ async def _process_other_signatures(
         except SynapseError as e:
             failure = _exception_to_failure(e)
             for user, devicemap in signatures.items():
-                failures[user] = {device_id: failure for device_id in devicemap.keys()}
+                failures[user] = dict.fromkeys(devicemap.keys(), failure)
             return signature_list, failures
 
         for target_user, devicemap in signatures.items():
@@ -1344,9 +1344,7 @@
                 # other devices were signed -- mark those as failures
                 logger.debug("upload signature: too many devices specified")
                 failure = _exception_to_failure(NotFoundError("Unknown device"))
-                failures[target_user] = {
-                    device: failure for device in other_devices
-                }
+                failures[target_user] = dict.fromkeys(other_devices, failure)
 
                 if user_signing_key_id in master_key.get("signatures", {}).get(
                     user_id, {}
@@ -1367,9 +1365,7 @@
             except SynapseError as e:
                 failure = _exception_to_failure(e)
                 if device_id is None:
-                    failures[target_user] = {
-                        device_id: failure for device_id in devicemap.keys()
-                    }
+                    failures[target_user] = dict.fromkeys(devicemap.keys(), failure)
                 else:
                     failures.setdefault(target_user, {})[device_id] = failure
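
Note: this commit also rewrites dict comprehensions whose value expression is a single fixed object into dict.fromkeys(), a rewrite newer ruff versions suggest. The resulting mapping is identical, and in both spellings every key shares the same failure object rather than getting a copy. A minimal sketch of the equivalence (the failure payload below is illustrative, not Synapse's actual error format):

    failure = {"status": 503, "message": "Not ready"}
    devicemap = {"DEVICE_A": {}, "DEVICE_B": {}}

    by_comprehension = {device_id: failure for device_id in devicemap.keys()}
    by_fromkeys = dict.fromkeys(devicemap.keys(), failure)

    assert by_comprehension == by_fromkeys
    assert all(value is failure for value in by_fromkeys.values())  # shared object, not copies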

synapse/handlers/federation.py

Lines changed: 3 additions & 3 deletions
@@ -1312,9 +1312,9 @@ async def get_state_ids_for_pdu(self, room_id: str, event_id: str) -> List[str]:
         if state_key is not None:
             # the event was not rejected (get_event raises a NotFoundError for rejected
             # events) so the state at the event should include the event itself.
-            assert (
-                state_map.get((event.type, state_key)) == event.event_id
-            ), "State at event did not include event itself"
+            assert state_map.get((event.type, state_key)) == event.event_id, (
+                "State at event did not include event itself"
+            )
 
             # ... but we need the state *before* that event
             if "replaces_state" in event.unsigned:

synapse/handlers/message.py

Lines changed: 12 additions & 10 deletions
@@ -143,9 +143,9 @@ async def get_room_data(
         elif membership == Membership.LEAVE:
             key = (event_type, state_key)
             # If the membership is not JOIN, then the event ID should exist.
-            assert (
-                membership_event_id is not None
-            ), "check_user_in_room_or_world_readable returned invalid data"
+            assert membership_event_id is not None, (
+                "check_user_in_room_or_world_readable returned invalid data"
+            )
             room_state = await self._state_storage_controller.get_state_for_events(
                 [membership_event_id], StateFilter.from_types([key])
             )
@@ -242,9 +242,9 @@ async def get_state_events(
             room_state = await self.store.get_events(state_ids.values())
         elif membership == Membership.LEAVE:
             # If the membership is not JOIN, then the event ID should exist.
-            assert (
-                membership_event_id is not None
-            ), "check_user_in_room_or_world_readable returned invalid data"
+            assert membership_event_id is not None, (
+                "check_user_in_room_or_world_readable returned invalid data"
+            )
             room_state_events = (
                 await self._state_storage_controller.get_state_for_events(
                     [membership_event_id], state_filter=state_filter
@@ -1266,12 +1266,14 @@ async def create_new_client_event(
                 # Allow an event to have empty list of prev_event_ids
                 # only if it has auth_event_ids.
                 or auth_event_ids
-            ), "Attempting to create a non-m.room.create event with no prev_events or auth_event_ids"
+            ), (
+                "Attempting to create a non-m.room.create event with no prev_events or auth_event_ids"
+            )
         else:
             # we now ought to have some prev_events (unless it's a create event).
-            assert (
-                builder.type == EventTypes.Create or prev_event_ids
-            ), "Attempting to create a non-m.room.create event with no prev_events"
+            assert builder.type == EventTypes.Create or prev_event_ids, (
+                "Attempting to create a non-m.room.create event with no prev_events"
+            )
 
         if for_batch:
             assert prev_event_ids is not None

synapse/handlers/sso.py

Lines changed: 3 additions & 3 deletions
@@ -1192,9 +1192,9 @@ async def revoke_sessions_for_provider_session_id(
         """
 
         # It is expected that this is the main process.
-        assert isinstance(
-            self._device_handler, DeviceHandler
-        ), "revoking SSO sessions can only be called on the main process"
+        assert isinstance(self._device_handler, DeviceHandler), (
+            "revoking SSO sessions can only be called on the main process"
+        )
 
         # Invalidate any running user-mapping sessions
         to_delete = []

synapse/http/matrixfederationclient.py

Lines changed: 3 additions & 3 deletions
@@ -425,9 +425,9 @@ def __init__(
             )
         else:
             proxy_authorization_secret = hs.config.worker.worker_replication_secret
-            assert (
-                proxy_authorization_secret is not None
-            ), "`worker_replication_secret` must be set when using `outbound_federation_restricted_to` (used to authenticate requests across workers)"
+            assert proxy_authorization_secret is not None, (
+                "`worker_replication_secret` must be set when using `outbound_federation_restricted_to` (used to authenticate requests across workers)"
+            )
             federation_proxy_credentials = BearerProxyCredentials(
                 proxy_authorization_secret.encode("ascii")
             )

synapse/http/proxyagent.py

Lines changed: 6 additions & 6 deletions
@@ -173,9 +173,9 @@ def __init__(
         self._federation_proxy_endpoint: Optional[IStreamClientEndpoint] = None
         self._federation_proxy_credentials: Optional[ProxyCredentials] = None
         if federation_proxy_locations:
-            assert (
-                federation_proxy_credentials is not None
-            ), "`federation_proxy_credentials` are required when using `federation_proxy_locations`"
+            assert federation_proxy_credentials is not None, (
+                "`federation_proxy_credentials` are required when using `federation_proxy_locations`"
+            )
 
             endpoints: List[IStreamClientEndpoint] = []
             for federation_proxy_location in federation_proxy_locations:
@@ -302,9 +302,9 @@ def request(
             parsed_uri.scheme == b"matrix-federation"
             and self._federation_proxy_endpoint
         ):
-            assert (
-                self._federation_proxy_credentials is not None
-            ), "`federation_proxy_credentials` are required when using `federation_proxy_locations`"
+            assert self._federation_proxy_credentials is not None, (
+                "`federation_proxy_credentials` are required when using `federation_proxy_locations`"
+            )
 
             # Set a Proxy-Authorization header
             if headers is None:

synapse/http/servlet.py

Lines changed: 3 additions & 3 deletions
@@ -582,9 +582,9 @@ def parse_enum(
         is not one of those allowed values.
     """
     # Assert the enum values are strings.
-    assert all(
-        isinstance(e.value, str) for e in E
-    ), "parse_enum only works with string values"
+    assert all(isinstance(e.value, str) for e in E), (
+        "parse_enum only works with string values"
+    )
     str_value = parse_string(
         request,
         name,

synapse/module_api/__init__.py

Lines changed: 3 additions & 3 deletions
@@ -894,9 +894,9 @@ def invalidate_access_token(
         Raises:
             synapse.api.errors.AuthError: the access token is invalid
         """
-        assert isinstance(
-            self._device_handler, DeviceHandler
-        ), "invalidate_access_token can only be called on the main process"
+        assert isinstance(self._device_handler, DeviceHandler), (
+            "invalidate_access_token can only be called on the main process"
+        )
 
         # see if the access token corresponds to a device
         user_info = yield defer.ensureDeferred(

synapse/replication/http/_base.py

Lines changed: 3 additions & 3 deletions
@@ -128,9 +128,9 @@ def __init__(self, hs: "HomeServer"):
 
         # We reserve `instance_name` as a parameter to sending requests, so we
        # assert here that sub classes don't try and use the name.
-        assert (
-            "instance_name" not in self.PATH_ARGS
-        ), "`instance_name` is a reserved parameter name"
+        assert "instance_name" not in self.PATH_ARGS, (
+            "`instance_name` is a reserved parameter name"
+        )
         assert (
             "instance_name"
             not in signature(self.__class__._serialize_payload).parameters

synapse/replication/tcp/streams/events.py

Lines changed: 3 additions & 3 deletions
@@ -200,9 +200,9 @@ async def _update_function(
 
         # we rely on get_all_new_forward_event_rows strictly honouring the limit, so
         # that we know it is safe to just take upper_limit = event_rows[-1][0].
-        assert (
-            len(event_rows) <= target_row_count
-        ), "get_all_new_forward_event_rows did not honour row limit"
+        assert len(event_rows) <= target_row_count, (
+            "get_all_new_forward_event_rows did not honour row limit"
+        )
 
         # if we hit the limit on event_updates, there's no point in going beyond the
         # last stream_id in the batch for the other sources.

synapse/rest/admin/__init__.py

Lines changed: 1 addition & 2 deletions
@@ -207,8 +207,7 @@ async def on_POST(
             (stream, topo, _event_id) = r
             token = "t%d-%d" % (topo, stream)
             logger.info(
-                "[purge] purging up to token %s (received_ts %i => "
-                "stream_ordering %i)",
+                "[purge] purging up to token %s (received_ts %i => stream_ordering %i)",
                 token,
                 ts,
                 stream_ordering,

synapse/rest/client/receipts.py

Lines changed: 1 addition & 3 deletions
@@ -39,9 +39,7 @@
 
 class ReceiptRestServlet(RestServlet):
     PATTERNS = client_patterns(
-        "/rooms/(?P<room_id>[^/]*)"
-        "/receipt/(?P<receipt_type>[^/]*)"
-        "/(?P<event_id>[^/]*)$"
+        "/rooms/(?P<room_id>[^/]*)/receipt/(?P<receipt_type>[^/]*)/(?P<event_id>[^/]*)$"
     )
     CATEGORY = "Receipts requests"
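
Note: as with the other string merges, the merged URL pattern is byte-for-byte identical to the three adjacent literals it replaces. A quick sanity check one could run against the raw pattern string itself (outside of client_patterns; the room ID, receipt type, and event ID below are made up):

    import re

    PATTERN = "/rooms/(?P<room_id>[^/]*)/receipt/(?P<receipt_type>[^/]*)/(?P<event_id>[^/]*)$"
    m = re.match(PATTERN, "/rooms/!room:example.org/receipt/m.read/$event_id_1")
    assert m is not None
    assert m.group("receipt_type") == "m.read"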
