diff --git a/synodic_client/application/screen/tool_update_controller.py b/synodic_client/application/screen/tool_update_controller.py index 1e13cbd..1ac0acc 100644 --- a/synodic_client/application/screen/tool_update_controller.py +++ b/synodic_client/application/screen/tool_update_controller.py @@ -29,9 +29,7 @@ update_all_tools, update_tool, ) -from synodic_client.resolution import ( - resolve_update_config, -) +from synodic_client.resolution import resolve_update_config if TYPE_CHECKING: from synodic_client.application.config_store import ConfigStore diff --git a/synodic_client/application/screen/tray.py b/synodic_client/application/screen/tray.py index fb60093..aae7401 100644 --- a/synodic_client/application/screen/tray.py +++ b/synodic_client/application/screen/tray.py @@ -29,6 +29,8 @@ class TrayScreen: """Tray screen for the application.""" + _MENU_POPUP_DELAY_MS = 100 + def __init__( self, app: QApplication, @@ -148,6 +150,13 @@ def _on_tray_activated(self, reason: QSystemTrayIcon.ActivationReason) -> None: logger.debug('Tray activated: reason=%s', reason.name) if reason == QSystemTrayIcon.ActivationReason.DoubleClick: self._show_window() + elif reason == QSystemTrayIcon.ActivationReason.Context: + QTimer.singleShot(self._MENU_POPUP_DELAY_MS, self._show_tray_menu) + + def _show_tray_menu(self) -> None: + """Show the tray context menu at the current cursor position.""" + geo = self.tray.geometry() + self._menu.popup(geo.center()) def _show_window(self) -> None: """Show, raise, and focus the main window.""" diff --git a/synodic_client/application/update_controller.py b/synodic_client/application/update_controller.py index 49335a7..9d06f3e 100644 --- a/synodic_client/application/update_controller.py +++ b/synodic_client/application/update_controller.py @@ -31,10 +31,8 @@ from synodic_client.application.update_model import UpdateModel from synodic_client.operations.schema import UpdateCheckResult from synodic_client.operations.update import apply_self_update, 
check_self_update, download_self_update -from synodic_client.resolution import ( - ResolvedConfig, - resolve_update_config, -) +from synodic_client.resolution import resolve_update_config +from synodic_client.schema import ResolvedConfig, UpdateState from synodic_client.startup import sync_startup if TYPE_CHECKING: @@ -93,7 +91,7 @@ def __init__( # Track update-relevant config fields to avoid reinitialising # on every config save (e.g. timestamp-only changes). - self._update_config_key = self._extract_update_key(store.config) + self._update_config_key = resolve_update_config(store.config) # Periodic auto-update timer self._auto_update_timer: QTimer | None = None @@ -225,7 +223,7 @@ def _on_config_changed(self, config: object) -> None: return self._auto_apply = config.auto_apply - new_key = self._extract_update_key(config) + new_key = resolve_update_config(config) if new_key == self._update_config_key: return self._update_config_key = new_key @@ -237,16 +235,6 @@ def _on_config_changed(self, config: object) -> None: # Updater re-initialisation # ------------------------------------------------------------------ - @staticmethod - def _extract_update_key(config: ResolvedConfig) -> tuple[object, ...]: - """Return a hashable tuple of the fields that affect the updater.""" - return ( - config.update_source, - config.update_channel, - config.auto_update_interval_minutes, - config.auto_apply, - ) - def _reinitialize_updater(self, config: ResolvedConfig) -> None: """Re-derive update settings and restart the updater and timer. @@ -289,6 +277,15 @@ def _do_check(self, *, silent: bool) -> None: self._model.set_error('Updater is not initialized.') return + # A download may still be running in a thread-pool executor even + # after the asyncio Task wrapper has been cancelled. Starting a + # new check would trigger a second concurrent download for the + # same package, leading to a file-rename race on Windows. 
+ if self._client.updater.state == UpdateState.DOWNLOADING: + logger.debug('Skipping update check — download already in progress') + self._model.set_check_button_enabled(True) + return + + # Always disable the button; only show "Checking…" when no # download is already pending (to preserve the ready state). self._model.set_check_button_enabled(False) diff --git a/synodic_client/config.py b/synodic_client/config.py index 7dacaae..8ad096a 100644 --- a/synodic_client/config.py +++ b/synodic_client/config.py @@ -122,11 +122,16 @@ def load_user_config() -> UserConfig: try: data = json.loads(path.read_text(encoding='utf-8')) + except (json.JSONDecodeError, OSError): + logger.exception('Failed to read config from %s, using defaults', path) + return UserConfig() + + try: config = UserConfig.model_validate(data) logger.debug('Loaded user config from %s', path) return config except Exception: - logger.exception('Failed to load user config from %s, using defaults', path) + logger.exception('Failed to validate user config from %s, using defaults', path) return UserConfig() diff --git a/synodic_client/resolution.py b/synodic_client/resolution.py index 0d7fc06..44ff038 100644 --- a/synodic_client/resolution.py +++ b/synodic_client/resolution.py @@ -25,9 +25,7 @@ from synodic_client.schema import ( DEFAULT_AUTO_UPDATE_INTERVAL_MINUTES, DEFAULT_TOOL_UPDATE_INTERVAL_MINUTES, - GITHUB_REPO_URL, ResolvedConfig, - UpdateChannel, UpdateConfig, UserConfig, ) @@ -160,7 +158,11 @@ def update_user_config(**changes: object) -> ResolvedConfig: def resolve_update_config(config: ResolvedConfig) -> UpdateConfig: - """Derive an ``UpdateConfig`` from resolved configuration values. + """Derive an :class:`UpdateConfig` from resolved configuration values. + + Delegates to :meth:`UpdateConfig.from_resolved` so the type owns + its own construction while this module stays the canonical entry + point for all resolution logic. Args: config: A resolved configuration snapshot.
@@ -168,11 +170,4 @@ def resolve_update_config(config: ResolvedConfig) -> UpdateConfig: Returns: An ``UpdateConfig`` ready to initialise the updater. """ - channel = UpdateChannel.DEVELOPMENT if config.update_channel == 'dev' else UpdateChannel.STABLE - - return UpdateConfig( - channel=channel, - repo_url=config.update_source or GITHUB_REPO_URL, - auto_update_interval_minutes=config.auto_update_interval_minutes, - tool_update_interval_minutes=config.tool_update_interval_minutes, - ) + return UpdateConfig.from_resolved(config) diff --git a/synodic_client/schema.py b/synodic_client/schema.py index 65587c2..ea85a77 100644 --- a/synodic_client/schema.py +++ b/synodic_client/schema.py @@ -8,13 +8,16 @@ from __future__ import annotations +import logging import sys from dataclasses import dataclass, field from enum import Enum, StrEnum, auto from typing import Any from packaging.version import Version -from pydantic import BaseModel +from pydantic import BaseModel, ValidationError, model_validator + +logger = logging.getLogger(__name__) # --------------------------------------------------------------------------- # BuildConfig — read-only, lives next to the executable @@ -109,6 +112,31 @@ class UserConfig(BaseModel): # tool updates have been recorded. last_tool_updates: dict[str, str] | None = None + @model_validator(mode='wrap') + @classmethod + def _recover_invalid_fields(cls, data: Any, handler: Any) -> UserConfig: + """Silently drop fields that fail validation instead of rejecting the entire config. + + When an on-disk ``config.json`` contains a value whose type no longer + matches the schema (e.g. after a version upgrade renames or re-types a + field), the normal behaviour is to raise ``ValidationError`` and lose + *every* setting. This wrap validator intercepts the error, removes + only the offending fields (so their defaults kick in), and retries. 
+ """ + try: + return handler(data) + except ValidationError as exc: + if not isinstance(data, dict): + raise + + bad_fields = {str(e['loc'][0]) for e in exc.errors() if e.get('loc')} + cleaned = {k: v for k, v in data.items() if k not in bad_fields} + + for name in bad_fields: + logger.warning('Discarding invalid config field %r (using default)', name) + + return handler(cleaned) + # --------------------------------------------------------------------------- # Update channel & state enums @@ -201,6 +229,24 @@ class UpdateConfig: # Interval in minutes between tool update checks (0 = disabled) tool_update_interval_minutes: int = DEFAULT_TOOL_UPDATE_INTERVAL_MINUTES + @classmethod + def from_resolved(cls, config: ResolvedConfig) -> UpdateConfig: + """Derive an ``UpdateConfig`` from resolved configuration values. + + Args: + config: A resolved configuration snapshot. + + Returns: + An ``UpdateConfig`` ready to initialise the updater. + """ + channel = UpdateChannel.DEVELOPMENT if config.update_channel == 'dev' else UpdateChannel.STABLE + return cls( + channel=channel, + repo_url=config.update_source or GITHUB_REPO_URL, + auto_update_interval_minutes=config.auto_update_interval_minutes, + tool_update_interval_minutes=config.tool_update_interval_minutes, + ) + @property def channel_name(self) -> str: """Get the channel name for Velopack. diff --git a/synodic_client/updater.py b/synodic_client/updater.py index e02655c..6f8381d 100644 --- a/synodic_client/updater.py +++ b/synodic_client/updater.py @@ -208,7 +208,12 @@ def check_for_update(self) -> UpdateInfo: # moved past it. A periodic re-check that discovers the # same release must not regress DOWNLOADED → UPDATE_AVAILABLE, # which would cause apply_update_on_exit() to reject the update. 
- if self._state not in {UpdateState.DOWNLOADED, UpdateState.APPLYING, UpdateState.APPLIED}: + if self._state not in { + UpdateState.DOWNLOADING, + UpdateState.DOWNLOADED, + UpdateState.APPLYING, + UpdateState.APPLIED, + }: self._state = UpdateState.UPDATE_AVAILABLE logger.info('Update available: %s -> %s', self._current_version, latest) else: @@ -379,20 +384,16 @@ def _download_direct( # Verify checksum — prefer SHA256, fall back to SHA1. if asset.SHA256: actual = sha256_hash.hexdigest() - if not actual.lower() == asset.SHA256.lower(): + if actual.lower() != asset.SHA256.lower(): partial_file.unlink(missing_ok=True) - raise RuntimeError( - f'SHA256 mismatch for {asset.FileName}: expected {asset.SHA256}, got {actual}' - ) + raise RuntimeError(f'SHA256 mismatch for {asset.FileName}: expected {asset.SHA256}, got {actual}') elif asset.SHA1: actual_sha1 = hashlib.sha1(partial_file.read_bytes()).hexdigest() # noqa: S324 — verifying known digest - if not actual_sha1.lower() == asset.SHA1.lower(): + if actual_sha1.lower() != asset.SHA1.lower(): partial_file.unlink(missing_ok=True) - raise RuntimeError( - f'SHA1 mismatch for {asset.FileName}: expected {asset.SHA1}, got {actual_sha1}' - ) + raise RuntimeError(f'SHA1 mismatch for {asset.FileName}: expected {asset.SHA1}, got {actual_sha1}') - partial_file.rename(target_file) + partial_file.replace(target_file) logger.info('Direct download complete: %s', target_file) def download_update(self, progress_callback: Callable[[int], None] | None = None) -> bool: @@ -545,7 +546,7 @@ def _get_velopack_manager(self) -> Any: raise RuntimeError(f'Failed to create Velopack UpdateManager: {e}') from e -def _on_before_uninstall(version: str) -> None: +def on_before_uninstall(version: str) -> None: """Velopack hook: called before the app is uninstalled. 
Removes the ``synodic://`` URI protocol handler and auto-startup diff --git a/tests/unit/operations/test_install.py b/tests/unit/operations/test_install.py index 0cb771b..6fccaa2 100644 --- a/tests/unit/operations/test_install.py +++ b/tests/unit/operations/test_install.py @@ -61,6 +61,7 @@ def _make_action( action.package.constraint = constraint else: action.package = None + action.distro = None return action diff --git a/tests/unit/qt/conftest.py b/tests/unit/qt/conftest.py index 7c7c1c8..4cbb71f 100644 --- a/tests/unit/qt/conftest.py +++ b/tests/unit/qt/conftest.py @@ -97,6 +97,7 @@ def make_action( action.command = overrides.get('command') action.include_prereleases = overrides.get('include_prereleases', False) action.plugin_target = overrides.get('plugin_target') + action.distro = overrides.get('distro') return action diff --git a/tests/unit/qt/test_update_controller.py b/tests/unit/qt/test_update_controller.py index 2027300..4c1233b 100644 --- a/tests/unit/qt/test_update_controller.py +++ b/tests/unit/qt/test_update_controller.py @@ -4,6 +4,8 @@ from unittest.mock import MagicMock, patch +import pytest + from synodic_client.application.screen.update_banner import UpdateBanner from synodic_client.application.theme import ( UPDATE_STATUS_AVAILABLE_STYLE, @@ -13,6 +15,7 @@ from synodic_client.application.update_controller import UpdateController from synodic_client.application.update_model import UpdateModel from synodic_client.operations.schema import UpdateCheckResult +from synodic_client.schema import UpdateState from .conftest import make_config_store, make_resolved_config @@ -576,3 +579,73 @@ def test_apply_without_pending_silent_logs_only() -> None: assert banner.state.name == 'HIDDEN' client.apply_update_on_exit.assert_not_called() app.quit.assert_not_called() + + +# --------------------------------------------------------------------------- +# DOWNLOADING guard in _do_check +# --------------------------------------------------------------------------- + 
+ +class TestDoCheckDownloadingGuard: + """Verify _do_check skips checks when a download is in progress.""" + + @staticmethod + @pytest.mark.parametrize('silent', [False, True], ids=['manual', 'silent']) + def test_check_skipped_when_downloading(silent: bool) -> None: + """_do_check should return early without creating a task when DOWNLOADING.""" + ctrl, _app, client, banner, model = _make_controller() + spy = ModelSpy(model) + client.updater.state = UpdateState.DOWNLOADING + + with patch.object(ctrl, '_set_task') as mock_set_task: + ctrl._do_check(silent=silent) + + mock_set_task.assert_not_called() + # Button should be re-enabled + assert True in spy.check_button_enabled + + @staticmethod + def test_check_proceeds_when_not_downloading() -> None: + """_do_check should proceed normally when updater is not DOWNLOADING.""" + ctrl, _app, client, banner, model = _make_controller() + client.updater.state = UpdateState.NO_UPDATE + + with patch.object(ctrl, '_set_task') as mock_set_task: + ctrl._do_check(silent=False) + + mock_set_task.assert_called_once() + + +# --------------------------------------------------------------------------- +# request_retry / request_apply +# --------------------------------------------------------------------------- + + +class TestRequestRetry: + """Verify request_retry clears the failed version and re-checks.""" + + @staticmethod + def test_clears_failed_version_and_checks() -> None: + """request_retry should clear _failed_version then trigger a check.""" + ctrl, _app, _client, banner, model = _make_controller() + ctrl._failed_version = '2.0.0' + + with patch.object(ctrl, 'check_now') as mock_check: + ctrl.request_retry() + + assert ctrl._failed_version is None + mock_check.assert_called_once_with(silent=True) + + +class TestRequestApply: + """Verify request_apply delegates to _apply_update.""" + + @staticmethod + def test_delegates_to_apply_update() -> None: + """request_apply should call _apply_update(silent=False).""" + ctrl, _app, 
_client, banner, model = _make_controller() + + with patch.object(ctrl, '_apply_update') as mock_apply: + ctrl.request_apply() + + mock_apply.assert_called_once_with(silent=False) diff --git a/tests/unit/qt/test_update_feedback.py b/tests/unit/qt/test_update_feedback.py index 700253c..1b7a20d 100644 --- a/tests/unit/qt/test_update_feedback.py +++ b/tests/unit/qt/test_update_feedback.py @@ -512,9 +512,9 @@ def test_header_auto_update_toggled_emits_composite() -> None: header.set_runtime('3.11') spy = MagicMock() header.auto_update_toggled.connect(spy) - # Find the Auto button and click it + # Find the auto-update toggle button (checkable ↺ button) for child in header.findChildren(QPushButton): - if child.text() == 'Auto': + if child.isCheckable(): child.click() break spy.assert_called_once() diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index 4ed06a8..016e821 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -10,6 +10,7 @@ BuildConfig, UserConfig, config_dir, + load_user_config, save_user_config, set_dev_mode, ) @@ -208,3 +209,101 @@ def test_save_load_round_trip(tmp_path: Path) -> None: assert loaded.update_channel == 'dev' assert loaded.auto_start is False assert loaded.update_source is None + + +class TestUserConfigRecovery: + """Tests for the wrap validator that recovers invalid fields.""" + + @staticmethod + def test_single_corrupt_field_preserves_others() -> None: + """One invalid field is discarded; all other fields survive.""" + data = { + 'update_channel': 'dev', + 'auto_start': False, + 'auto_update_interval_minutes': 'not_an_int', # corrupt + } + config = UserConfig.model_validate(data) + assert config.update_channel == 'dev' + assert config.auto_start is False + assert config.auto_update_interval_minutes is None # reset to default + + @staticmethod + def test_multiple_corrupt_fields() -> None: + """Multiple invalid fields are discarded; valid fields survive.""" + data = { + 'update_channel': 'stable', + 
'auto_update_interval_minutes': 'bad', + 'auto_apply': 'not_a_bool', + 'debug_logging': 42, # int coerces to bool in Pydantic — this is valid + 'auto_start': False, + } + config = UserConfig.model_validate(data) + assert config.update_channel == 'stable' + assert config.auto_start is False + # The corrupt fields revert to defaults + assert config.auto_update_interval_minutes is None + assert config.auto_apply is None + + @staticmethod + def test_all_fields_corrupt() -> None: + """When every known field is invalid, result is equivalent to UserConfig().""" + data = { + 'update_source': 123, # str field, int won't coerce + 'update_channel': [], + 'auto_update_interval_minutes': 'bad', + 'tool_update_interval_minutes': 'bad', + 'auto_apply': 'nope', + 'auto_start': 'nope', + 'debug_logging': 'nope', + 'plugin_auto_update': 'bad', + 'prerelease_packages': 42, + 'last_client_update': [], + 'last_tool_updates': 'bad', + } + config = UserConfig.model_validate(data) + assert config == UserConfig() + + @staticmethod + def test_valid_data_unchanged() -> None: + """Fully valid data passes through the wrap validator without modification.""" + data = { + 'update_channel': 'dev', + 'auto_start': True, + 'auto_update_interval_minutes': 10, + } + expected_interval = 10 + config = UserConfig.model_validate(data) + assert config.update_channel == 'dev' + assert config.auto_start is True + assert config.auto_update_interval_minutes == expected_interval + + +class TestLoadUserConfigRecovery: + """Integration tests: load_user_config with corrupt files on disk.""" + + @staticmethod + def test_corrupt_field_recovered_from_disk(tmp_path: Path) -> None: + """A config file with one bad field loads with that field reset to default.""" + config_data = { + 'update_channel': 'dev', + 'auto_start': False, + 'auto_update_interval_minutes': 'not_a_number', + } + (tmp_path / 'config.json').write_text(json.dumps(config_data), encoding='utf-8') + + with patch('synodic_client.config.config_dir', 
return_value=tmp_path): + config = load_user_config() + + assert config.update_channel == 'dev' + assert config.auto_start is False + assert config.auto_update_interval_minutes is None + + @staticmethod + def test_invalid_json_returns_defaults(tmp_path: Path) -> None: + """Completely invalid JSON returns a default UserConfig.""" + (tmp_path / 'config.json').write_text('{{not valid json', encoding='utf-8') + + with patch('synodic_client.config.config_dir', return_value=tmp_path): + config = load_user_config() + + assert config == UserConfig() diff --git a/tests/unit/test_updater.py b/tests/unit/test_updater.py index 230c5b4..68b3ce5 100644 --- a/tests/unit/test_updater.py +++ b/tests/unit/test_updater.py @@ -1,8 +1,9 @@ """Tests for the self-update functionality using Velopack.""" -import io import json +import urllib.error from pathlib import Path +from typing import Any from unittest.mock import MagicMock, PropertyMock, patch import pytest @@ -15,6 +16,7 @@ Updater, github_release_asset_url, initialize_velopack, + on_before_uninstall, pep440_to_semver, ) @@ -257,19 +259,30 @@ def test_check_non_404_http_error_is_failed(updater: Updater) -> None: assert updater.state == UpdateState.FAILED @staticmethod - def test_check_preserves_downloaded_state(updater: Updater) -> None: - """Re-checking after download must not regress DOWNLOADED → UPDATE_AVAILABLE. + @pytest.mark.parametrize( + 'guarded_state', + [UpdateState.DOWNLOADING, UpdateState.DOWNLOADED, UpdateState.APPLYING, UpdateState.APPLIED], + ids=lambda s: s.name.lower(), + ) + def test_check_preserves_advanced_state(updater: Updater, guarded_state: UpdateState) -> None: + """Re-checking must not regress any advanced state back to UPDATE_AVAILABLE. - Regression test: when the periodic auto-check timer fires between - download completion and the user clicking "Restart Now", the state - was incorrectly reset to UPDATE_AVAILABLE, causing apply_update_on_exit - to reject the update with "No downloaded update to apply". 
+ Regression test: when the periodic auto-check timer fires during or + after a download, the state must remain at the higher-priority state. """ mock_target = MagicMock(spec=velopack.VelopackAsset) mock_target.Version = '2.0.0' - mock_velopack_info = _setup_downloaded_state(updater) + mock_velopack_info = MagicMock(spec=velopack.UpdateInfo) mock_velopack_info.TargetFullRelease = mock_target + updater._state = guarded_state + updater._update_info = UpdateInfo( + available=True, + current_version=Version('1.0.0'), + latest_version=Version('2.0.0'), + _velopack_info=mock_velopack_info, + ) + mock_manager = MagicMock(spec=velopack.UpdateManager) mock_manager.check_for_updates.return_value = mock_velopack_info @@ -277,7 +290,7 @@ def test_check_preserves_downloaded_state(updater: Updater) -> None: info = updater.check_for_update() assert info.available is True - assert updater.state == UpdateState.DOWNLOADED + assert updater.state == guarded_state class TestUpdaterDownloadUpdate: @@ -377,17 +390,32 @@ def test_apply_on_exit_no_downloaded_update(updater: Updater) -> None: @staticmethod @pytest.mark.parametrize( - ('restart', 'silent'), + ('restart', 'silent', 'restart_args'), [ - (True, False), - (False, False), - (True, True), - (False, True), + (True, False, []), + (False, False, []), + (True, True, []), + (False, True, []), + (True, False, ['--minimized']), + (True, True, ['--minimized']), + ], + ids=[ + 'restart', + 'no-restart', + 'silent-restart', + 'silent-no-restart', + 'restart-with-args', + 'silent-restart-with-args', ], - ids=['restart', 'no-restart', 'silent-restart', 'silent-no-restart'], ) - def test_apply_on_exit_matrix(updater: Updater, *, restart: bool, silent: bool) -> None: - """Verify apply_update_on_exit stages the update with the correct restart/silent flags.""" + def test_apply_on_exit_matrix( + updater: Updater, + *, + restart: bool, + silent: bool, + restart_args: list[str], + ) -> None: + """Verify apply_update_on_exit stages the update with 
the correct flags.""" mock_velopack_info = _setup_downloaded_state(updater) mock_manager = MagicMock(spec=velopack.UpdateManager) @@ -395,54 +423,14 @@ def test_apply_on_exit_matrix(updater: Updater, *, restart: bool, silent: bool) patch.object(Updater, 'is_installed', new_callable=PropertyMock, return_value=True), patch.object(updater, '_get_velopack_manager', return_value=mock_manager), ): - updater.apply_update_on_exit(restart=restart, silent=silent) + updater.apply_update_on_exit(restart=restart, silent=silent, restart_args=restart_args) assert updater.state == UpdateState.APPLYING mock_manager.wait_exit_then_apply_updates.assert_called_once_with( mock_velopack_info, silent=silent, restart=restart, - restart_args=[], - ) - - @staticmethod - def test_apply_on_exit_with_restart_args(updater: Updater) -> None: - """Verify restart_args are forwarded to wait_exit_then_apply_updates.""" - mock_velopack_info = _setup_downloaded_state(updater) - mock_manager = MagicMock(spec=velopack.UpdateManager) - - with ( - patch.object(Updater, 'is_installed', new_callable=PropertyMock, return_value=True), - patch.object(updater, '_get_velopack_manager', return_value=mock_manager), - ): - updater.apply_update_on_exit(restart=True, restart_args=['--minimized']) - - assert updater.state == UpdateState.APPLYING - mock_manager.wait_exit_then_apply_updates.assert_called_once_with( - mock_velopack_info, - silent=False, - restart=True, - restart_args=['--minimized'], - ) - - @staticmethod - def test_apply_on_exit_silent_with_restart_args(updater: Updater) -> None: - """Verify silent mode forwards restart_args.""" - mock_velopack_info = _setup_downloaded_state(updater) - mock_manager = MagicMock(spec=velopack.UpdateManager) - - with ( - patch.object(Updater, 'is_installed', new_callable=PropertyMock, return_value=True), - patch.object(updater, '_get_velopack_manager', return_value=mock_manager), - ): - updater.apply_update_on_exit(restart=True, silent=True, restart_args=['--minimized']) - 
- assert updater.state == UpdateState.APPLYING - mock_manager.wait_exit_then_apply_updates.assert_called_once_with( - mock_velopack_info, - silent=True, - restart=True, - restart_args=['--minimized'], + restart_args=restart_args, ) @@ -641,7 +629,7 @@ def test_semver_input_passthrough() -> None: } -def _make_urlopen_response(data: dict[str, object]) -> MagicMock: +def _make_urlopen_response(data: dict[str, Any]) -> MagicMock: """Build a mock ``urlopen`` return value that reads as JSON.""" body = json.dumps(data).encode() resp = MagicMock() @@ -666,8 +654,10 @@ def dev_updater() -> Updater: class TestDevChannelGithubPrerelease: - """Regression tests: dev channel uses a GitHub prerelease that Velopack's - GithubSource silently ignores (``prerelease=false``). + """Regression: dev channel prerelease ignored by Velopack GithubSource. + + The dev channel uses a GitHub prerelease that Velopack's GithubSource + silently ignores (``prerelease=false``). The check path already has ``_check_manifest_fallback``. 
These tests verify that the *download* path also works when the update was discovered @@ -809,10 +799,7 @@ def test_download_direct_constructs_correct_url(dev_updater: Updater, tmp_path: # Verify the URL call_args = mock_urlopen.call_args req = call_args[0][0] - expected_url = ( - f'{GITHUB_REPO_URL}/releases/download/dev' - '/synodic-0.1.0-dev.83-dev-win-full.nupkg' - ) + expected_url = f'{GITHUB_REPO_URL}/releases/download/dev/synodic-0.1.0-dev.83-dev-win-full.nupkg' assert req.full_url == expected_url # Verify the file was written @@ -838,12 +825,12 @@ def test_download_direct_sha256_mismatch(dev_updater: Updater, tmp_path: Path) - with ( patch('synodic_client.updater.sys') as mock_sys, patch('synodic_client.updater.urllib.request.urlopen', return_value=resp), - pytest.raises(RuntimeError, match='SHA256 mismatch'), ): mock_sys.executable = str(tmp_path / 'current' / 'synodic.exe') (tmp_path / 'current').mkdir() - dev_updater._download_direct(mock_velopack_info) + with pytest.raises(RuntimeError, match='SHA256 mismatch'): + dev_updater._download_direct(mock_velopack_info) # Partial file should be cleaned up assert not (tmp_path / 'packages' / 'test.nupkg.partial').exists() @@ -855,12 +842,400 @@ def test_download_direct_skips_existing(dev_updater: Updater, tmp_path: Path) -> mock_velopack_info = MagicMock() mock_velopack_info.TargetFullRelease.FileName = 'already-there.nupkg' - with patch('synodic_client.updater.sys') as mock_sys: + with ( + patch('synodic_client.updater.sys') as mock_sys, + patch('synodic_client.updater.urllib.request.urlopen') as mock_urlopen, + ): mock_sys.executable = str(tmp_path / 'current' / 'synodic.exe') (tmp_path / 'current').mkdir() packages = tmp_path / 'packages' packages.mkdir() (packages / 'already-there.nupkg').write_bytes(b'existing') - # Should not hit network at all dev_updater._download_direct(mock_velopack_info) + + # Must not have opened any network connection + mock_urlopen.assert_not_called() + + @staticmethod + def 
test_download_direct_uses_replace_not_rename(dev_updater: Updater, tmp_path: Path) -> None: + """_download_direct must use Path.replace() — not rename() — for atomicity. + + Regression: ``Path.rename()`` raises ``FileExistsError`` on Windows + (WinError 183) when a concurrent download has already placed the + target file. ``Path.replace()`` overwrites atomically. + """ + mock_velopack_info = MagicMock() + mock_velopack_info.TargetFullRelease.FileName = 'race.nupkg' + mock_velopack_info.TargetFullRelease.SHA256 = '' + mock_velopack_info.TargetFullRelease.SHA1 = '' + mock_velopack_info.TargetFullRelease.Size = 0 + + nupkg_content = b'new-package-data' + resp = MagicMock() + resp.read.side_effect = [nupkg_content, b''] + resp.__enter__ = lambda s: s + resp.__exit__ = MagicMock(return_value=False) + resp.headers = {'Content-Length': str(len(nupkg_content))} + + with ( + patch('synodic_client.updater.sys') as mock_sys, + patch('synodic_client.updater.urllib.request.urlopen', return_value=resp), + patch.object(Path, 'replace') as mock_replace, + patch.object(Path, 'rename') as mock_rename, + ): + mock_sys.executable = str(tmp_path / 'current' / 'synodic.exe') + (tmp_path / 'current').mkdir() + dev_updater._download_direct(mock_velopack_info) + + # replace() must be used for atomic overwrite + mock_replace.assert_called_once() + # rename() must NOT be used (it fails on Windows when target exists) + mock_rename.assert_not_called() + + @staticmethod + def test_check_does_not_regress_downloading_state(dev_updater: Updater) -> None: + """check_for_update must not regress DOWNLOADING → UPDATE_AVAILABLE. + + Regression: a periodic re-check during an active download was + resetting state to UPDATE_AVAILABLE, which allowed a second + concurrent download_update() call. 
+ """ + mock_manager = MagicMock(spec=velopack.UpdateManager) + mock_manager.check_for_updates.return_value = None + manifest_resp = _make_urlopen_response(_DEV_MANIFEST) + + # First: put the updater into DOWNLOADING state + dev_updater._state = UpdateState.DOWNLOADING + + with ( + patch.object(dev_updater, '_get_velopack_manager', return_value=mock_manager), + patch('synodic_client.updater.urllib.request.urlopen', return_value=manifest_resp), + ): + info = dev_updater.check_for_update() + + # The check should still report the update, but must NOT + # regress the state from DOWNLOADING to UPDATE_AVAILABLE. + assert info.available is True + assert dev_updater.state == UpdateState.DOWNLOADING + + +class TestDownloadDirectSHA1: + """Verify _download_direct SHA1 verification paths.""" + + @staticmethod + def test_sha1_only_success(dev_updater: Updater, tmp_path: Path) -> None: + """When SHA256 is empty but SHA1 is provided, SHA1 is verified.""" + content = b'sha1-verified-content' + expected_sha1 = 'c1e63f162617f1685471d477b99de2532580ea97' + + mock_velopack_info = MagicMock() + mock_velopack_info.TargetFullRelease.FileName = 'sha1-only.nupkg' + mock_velopack_info.TargetFullRelease.SHA256 = '' + mock_velopack_info.TargetFullRelease.SHA1 = expected_sha1 + mock_velopack_info.TargetFullRelease.Size = len(content) + + resp = MagicMock() + resp.read.side_effect = [content, b''] + resp.__enter__ = lambda s: s + resp.__exit__ = MagicMock(return_value=False) + resp.headers = {'Content-Length': str(len(content))} + + with ( + patch('synodic_client.updater.sys') as mock_sys, + patch('synodic_client.updater.urllib.request.urlopen', return_value=resp), + ): + mock_sys.executable = str(tmp_path / 'current' / 'synodic.exe') + (tmp_path / 'current').mkdir() + dev_updater._download_direct(mock_velopack_info) + + target = tmp_path / 'packages' / 'sha1-only.nupkg' + assert target.exists() + assert target.read_bytes() == content + + @staticmethod + def test_sha1_mismatch(dev_updater: 
Updater, tmp_path: Path) -> None: + """SHA1 mismatch raises RuntimeError and cleans up the partial file.""" + mock_velopack_info = MagicMock() + mock_velopack_info.TargetFullRelease.FileName = 'sha1-bad.nupkg' + mock_velopack_info.TargetFullRelease.SHA256 = '' + mock_velopack_info.TargetFullRelease.SHA1 = 'wrong_sha1_hash' + mock_velopack_info.TargetFullRelease.Size = 0 + + resp = MagicMock() + resp.read.side_effect = [b'some content', b''] + resp.__enter__ = lambda s: s + resp.__exit__ = MagicMock(return_value=False) + resp.headers = {'Content-Length': '12'} + + with ( + patch('synodic_client.updater.sys') as mock_sys, + patch('synodic_client.updater.urllib.request.urlopen', return_value=resp), + ): + mock_sys.executable = str(tmp_path / 'current' / 'synodic.exe') + (tmp_path / 'current').mkdir() + + with pytest.raises(RuntimeError, match='SHA1 mismatch'): + dev_updater._download_direct(mock_velopack_info) + + assert not (tmp_path / 'packages' / 'sha1-bad.nupkg.partial').exists() + assert not (tmp_path / 'packages' / 'sha1-bad.nupkg').exists() + + @staticmethod + def test_no_checksums_skips_verification(dev_updater: Updater, tmp_path: Path) -> None: + """When both SHA256 and SHA1 are empty, download succeeds without verification.""" + content = b'no-checksum-content' + + mock_velopack_info = MagicMock() + mock_velopack_info.TargetFullRelease.FileName = 'no-hash.nupkg' + mock_velopack_info.TargetFullRelease.SHA256 = '' + mock_velopack_info.TargetFullRelease.SHA1 = '' + mock_velopack_info.TargetFullRelease.Size = 0 + + resp = MagicMock() + resp.read.side_effect = [content, b''] + resp.__enter__ = lambda s: s + resp.__exit__ = MagicMock(return_value=False) + resp.headers = {'Content-Length': str(len(content))} + + with ( + patch('synodic_client.updater.sys') as mock_sys, + patch('synodic_client.updater.urllib.request.urlopen', return_value=resp), + ): + mock_sys.executable = str(tmp_path / 'current' / 'synodic.exe') + (tmp_path / 'current').mkdir() + 
dev_updater._download_direct(mock_velopack_info) + + assert (tmp_path / 'packages' / 'no-hash.nupkg').exists() + + +class TestCheckManifestFallbackEdgeCases: + """Direct unit tests for _check_manifest_fallback edge cases.""" + + @staticmethod + def test_empty_assets_returns_none(dev_updater: Updater) -> None: + """A manifest with no Assets returns None.""" + manifest = {'Assets': []} + resp = _make_urlopen_response(manifest) + + with ( + patch.object(dev_updater, '_get_velopack_manager', return_value=None), + patch('synodic_client.updater.urllib.request.urlopen', return_value=resp), + ): + result = dev_updater._check_manifest_fallback() + + assert result is None + + @staticmethod + def test_no_full_assets_returns_none(dev_updater: Updater) -> None: + """A manifest with only Delta assets returns None.""" + manifest = { + 'Assets': [ + { + 'PackageId': 'synodic', + 'Version': '0.1.0-dev.83', + 'Type': 'Delta', + 'FileName': 'delta.nupkg', + }, + ], + } + resp = _make_urlopen_response(manifest) + + with patch('synodic_client.updater.urllib.request.urlopen', return_value=resp): + result = dev_updater._check_manifest_fallback() + + assert result is None + + @staticmethod + def test_older_version_returns_none(dev_updater: Updater) -> None: + """A manifest with only older versions returns None.""" + manifest = { + 'Assets': [ + { + 'PackageId': 'synodic', + 'Version': '0.1.0-dev.79', + 'Type': 'Full', + 'FileName': 'old.nupkg', + }, + ], + } + resp = _make_urlopen_response(manifest) + + with patch('synodic_client.updater.urllib.request.urlopen', return_value=resp): + result = dev_updater._check_manifest_fallback() + + assert result is None + + @staticmethod + def test_network_error_returns_none(dev_updater: Updater) -> None: + """A network error during manifest fetch returns None.""" + with patch( + 'synodic_client.updater.urllib.request.urlopen', + side_effect=urllib.error.URLError('connection refused'), + ): + result = dev_updater._check_manifest_fallback() + + assert 
result is None + + @staticmethod + def test_invalid_version_skipped(dev_updater: Updater) -> None: + """An asset with an unparseable version is skipped without crashing.""" + manifest = { + 'Assets': [ + { + 'PackageId': 'synodic', + 'Version': 'not-a-version', + 'Type': 'Full', + 'FileName': 'bad.nupkg', + }, + { + 'PackageId': 'synodic', + 'Version': '0.1.0-dev.83', + 'Type': 'Full', + 'FileName': 'good.nupkg', + 'SHA256': 'abc', + }, + ], + } + resp = _make_urlopen_response(manifest) + + with patch('synodic_client.updater.urllib.request.urlopen', return_value=resp): + result = dev_updater._check_manifest_fallback() + + assert result is not None + assert result.TargetFullRelease.Version == '0.1.0-dev.83' + + @staticmethod + def test_picks_highest_version(dev_updater: Updater) -> None: + """When multiple Full assets are newer, the highest version wins.""" + manifest = { + 'Assets': [ + { + 'PackageId': 'synodic', + 'Version': '0.1.0-dev.81', + 'Type': 'Full', + 'FileName': 'v81.nupkg', + }, + { + 'PackageId': 'synodic', + 'Version': '0.1.0-dev.85', + 'Type': 'Full', + 'FileName': 'v85.nupkg', + }, + { + 'PackageId': 'synodic', + 'Version': '0.1.0-dev.83', + 'Type': 'Full', + 'FileName': 'v83.nupkg', + }, + ], + } + resp = _make_urlopen_response(manifest) + + with patch('synodic_client.updater.urllib.request.urlopen', return_value=resp): + result = dev_updater._check_manifest_fallback() + + assert result is not None + assert result.TargetFullRelease.Version == '0.1.0-dev.85' + assert result.TargetFullRelease.FileName == 'v85.nupkg' + + @staticmethod + def test_collects_matching_deltas(dev_updater: Updater) -> None: + """Delta assets matching the best version are included in DeltasToTarget.""" + resp = _make_urlopen_response(_DEV_MANIFEST) + + with patch('synodic_client.updater.urllib.request.urlopen', return_value=resp): + result = dev_updater._check_manifest_fallback() + + assert result is not None + assert len(result.DeltasToTarget) == 1 + assert 
result.DeltasToTarget[0].Type == 'Delta' + assert result.DeltasToTarget[0].Version == '0.1.0-dev.83' + + +class TestDownloadUpdateGuards: + """Test edge-case guards in download_update().""" + + @staticmethod + def test_velopack_info_none_returns_false() -> None: + """download_update returns False when _velopack_info is None.""" + config = UpdateConfig(repo_url=GITHUB_REPO_URL, channel=UpdateChannel.STABLE) + u = Updater(current_version=Version('1.0.0'), config=config) + u._state = UpdateState.UPDATE_AVAILABLE + u._update_info = UpdateInfo( + available=True, + current_version=Version('1.0.0'), + latest_version=Version('2.0.0'), + _velopack_info=None, + ) + + with patch.object(Updater, 'is_installed', new_callable=PropertyMock, return_value=True): + result = u.download_update() + + assert result is False + # State should NOT advance to DOWNLOADING + assert u.state == UpdateState.UPDATE_AVAILABLE + + @staticmethod + def test_download_transitions_through_downloading_state() -> None: + """download_update sets DOWNLOADING state during the download.""" + config = UpdateConfig(repo_url=GITHUB_REPO_URL, channel=UpdateChannel.STABLE) + u = Updater(current_version=Version('1.0.0'), config=config) + mock_velopack_info = MagicMock(spec=velopack.UpdateInfo) + u._state = UpdateState.UPDATE_AVAILABLE + u._update_info = UpdateInfo( + available=True, + current_version=Version('1.0.0'), + latest_version=Version('2.0.0'), + _velopack_info=mock_velopack_info, + ) + + observed_states: list[UpdateState] = [] + mock_manager = MagicMock(spec=velopack.UpdateManager) + mock_manager.download_updates.side_effect = lambda info, cb: observed_states.append(u.state) + + with ( + patch.object(Updater, 'is_installed', new_callable=PropertyMock, return_value=True), + patch.object(u, '_get_velopack_manager', return_value=mock_manager), + ): + u.download_update() + + assert UpdateState.DOWNLOADING in observed_states + + +class TestOnBeforeUninstall: + """Tests for the Velopack uninstall hook.""" + + 
@staticmethod + def test_removes_protocol_and_startup() -> None: + """Both remove_protocol and remove_startup are called.""" + with ( + patch('synodic_client.updater.remove_protocol') as mock_proto, + patch('synodic_client.updater.remove_startup') as mock_startup, + ): + on_before_uninstall('0.1.0') + + mock_proto.assert_called_once() + mock_startup.assert_called_once() + + @staticmethod + def test_protocol_failure_does_not_block_startup_removal() -> None: + """If remove_protocol raises, remove_startup is still called.""" + with ( + patch('synodic_client.updater.remove_protocol', side_effect=OSError('failed')), + patch('synodic_client.updater.remove_startup') as mock_startup, + ): + on_before_uninstall('0.1.0') + + mock_startup.assert_called_once() + + @staticmethod + def test_startup_failure_does_not_raise() -> None: + """If remove_startup raises, the hook does not propagate and protocol is still removed.""" + with ( + patch('synodic_client.updater.remove_protocol') as mock_proto, + patch('synodic_client.updater.remove_startup', side_effect=OSError('failed')), + ): + on_before_uninstall('0.1.0') # should not raise + + mock_proto.assert_called_once()