diff --git a/src/easyscience/fitting/fitter.py b/src/easyscience/fitting/fitter.py index 70524996..823b6cf9 100644 --- a/src/easyscience/fitting/fitter.py +++ b/src/easyscience/fitting/fitter.py @@ -209,6 +209,7 @@ def inner_fit_callable( y: np.ndarray, weights: Optional[np.ndarray] = None, vectorized: bool = False, + progress_callback: Optional[Callable[[dict], Optional[bool]]] = None, **kwargs, ) -> FitResults: """This is a wrapped callable which performs the actual @@ -237,6 +238,7 @@ def inner_fit_callable( weights=weights, tolerance=self._tolerance, max_evaluations=self._max_evaluations, + progress_callback=progress_callback, **kwargs, ) diff --git a/src/easyscience/fitting/minimizers/minimizer_base.py b/src/easyscience/fitting/minimizers/minimizer_base.py index b4539ac4..35527a12 100644 --- a/src/easyscience/fitting/minimizers/minimizer_base.py +++ b/src/easyscience/fitting/minimizers/minimizer_base.py @@ -9,7 +9,6 @@ from typing import Callable from typing import Dict from typing import List -from typing import Optional from typing import Tuple from typing import Union @@ -58,17 +57,23 @@ def enum(self) -> AvailableMinimizers: def name(self) -> str: return self._minimizer_enum.name + def _restore_parameter_values(self) -> None: + for key in self._cached_pars.keys(): + self._cached_pars[key].value = self._cached_pars_vals[key][0] + self._cached_pars[key].error = self._cached_pars_vals[key][1] + @abstractmethod def fit( self, x: np.ndarray, y: np.ndarray, weights: np.ndarray, - model: Optional[Callable] = None, - parameters: Optional[Parameter] = None, - method: Optional[str] = None, - tolerance: Optional[float] = None, - max_evaluations: Optional[int] = None, + model: Callable | None = None, + parameters: List[Parameter] | None = None, + method: str | None = None, + tolerance: float | None = None, + max_evaluations: int | None = None, + progress_callback: Callable[[dict], bool | None] | None = None, **kwargs, ) -> FitResults: """Perform a fit using the 
engine. @@ -88,7 +93,7 @@ def fit( """ def evaluate( - self, x: np.ndarray, minimizer_parameters: Optional[dict[str, float]] = None, **kwargs + self, x: np.ndarray, minimizer_parameters: dict[str, float] | None = None, **kwargs ) -> np.ndarray: """Evaluate the fit function for values of x. Parameters used are either the latest or user supplied. If the parameters are @@ -117,7 +122,7 @@ def evaluate( return self._fit_function(x, **minimizer_parameters, **kwargs) - def _get_method_kwargs(self, passed_method: Optional[str] = None) -> dict[str, str]: + def _get_method_kwargs(self, passed_method: str | None = None) -> dict[str, str]: if passed_method is not None: if passed_method not in self.supported_methods(): raise FitError(f'Method {passed_method} not available in {self.__class__}') @@ -129,7 +134,7 @@ def _get_method_kwargs(self, passed_method: Optional[str] = None) -> dict[str, s return {} @abstractmethod - def convert_to_pars_obj(self, par_list: Optional[Union[list]] = None): + def convert_to_pars_obj(self, par_list: List[Parameter] | None = None): """Create an engine compatible container with the `Parameters` converted from the base object. 
diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index 5e9013ae..cd49fa11 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -7,11 +7,12 @@ import warnings from typing import Callable from typing import List -from typing import Optional import numpy as np from bumps.fitters import FIT_AVAILABLE_IDS -from bumps.fitters import fit as bumps_fit +from bumps.fitters import FITTERS +from bumps.fitters import FitDriver +from bumps.monitor import Monitor from bumps.names import Curve from bumps.names import FitProblem from bumps.parameter import Parameter as BumpsParameter @@ -31,6 +32,21 @@ FIT_AVAILABLE_IDS_FILTERED.remove('pt') +class _StepCounterMonitor(Monitor): + """Lightweight monitor that ensures step count is recorded in + history. + """ + + def __init__(self): + self.last_step = 0 + + def config_history(self, history): + history.requires(step=1) + + def __call__(self, history): + self.last_step = int(history.step[0]) + + class _EvalCounter: def __init__(self, fn: Callable): self._fn = fn @@ -44,6 +60,25 @@ def __call__(self, *args, **kwargs): return self._fn(*args, **kwargs) +class _BumpsProgressMonitor(Monitor): + def __init__(self, problem, callback, payload_builder): + self._problem = problem + self._callback = callback + self._payload_builder = payload_builder + + def config_history(self, history): + history.requires(step=1, point=1, value=1) + + def __call__(self, history): + payload = self._payload_builder( + problem=self._problem, + iteration=int(history.step[0]), + point=np.asarray(history.point[0]), + nllf=float(history.value[0]), + ) + self._callback(payload) + + class Bumps(MinimizerBase): """ This is a wrapper to Bumps: https://bumps.readthedocs.io/ @@ -56,7 +91,7 @@ def __init__( self, obj, #: ObjBase, fit_function: Callable, - minimizer_enum: Optional[AvailableMinimizers] = None, + minimizer_enum: 
AvailableMinimizers | None = None, ): # todo after constraint changes, add type hint: obj: ObjBase # noqa: E501 """Initialize the fitting engine with a `ObjBase` and an arbitrary fitting function. @@ -70,7 +105,7 @@ def __init__( """ super().__init__(obj=obj, fit_function=fit_function, minimizer_enum=minimizer_enum) self._p_0 = {} - self._eval_counter: Optional[_EvalCounter] = None + self._eval_counter: _EvalCounter | None = None @staticmethod def all_methods() -> List[str]: @@ -87,16 +122,17 @@ def fit( x: np.ndarray, y: np.ndarray, weights: np.ndarray, - model: Optional[Callable] = None, - parameters: Optional[Parameter] = None, - method: Optional[str] = None, - tolerance: Optional[float] = None, - max_evaluations: Optional[int] = None, - minimizer_kwargs: Optional[dict] = None, - engine_kwargs: Optional[dict] = None, + model: Callable | None = None, + parameters: List[Parameter] | None = None, + method: str | None = None, + tolerance: float | None = None, + max_evaluations: int | None = None, + progress_callback: Callable[[dict], bool | None] | None = None, + minimizer_kwargs: dict | None = None, + engine_kwargs: dict | None = None, **kwargs, ) -> FitResults: - """Perform a fit using the lmfit engine. + """Perform a fit using the BUMPS engine. :param x: points to be calculated at :type x: np.ndarray @@ -105,17 +141,14 @@ def fit( :param weights: Weights for supplied measured points :type weights: np.ndarray :param model: Optional Model which is being fitted to - :type model: lmModel :param parameters: Optional parameters for the fit :type parameters: List[BumpsParameter] - :param kwargs: Additional arguments for the fitting function. :param method: Method for minimization :type method: str + :param progress_callback: Optional callback for progress updates + :type progress_callback: Callable :return: Fit results - :rtype: ModelResult For standard least squares, the weights - should be 1/sigma, where sigma is the standard deviation of - the measurement. 
For unweighted least squares, these should - be 1. + :rtype: FitResults """ method_dict = self._get_method_kwargs(method) @@ -156,6 +189,28 @@ def fit( self._p_0 = {f'p{key}': self._cached_pars[key].value for key in self._cached_pars.keys()} problem = FitProblem(model) + + method_str = method_dict.get('method', self._method) + fitclass = self._resolve_fitclass(method_str) + + step_counter = _StepCounterMonitor() + monitors = [step_counter] + if progress_callback is not None: + if not callable(progress_callback): + raise ValueError('progress_callback must be callable') + monitors.append( + _BumpsProgressMonitor(problem, progress_callback, self._build_progress_payload) + ) + + driver = FitDriver( + fitclass=fitclass, + problem=problem, + monitors=monitors, + **minimizer_kwargs, + **kwargs, + ) + driver.clip() + # Why do we do this? Because a fitting template has to have global_object instantiated outside pre-runtime from easyscience import global_object @@ -163,7 +218,22 @@ def fit( global_object.stack.enabled = False try: - model_results = bumps_fit(problem, **method_dict, **minimizer_kwargs, **kwargs) + # Drive the fit through the local FitDriver instance so the supplied + # `monitors` (including the optional progress callback monitor) are + # invoked. `bumps.fitters.fit` constructs its own driver. 
+ x, fx = driver.fit() + from scipy.optimize import OptimizeResult + + model_results = OptimizeResult( + x=x, + dx=driver.stderr(), + fun=fx, + success=True, + status=0, + message='successful termination', + nit=driver.monitor_runner.history.step[0], + ) + model_results.state = driver.fitter.state self._set_parameter_fit_result(model_results, stack_status, problem._parameters) results = self._gen_fit_results( model_results, @@ -171,12 +241,48 @@ def fit( tolerance=tolerance, ) except Exception as e: - for key in self._cached_pars.keys(): - self._cached_pars[key].value = self._cached_pars_vals[key][0] + self._restore_parameter_values() raise FitError(e) + finally: + global_object.stack.enabled = stack_status return results - def convert_to_pars_obj(self, par_list: Optional[List] = None) -> List[BumpsParameter]: + @staticmethod + def _resolve_fitclass(method: str): + for fitclass in FITTERS: + if fitclass.id == method: + return fitclass + raise FitError(f'Unknown BUMPS fitting method: {method}') + + def _build_progress_payload( + self, problem, iteration: int, point: np.ndarray, nllf: float + ) -> dict: + # Use the nllf already computed by the fitter to avoid a costly + # model re-evaluation, and let BUMPS apply its own chisq scaling. 
+ chi2 = float(problem.chisq(nllf=nllf, norm=False)) + reduced_chi2 = float(problem.chisq(nllf=nllf, norm=True)) + + parameter_values = self._current_parameter_snapshot(problem, point) + + return { + 'iteration': iteration, + 'chi2': chi2, + 'reduced_chi2': reduced_chi2, + 'parameter_values': parameter_values, + 'refresh_plots': False, + 'finished': False, + } + + def _current_parameter_snapshot(self, problem, point: np.ndarray) -> dict: + labels = problem.labels() + values = problem.getp() if point is None else point + snapshot = {} + for label, value in zip(labels, values): + dict_name = label[len(MINIMIZER_PARAMETER_PREFIX) :] + snapshot[dict_name] = float(value) + return snapshot + + def convert_to_pars_obj(self, par_list: List[Parameter] | None = None) -> List[BumpsParameter]: """Create a container with the `Parameters` converted from the base object. @@ -211,7 +317,7 @@ def convert_to_par_object(obj) -> BumpsParameter: fixed=obj.fixed, ) - def _make_model(self, parameters: Optional[List[BumpsParameter]] = None) -> Callable: + def _make_model(self, parameters: List[BumpsParameter] | None = None) -> Callable: """Generate a bumps model from the supplied `fit_function` and parameters in the base object. Note that this makes a callable as it needs to be initialized with *x*, *y*, *weights* @@ -244,43 +350,51 @@ def _make_func(x, y, weights): return _outer(self) def _set_parameter_fit_result( - self, fit_result, stack_status: bool, par_list: List[BumpsParameter] + self, + fit_result, + stack_status: bool, + par_list: List[BumpsParameter], ): """Update parameters to their final values and assign a std error to them. 
- :param fit_result: Fit object which contains info on the fit - :return: None - :rtype: noneType + :param fit_result: BUMPS OptimizeResult containing best-fit + values and errors + :param stack_status: Whether the undo stack was enabled + :param par_list: List of BUMPS parameter objects """ from easyscience import global_object pars = self._cached_pars + x_result = np.asarray(fit_result.x) + stderr = np.asarray(fit_result.dx) if stack_status: - for name in pars.keys(): - pars[name].value = self._cached_pars_vals[name][0] - pars[name].error = self._cached_pars_vals[name][1] + self._restore_parameter_values() global_object.stack.enabled = True global_object.stack.beginMacro('Fitting routine') for index, name in enumerate([par.name for par in par_list]): dict_name = name[len(MINIMIZER_PARAMETER_PREFIX) :] - pars[dict_name].value = fit_result.x[index] - pars[dict_name].error = fit_result.dx[index] + pars[dict_name].value = x_result[index] + pars[dict_name].error = stderr[index] if stack_status: global_object.stack.endMacro() def _gen_fit_results( self, fit_results, - max_evaluations: Optional[int] = None, - tolerance: Optional[float] = None, + max_evaluations: int | None = None, + tolerance: float | None = None, **kwargs, ) -> FitResults: """Convert fit results into the unified `FitResults` format. 
- :param fit_result: Fit object which contains info on the fit + :param x_result: Optimized parameter values from FitDriver + :param fx: Final objective function value + :param driver: The FitDriver instance + :param n_evaluations: Number of iterations completed + :param max_evaluations: Maximum evaluations budget (if set) :return: fit results container :rtype: FitResults """ @@ -314,7 +428,6 @@ def _gen_fit_results( item = {} for index, name in enumerate(self._cached_model.pars.keys()): dict_name = name[len(MINIMIZER_PARAMETER_PREFIX) :] - item[name] = pars[dict_name].value results.p0 = self._p_0 @@ -351,5 +464,4 @@ def _gen_fit_results( results.minimizer_engine = self.__class__ results.fit_args = None results.engine_result = fit_results - # results.check_sanity() return results diff --git a/src/easyscience/fitting/minimizers/minimizer_dfo.py b/src/easyscience/fitting/minimizers/minimizer_dfo.py index cf102e68..2cb2031e 100644 --- a/src/easyscience/fitting/minimizers/minimizer_dfo.py +++ b/src/easyscience/fitting/minimizers/minimizer_dfo.py @@ -2,10 +2,11 @@ # SPDX-License-Identifier: BSD-3-Clause import warnings +from dataclasses import dataclass +from numbers import Integral from typing import Callable from typing import Dict from typing import List -from typing import Optional import dfols import numpy as np @@ -21,6 +22,21 @@ from .utils import FitResults +@dataclass(frozen=True) +class DFOCallbackState: + """Snapshot of a DFO objective evaluation.""" + + evaluation: int + xk: np.ndarray + residuals: np.ndarray + objective: float + parameters: dict[str, float] + best_xk: np.ndarray + best_objective: float + best_parameters: dict[str, float] + improved: bool + + class DFO(MinimizerBase): """ This is a wrapper to Derivative Free Optimisation for Least Square: https://numericalalgorithmsgroup.github.io/dfols/ @@ -32,7 +48,7 @@ def __init__( self, obj, #: ObjBase, fit_function: Callable, - minimizer_enum: Optional[AvailableMinimizers] = None, + minimizer_enum: 
AvailableMinimizers | None = None, ): # todo after constraint changes, add type hint: obj: ObjBase # noqa: E501 """Initialize the fitting engine with a `ObjBase` and an arbitrary fitting function. @@ -60,11 +76,15 @@ def fit( x: np.ndarray, y: np.ndarray, weights: np.ndarray, - model: Optional[Callable] = None, - parameters: Optional[List[Parameter]] = None, - method: str = None, - tolerance: Optional[float] = None, - max_evaluations: Optional[int] = None, + model: Callable | None = None, + parameters: List[Parameter] | None = None, + method: str | None = None, + tolerance: float | None = None, + max_evaluations: int | None = None, + progress_callback: Callable[[dict], bool | None] | None = None, + callback: Callable[[DFOCallbackState], None] | None = None, + callback_every: int = 1, + callback_on_improvement_only: bool = False, **kwargs, ) -> FitResults: """Perform a fit using the DFO-ls engine. @@ -102,9 +122,33 @@ def fit( if (weights <= 0).any(): raise ValueError('Weights must be strictly positive and non-zero.') + if not isinstance(callback_every, Integral) or isinstance(callback_every, bool): + raise ValueError('callback_every must be a positive integer.') + + if callback_every < 1: + raise ValueError('callback_every must be a positive integer.') + + # Bridge progress_callback into the DFO callback mechanism + if progress_callback is not None and callback is None: + dof = max(len(x) - len(self._cached_pars), 1) + callback = self._make_progress_adapter(progress_callback, dof) + if model is None: - model_function = self._make_model(parameters=parameters) + model_function = self._make_model( + parameters=parameters, + callback=callback, + callback_every=callback_every, + callback_on_improvement_only=callback_on_improvement_only, + ) model = model_function(x, y, weights) + elif callback is not None: + model = self._wrap_model_with_callback( + model, + self._get_callback_parameter_names(parameters), + callback, + callback_every, + callback_on_improvement_only, + ) 
self._cached_model = model self._cached_model.x = x self._cached_model.y = y @@ -124,16 +168,16 @@ def fit( self._set_parameter_fit_result(model_results, stack_status) results = self._gen_fit_results(model_results, weights) except FitError: - for key in self._cached_pars.keys(): - self._cached_pars[key].value = self._cached_pars_vals[key][0] + self._restore_parameter_values() raise except Exception as e: - for key in self._cached_pars.keys(): - self._cached_pars[key].value = self._cached_pars_vals[key][0] + self._restore_parameter_values() raise FitError(e) + finally: + global_object.stack.enabled = stack_status return results - def convert_to_pars_obj(self, par_list: Optional[list] = None): + def convert_to_pars_obj(self, par_list: List[Parameter] | None = None): """Required by interface but not needed for DFO-LS.""" pass @@ -142,7 +186,13 @@ def convert_to_par_object(obj) -> None: """Required by interface but not needed for DFO-LS.""" pass - def _make_model(self, parameters: Optional[List[Parameter]] = None) -> Callable: + def _make_model( + self, + parameters: List[Parameter] | None = None, + callback: Callable[[DFOCallbackState], None] | None = None, + callback_every: int = 1, + callback_on_improvement_only: bool = False, + ) -> Callable: """Generate a model from the supplied `fit_function` and parameters in the base object. 
Note that this makes a callable as it needs to be initialized with *x*, *y*, *weights* @@ -167,12 +217,112 @@ def _residuals(pars_values: List[float]) -> np.ndarray: dfo_pars[par_name] = pars_values[idx] return (y - fit_func(x, **dfo_pars)) * weights - return _residuals + return obj._wrap_model_with_callback( + _residuals, + list(dfo_pars.keys()), + callback, + callback_every, + callback_on_improvement_only, + ) return _make_func return _outer(self) + def _get_callback_parameter_names( + self, parameters: List[Parameter] | None = None + ) -> list[str]: + if parameters is not None: + return [MINIMIZER_PARAMETER_PREFIX + parameter.unique_name for parameter in parameters] + return [MINIMIZER_PARAMETER_PREFIX + name for name in self._cached_pars.keys()] + + @staticmethod + def _wrap_model_with_callback( + model: Callable, + parameter_names: list[str], + callback: Callable[[DFOCallbackState], None] | None, + callback_every: int, + callback_on_improvement_only: bool, + ) -> Callable: + if callback is None: + return model + + evaluation = 0 + best_objective = np.inf + best_xk = np.array([], dtype=float) + best_parameters: dict[str, float] = {} + + def wrapped_model(pars_values: List[float]) -> np.ndarray: + nonlocal evaluation, best_objective, best_xk, best_parameters + + residuals = np.asarray(model(pars_values), dtype=float) + xk = np.asarray(pars_values, dtype=float).copy() + parameters = {name: value for name, value in zip(parameter_names, xk)} + objective = float(np.dot(residuals.ravel(), residuals.ravel())) + + evaluation += 1 + improved = objective < best_objective + if improved: + best_objective = objective + best_xk = xk.copy() + best_parameters = parameters.copy() + + should_notify = evaluation % callback_every == 0 + if callback_on_improvement_only: + should_notify = should_notify and improved + + if should_notify: + callback( + DFOCallbackState( + evaluation=evaluation, + xk=xk, + residuals=residuals.copy(), + objective=objective, + parameters=parameters, + 
best_xk=best_xk.copy(), + best_objective=best_objective, + best_parameters=best_parameters.copy(), + improved=improved, + ) + ) + + return residuals + + return wrapped_model + + @staticmethod + def _make_progress_adapter( + progress_callback: Callable[[dict], bool | None], + dof: int, + ) -> Callable[['DFOCallbackState'], None]: + """Create a DFO callback that translates DFOCallbackState into + the standard progress_callback dict format used by the GUI. + + :param progress_callback: Standard progress callback (dict -> + bool|None) + :param dof: Degrees of freedom for reduced chi2 calculation + :return: DFO-compatible callback + """ + + def adapter(state: 'DFOCallbackState') -> None: + chi2 = state.best_objective + reduced_chi2 = chi2 / dof if dof > 0 else chi2 + param_snapshot = { + name[len(MINIMIZER_PARAMETER_PREFIX) :]: float(val) + for name, val in state.best_parameters.items() + } + payload = { + 'iteration': state.evaluation, + 'chi2': chi2, + 'reduced_chi2': reduced_chi2, + 'parameter_values': param_snapshot, + 'refresh_plots': False, + 'finished': False, + } + progress_callback(payload) + + return adapter + def _set_parameter_fit_result(self, fit_result, stack_status, ci: float = 0.95) -> None: """Update parameters to their final values and assign a std error to them. @@ -187,9 +337,7 @@ def _set_parameter_fit_result(self, fit_result, stack_status, ci: float = 0.95) pars = self._cached_pars if stack_status: - for name in pars.keys(): - pars[name].value = self._cached_pars_vals[name][0] - pars[name].error = self._cached_pars_vals[name][1] + self._restore_parameter_values() global_object.stack.enabled = True global_object.stack.beginMacro('Fitting routine') @@ -231,6 +379,9 @@ def _gen_fit_results(self, fit_results, weights, **kwargs) -> FitResults: results.y_err = weights results.n_evaluations = int(fit_results.nf) results.message = str(fit_results.msg) + if not results.success: + warning_message = results.message or 'DFO fit did not succeed.' 
+ warnings.warn(warning_message, UserWarning, stacklevel=2) # results.residual = results.y_obs - results.y_calc # results.goodness_of_fit = fit_results.f @@ -279,8 +430,8 @@ def _dfo_fit( @staticmethod def _prepare_kwargs( - tolerance: Optional[float] = None, - max_evaluations: Optional[int] = None, + tolerance: float | None = None, + max_evaluations: int | None = None, **kwargs, ) -> dict[str:str]: if max_evaluations is not None: diff --git a/src/easyscience/fitting/minimizers/minimizer_lmfit.py b/src/easyscience/fitting/minimizers/minimizer_lmfit.py index 3b224ae6..8bd95c63 100644 --- a/src/easyscience/fitting/minimizers/minimizer_lmfit.py +++ b/src/easyscience/fitting/minimizers/minimizer_lmfit.py @@ -4,7 +4,6 @@ import warnings from typing import Callable from typing import List -from typing import Optional import numpy as np from lmfit import Model as LMModel @@ -35,7 +34,7 @@ def __init__( self, obj, #: ObjBase, fit_function: Callable, - minimizer_enum: Optional[AvailableMinimizers] = None, + minimizer_enum: AvailableMinimizers | None = None, ): # todo after constraint changes, add type hint: obj: ObjBase # noqa: E501 """Initialize the minimizer with the `ObjBase` and the `fit_function` to be used. @@ -82,13 +81,14 @@ def fit( x: np.ndarray, y: np.ndarray, weights: np.ndarray = None, - model: Optional[LMModel] = None, - parameters: Optional[LMParameters] = None, - method: Optional[str] = None, - tolerance: Optional[float] = None, - max_evaluations: Optional[int] = None, - minimizer_kwargs: Optional[dict] = None, - engine_kwargs: Optional[dict] = None, + model: LMModel | None = None, + parameters: LMParameters | None = None, + method: str | None = None, + tolerance: float | None = None, + max_evaluations: int | None = None, + progress_callback: Callable[[dict], bool | None] | None = None, + minimizer_kwargs: dict | None = None, + engine_kwargs: dict | None = None, **kwargs, ) -> FitResults: """Perform a fit using the lmfit engine. 
@@ -145,11 +145,13 @@ def fit( if model is None: model = self._make_model() + iter_cb = self._create_iter_callback(progress_callback) model_results = model.fit( y, x=x, weights=weights, max_nfev=max_evaluations, + iter_cb=iter_cb, fit_kws=fit_kws_dict, **method_kwargs, **engine_kwargs, @@ -158,11 +160,52 @@ def fit( self._set_parameter_fit_result(model_results, stack_status) results = self._gen_fit_results(model_results) except Exception as e: - for key in self._cached_pars.keys(): - self._cached_pars[key].value = self._cached_pars_vals[key][0] + self._restore_parameter_values() raise FitError(e) + finally: + global_object.stack.enabled = stack_status return results + def _create_iter_callback( + self, + progress_callback: Callable[[dict], bool | None] | None, + ) -> Callable | None: + if progress_callback is None: + return None + + def iter_cb(params, iteration: int, residuals: np.ndarray, *args, **kwargs) -> bool: + payload = self._build_progress_payload(params, iteration, residuals) + progress_callback(payload) + return False + + return iter_cb + + def _build_progress_payload(self, params, iteration: int, residuals: np.ndarray) -> dict: + residual_array = np.asarray(residuals) + chi2 = float(np.square(residual_array).sum()) + varied_parameter_count = sum( + 1 for parameter in params.values() if getattr(parameter, 'vary', False) + ) + degrees_of_freedom = residual_array.size - varied_parameter_count + reduced_chi2 = chi2 / degrees_of_freedom if degrees_of_freedom > 0 else chi2 + + parameter_values = {} + for parameter_name, parameter in self._cached_pars.items(): + lmfit_parameter_name = f'{MINIMIZER_PARAMETER_PREFIX}{parameter_name}' + if lmfit_parameter_name in params: + parameter_values[parameter_name] = float(params[lmfit_parameter_name].value) + else: + parameter_values[parameter_name] = float(parameter.value) + + return { + 'iteration': int(iteration), + 'chi2': chi2, + 'reduced_chi2': reduced_chi2, + 'parameter_values': parameter_values, + 'refresh_plots': 
False, + 'finished': False, + } + def _get_fit_kws( self, method: str, tolerance: float, minimizer_kwargs: dict[str:str] ) -> dict[str:str]: @@ -175,7 +218,7 @@ def _get_fit_kws( minimizer_kwargs['tol'] = tolerance return minimizer_kwargs - def convert_to_pars_obj(self, parameters: Optional[List[Parameter]] = None) -> LMParameters: + def convert_to_pars_obj(self, parameters: List[Parameter] | None = None) -> LMParameters: """Create an lmfit compatible container with the `Parameters` converted from the base object. @@ -211,7 +254,7 @@ def convert_to_par_object(parameter: Parameter) -> LMParameter: brute_step=None, ) - def _make_model(self, pars: Optional[LMParameters] = None) -> LMModel: + def _make_model(self, pars: LMParameters | None = None) -> LMModel: """Generate a lmfit model from the supplied `fit_function` and parameters in the base object. @@ -261,9 +304,7 @@ def _set_parameter_fit_result(self, fit_result: ModelResult, stack_status: bool) pars = self._cached_pars if stack_status: - for name in pars.keys(): - pars[name].value = self._cached_pars_vals[name][0] - pars[name].error = self._cached_pars_vals[name][1] + self._restore_parameter_values() global_object.stack.enabled = True global_object.stack.beginMacro('Fitting routine') for name in pars.keys(): diff --git a/src/easyscience/fitting/minimizers/utils.py b/src/easyscience/fitting/minimizers/utils.py index c6d462fe..b44dab88 100644 --- a/src/easyscience/fitting/minimizers/utils.py +++ b/src/easyscience/fitting/minimizers/utils.py @@ -48,7 +48,7 @@ def __repr__(self) -> str: chi2_val = self.chi2 reduced_val = self.reduced_chi2 if not np.isfinite(chi2_val) or not np.isfinite(reduced_val): - raise ValueError + raise ValueError('Chi2 or reduced chi2 is not finite') chi2 = f'{chi2_val:.4g}' reduced = f'{reduced_val:.4g}' except Exception: diff --git a/src/easyscience/fitting/multi_fitter.py b/src/easyscience/fitting/multi_fitter.py index 6f3b9938..af8ab978 100644 --- a/src/easyscience/fitting/multi_fitter.py 
+++ b/src/easyscience/fitting/multi_fitter.py @@ -18,6 +18,10 @@ class MultiFitter(Fitter): We can fit these types of data simultaneously: - Multiple models on multiple datasets. + + The inherited ``fit`` wrapper from ``Fitter`` is used unchanged, + including support for forwarding progress callbacks to the active + minimizer. """ def __init__( diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..2f9a6768 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,14 @@ +# SPDX-FileCopyrightText: 2026 EasyScience contributors +# SPDX-License-Identifier: BSD-3-Clause + +from __future__ import annotations + +import sys +from pathlib import Path + +PROJECT_ROOT = Path(__file__).resolve().parents[1] +SRC_ROOT = PROJECT_ROOT / 'src' + +src_root_str = str(SRC_ROOT) +if src_root_str not in sys.path: + sys.path.insert(0, src_root_str) diff --git a/tests/unit/fitting/minimizers/test_minimizer_bumps.py b/tests/unit/fitting/minimizers/test_minimizer_bumps.py index 196b77dc..4d8dbaef 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_bumps.py +++ b/tests/unit/fitting/minimizers/test_minimizer_bumps.py @@ -1,14 +1,17 @@ # SPDX-FileCopyrightText: 2026 EasyScience contributors # SPDX-License-Identifier: BSD-3-Clause -import warnings +from unittest.mock import ANY from unittest.mock import MagicMock +from unittest.mock import patch import numpy as np import pytest import easyscience.fitting.minimizers.minimizer_bumps from easyscience.fitting.minimizers.minimizer_bumps import Bumps +from easyscience.fitting.minimizers.minimizer_bumps import _BumpsProgressMonitor +from easyscience.fitting.minimizers.minimizer_bumps import _StepCounterMonitor from easyscience.fitting.minimizers.utils import FitError @@ -48,9 +51,15 @@ def test_fit(self, minimizer: Bumps, monkeypatch) -> None: global_object.stack.enabled = False - mock_bumps_fit = MagicMock(return_value='fit') + # Mock FitDriver. driver.fit() returns (x, fx); driver.stderr() returns dx. 
+ mock_driver_instance = MagicMock() + mock_driver_instance.clip = MagicMock() + mock_driver_instance.fit = MagicMock(return_value=(np.array([42.0]), 0.0)) + mock_driver_instance.stderr = MagicMock(return_value=np.array([0.1])) + mock_driver_instance.monitor_runner.history.step = [0] + mock_FitDriver = MagicMock(return_value=mock_driver_instance) monkeypatch.setattr( - easyscience.fitting.minimizers.minimizer_bumps, 'bumps_fit', mock_bumps_fit + easyscience.fitting.minimizers.minimizer_bumps, 'FitDriver', mock_FitDriver ) # Prepare a mock parameter with .name = 'pmock_parm_1' @@ -73,28 +82,38 @@ def test_fit(self, minimizer: Bumps, monkeypatch) -> None: cached_par.value = 1 cached_pars = {'mock_parm_1': cached_par} minimizer._cached_pars = cached_pars + minimizer._cached_pars_vals = {'mock_parm_1': (1, 0.0)} - # Patch _set_parameter_fit_result to a real function that will not raise KeyError + # Patch _set_parameter_fit_result def fake_set_parameter_fit_result(fit_result, stack_status, par_list): - # Simulate what the real function does: update _cached_pars for index, name in enumerate([par.name for par in par_list]): - dict_name = name[len('p') :] # Remove prefix 'p' - minimizer._cached_pars[dict_name].value = 42 # Arbitrary value + dict_name = name[len('p') :] + minimizer._cached_pars[dict_name].value = fit_result.x[index] minimizer._set_parameter_fit_result = fake_set_parameter_fit_result + mock_fitclass = MagicMock() + mock_fitclass.id = 'amoeba' + minimizer._resolve_fitclass = MagicMock(return_value=mock_fitclass) + # Then result = minimizer.fit(x=1.0, y=2.0, weights=1) # Expect assert result == 'gen_fit_results' - mock_bumps_fit.assert_called_once_with(mock_FitProblem_instance, method='amoeba') + mock_FitDriver.assert_called_once() + mock_driver_instance.clip.assert_called_once() + mock_driver_instance.fit.assert_called_once() minimizer._make_model.assert_called_once_with(parameters=None) - minimizer._gen_fit_results.assert_called_once_with( - 'fit', - 
max_evaluations=None, - tolerance=None, - ) + # _gen_fit_results is called with the OptimizeResult built from driver.fit() + minimizer._gen_fit_results.assert_called_once() + passed_result = minimizer._gen_fit_results.call_args.args[0] + assert np.array_equal(passed_result.x, np.array([42.0])) + assert np.array_equal(passed_result.dx, np.array([0.1])) + assert minimizer._gen_fit_results.call_args.kwargs == { + 'max_evaluations': None, + 'tolerance': None, + } mock_model_function.assert_called_once_with(1.0, 2.0, 1) mock_FitProblem.assert_called_once_with(mock_model) @@ -157,8 +176,8 @@ def test_set_parameter_fit_result_no_stack_status(self, minimizer: Bumps): minimizer._cached_model = mock_cached_model mock_fit_result = MagicMock() - mock_fit_result.x = [1.0, 2.0] - mock_fit_result.dx = [0.1, 0.2] + mock_fit_result.x = np.array([1.0, 2.0]) + mock_fit_result.dx = np.array([0.1, 0.2]) # The new argument: par_list (list of mock parameters) mock_par_a = MagicMock() @@ -238,6 +257,386 @@ def test_gen_fit_results(self, minimizer: Bumps, monkeypatch): 'x', minimizer_parameters={'ppar_1': 'par_value_1', 'ppar_2': 'par_value_2'} ) + @pytest.mark.parametrize( + 'n_evaluations, max_evaluations, expected_success', + [ + (1, 3, True), # last step (1) < budget-1 (2) => success + (2, 3, False), # last step (2) == budget-1 (2) => budget consumed => failure + (3, 3, False), # last step (3) > budget-1 (2) => failure + (0, 1, False), # 0 >= 0 => failure (budget of 1, step counter 0-indexed) + (5, None, True), # no budget => always success + ], + ) + def test_gen_fit_results_max_evaluations_boundary( + self, minimizer: Bumps, monkeypatch, n_evaluations, max_evaluations, expected_success + ): + """Bumps step counter is 0-indexed so the last step of a budget + of N is N-1. 
Verify the boundary condition in _gen_fit_results.""" + mock_domain_fit_results = MagicMock() + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, + 'FitResults', + MagicMock(return_value=mock_domain_fit_results), + ) + + mock_cached_model = MagicMock() + mock_cached_model.pars = {'ppar_1': 0} + minimizer._cached_model = mock_cached_model + + mock_par = MagicMock() + mock_par.value = 1.0 + minimizer._cached_pars = {'par_1': mock_par} + minimizer._p_0 = 'p_0' + minimizer._eval_counter = MagicMock(count=n_evaluations) + minimizer.evaluate = MagicMock(return_value='evaluate') + + mock_fit_result = MagicMock() + mock_fit_result.success = True + mock_fit_result.nit = n_evaluations + + minimizer._gen_fit_results(mock_fit_result, max_evaluations=max_evaluations) + + assert mock_domain_fit_results.success is expected_success + + def test_resolve_fitclass_valid(self, minimizer: Bumps) -> None: + # When Then + fitclass = Bumps._resolve_fitclass('lm') + + # Expect + assert fitclass.id == 'lm' + + def test_resolve_fitclass_invalid(self, minimizer: Bumps) -> None: + # When Then Expect + with pytest.raises(FitError): + Bumps._resolve_fitclass('nonexistent_method') + + def test_fit_progress_callback(self, minimizer: Bumps, monkeypatch) -> None: + # When + from easyscience import global_object + + global_object.stack.enabled = False + + progress_callback = MagicMock(return_value=True) + + mock_driver_instance = MagicMock() + mock_driver_instance.clip = MagicMock() + mock_driver_instance.fit = MagicMock(return_value=(np.array([42.0]), 0.0)) + mock_driver_instance.stderr = MagicMock(return_value=np.array([0.1])) + mock_driver_instance.monitor_runner.history.step = [0] + mock_FitDriver = MagicMock(return_value=mock_driver_instance) + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'FitDriver', mock_FitDriver + ) + + mock_bumps_param = MagicMock() + mock_bumps_param.name = 'pmock_parm_1' + mock_FitProblem_instance = MagicMock() + 
mock_FitProblem_instance._parameters = [mock_bumps_param] + mock_FitProblem = MagicMock(return_value=mock_FitProblem_instance) + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'FitProblem', mock_FitProblem + ) + + mock_model = MagicMock() + mock_model_function = MagicMock(return_value=mock_model) + minimizer._make_model = MagicMock(return_value=mock_model_function) + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + + cached_par = MagicMock() + cached_par.value = 1 + minimizer._cached_pars = {'mock_parm_1': cached_par} + minimizer._cached_pars_vals = {'mock_parm_1': (1, 0.0)} + + minimizer._resolve_fitclass = MagicMock(return_value=MagicMock(id='amoeba')) + + # Then + result = minimizer.fit(x=1.0, y=2.0, weights=1, progress_callback=progress_callback) + + # Expect - FitDriver was called with a monitor list containing our monitor + assert result == 'gen_fit_results' + driver_call_kwargs = mock_FitDriver.call_args + monitors = driver_call_kwargs.kwargs.get('monitors', driver_call_kwargs[1].get('monitors')) + assert len(monitors) == 2 + assert isinstance(monitors[0], _StepCounterMonitor) + assert isinstance(monitors[1], _BumpsProgressMonitor) + assert monitors[1]._problem is mock_FitProblem_instance + assert monitors[1]._callback is progress_callback + assert monitors[1]._payload_builder == minimizer._build_progress_payload + + def test_fit_uses_supplied_model_and_optional_kwargs( + self, minimizer: Bumps, monkeypatch + ) -> None: + from easyscience import global_object + + global_object.stack.enabled = False + + mock_driver_instance = MagicMock() + mock_driver_instance.clip = MagicMock() + mock_driver_instance.fit = MagicMock(return_value=(np.array([3.0]), 0.0)) + mock_driver_instance.stderr = MagicMock(return_value=np.array([0.1])) + mock_driver_instance.monitor_runner.history.step = [0] + mock_FitDriver = MagicMock(return_value=mock_driver_instance) + 
monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'FitDriver', mock_FitDriver + ) + + mock_bumps_param = MagicMock() + mock_bumps_param.name = 'pmock_parm_1' + mock_problem = MagicMock() + mock_problem._parameters = [mock_bumps_param] + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, + 'FitProblem', + MagicMock(return_value=mock_problem), + ) + + minimizer._make_model = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + minimizer._resolve_fitclass = MagicMock(return_value=MagicMock(id='amoeba')) + minimizer._set_parameter_fit_result = MagicMock() + minimizer._cached_pars = {'mock_parm_1': MagicMock(value=1.0)} + minimizer._cached_pars_vals = {'mock_parm_1': (1.0, 0.0)} + + supplied_model = MagicMock() + minimizer_kwargs = {'existing_option': 'minimizer'} + engine_kwargs = {'engine_option': 'engine'} + + result = minimizer.fit( + x=np.array([1.0]), + y=np.array([2.0]), + weights=np.array([1.0]), + model=supplied_model, + tolerance=0.25, + max_evaluations=7, + minimizer_kwargs=minimizer_kwargs, + engine_kwargs=engine_kwargs, + ) + + assert result == 'gen_fit_results' + minimizer._make_model.assert_not_called() + fit_driver_kwargs = mock_FitDriver.call_args.kwargs + assert fit_driver_kwargs['problem'] is mock_problem + assert fit_driver_kwargs['existing_option'] == 'minimizer' + assert fit_driver_kwargs['engine_option'] == 'engine' + assert fit_driver_kwargs['ftol'] == 0.25 + assert fit_driver_kwargs['xtol'] == 0.25 + assert fit_driver_kwargs['steps'] == 7 + mock_driver_instance.fit.assert_called_once() + + def test_fit_rejects_non_callable_progress_callback( + self, minimizer: Bumps, monkeypatch + ) -> None: + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, + 'FitProblem', + MagicMock(return_value=MagicMock()), + ) + minimizer._resolve_fitclass = MagicMock(return_value=MagicMock(id='amoeba')) + + with pytest.raises(ValueError, match='progress_callback must be 
callable'): + minimizer.fit( + x=np.array([1.0]), + y=np.array([2.0]), + weights=np.array([1.0]), + model=MagicMock(), + progress_callback='not-callable', + ) + + def test_build_progress_payload(self, minimizer: Bumps) -> None: + # When + mock_problem = MagicMock() + mock_problem.chisq.side_effect = [25.0, 12.5] + mock_problem.labels.return_value = ['palpha', 'pbeta'] + mock_problem.getp.return_value = np.array([1.0, 2.0]) + + point = np.array([1.0, 2.0]) + nllf = 12.5 + + # Then + payload = minimizer._build_progress_payload(mock_problem, 7, point, nllf) + + # Expect + assert payload == { + 'iteration': 7, + 'chi2': 25.0, + 'reduced_chi2': 12.5, + 'parameter_values': {'alpha': 1.0, 'beta': 2.0}, + 'refresh_plots': False, + 'finished': False, + } + mock_problem.chisq.assert_any_call(nllf=nllf, norm=False) + mock_problem.chisq.assert_any_call(nllf=nllf, norm=True) + # setp should NOT be called – the monitor avoids model re-evaluation + mock_problem.setp.assert_not_called() + + def test_build_progress_payload_keys_match_lmfit(self, minimizer: Bumps) -> None: + # When + mock_problem = MagicMock() + mock_problem.chisq.side_effect = [10.0, 5.0] + mock_problem.labels.return_value = ['pa'] + mock_problem.getp.return_value = np.array([5.0]) + + minimizer._cached_pars = {'a': MagicMock(value=5.0)} + + # Then + payload = minimizer._build_progress_payload(mock_problem, 1, np.array([5.0]), nllf=5.0) + + # Expect - same keys as LMFit payload + expected_keys = { + 'iteration', + 'chi2', + 'reduced_chi2', + 'parameter_values', + 'refresh_plots', + 'finished', + } + assert set(payload.keys()) == expected_keys + assert isinstance(payload['iteration'], int) + assert isinstance(payload['chi2'], float) + assert isinstance(payload['reduced_chi2'], float) + assert isinstance(payload['parameter_values'], dict) + assert payload['refresh_plots'] is False + assert payload['finished'] is False + + def test_build_progress_payload_reduced_chi2_positive_dof(self, minimizer: Bumps) -> None: + # 
When - use BUMPS chisq helpers for raw and normalized values + mock_problem = MagicMock() + mock_problem.chisq.side_effect = [10.0, 5.0] + mock_problem.labels.return_value = ['pa'] + mock_problem.getp.return_value = np.array([5.0]) + + minimizer._cached_pars = {'a': MagicMock(value=5.0)} + + # Then + payload = minimizer._build_progress_payload(mock_problem, 1, np.array([5.0]), nllf=5.0) + + # Expect + assert payload['chi2'] == 10.0 + assert payload['reduced_chi2'] == 5.0 + assert mock_problem.chisq.call_args_list == [ + ((), {'nllf': 5.0, 'norm': False}), + ((), {'nllf': 5.0, 'norm': True}), + ] + + def test_current_parameter_snapshot(self, minimizer: Bumps) -> None: + # When + mock_problem = MagicMock() + mock_problem.labels.return_value = ['palpha', 'pbeta'] + + point = np.array([1.5, 2.5]) + + # Then + snapshot = minimizer._current_parameter_snapshot(mock_problem, point) + + # Expect + assert snapshot == {'alpha': 1.5, 'beta': 2.5} + + @pytest.mark.parametrize('par_list', [None, [MagicMock(unique_name='alpha')]]) + def test_convert_to_pars_obj_optional_parameter_list( + self, minimizer: Bumps, par_list, monkeypatch + ) -> None: + object_parameters = [MagicMock(unique_name='beta')] + minimizer._object = MagicMock() + minimizer._object.get_fit_parameters = MagicMock(return_value=object_parameters) + monkeypatch.setattr( + Bumps, + 'convert_to_par_object', + staticmethod(lambda parameter: parameter.unique_name), + ) + + converted = minimizer.convert_to_pars_obj(par_list) + + expected_parameters = object_parameters if par_list is None else par_list + assert converted == [parameter.unique_name for parameter in expected_parameters] + if par_list is None: + minimizer._object.get_fit_parameters.assert_called_once_with() + else: + minimizer._object.get_fit_parameters.assert_not_called() + + def test_make_model_without_parameters_uses_cached_parameters( + self, minimizer: Bumps, monkeypatch + ) -> None: + minimizer._generate_fit_function = MagicMock( + 
return_value=MagicMock(return_value=np.array([2.0])) + ) + minimizer._cached_pars = {'alpha': MagicMock(value=1.0)} + minimizer.convert_to_par_object = MagicMock(return_value='converted-alpha') + + mock_curve = MagicMock(return_value='curve') + monkeypatch.setattr(easyscience.fitting.minimizers.minimizer_bumps, 'Curve', mock_curve) + + model = minimizer._make_model() + curve = model(np.array([1.0]), np.array([2.0]), np.array([3.0])) + + assert curve == 'curve' + minimizer.convert_to_par_object.assert_called_once_with(minimizer._cached_pars['alpha']) + assert mock_curve.call_args.kwargs['palpha'] == 'converted-alpha' + + def test_bumps_progress_monitor_calls_callback(self, minimizer: Bumps) -> None: + # When + callback = MagicMock(return_value=True) + mock_problem = MagicMock() + payload_builder = MagicMock(return_value={'iteration': 1}) + + monitor = _BumpsProgressMonitor(mock_problem, callback, payload_builder) + + mock_history = MagicMock() + mock_history.step = [5] + mock_history.point = [np.array([1.0])] + mock_history.value = [42.0] + + # Then + monitor(mock_history) + + # Expect + callback.assert_called_once_with({'iteration': 1}) + payload_builder.assert_called_once_with( + problem=mock_problem, + iteration=5, + point=ANY, + nllf=42.0, + ) + + def test_fit_exception_restores_values(self, minimizer: Bumps, monkeypatch) -> None: + # When + from easyscience import global_object + + global_object.stack.enabled = False + + from easyscience.variable import Parameter + + parameter = MagicMock(Parameter) + parameter.value = 10.0 + minimizer._cached_pars = {'alpha': parameter} + minimizer._cached_pars_vals = {'alpha': (1.0, None)} + + mock_driver_instance = MagicMock() + mock_driver_instance.fit.side_effect = RuntimeError('something broke') + mock_driver_instance.clip = MagicMock() + mock_FitDriver = MagicMock(return_value=mock_driver_instance) + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'FitDriver', mock_FitDriver + ) + + 
mock_FitProblem_instance = MagicMock() + mock_FitProblem_instance._parameters = [] + mock_FitProblem = MagicMock(return_value=mock_FitProblem_instance) + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'FitProblem', mock_FitProblem + ) + + mock_model = MagicMock() + mock_model_function = MagicMock(return_value=mock_model) + minimizer._make_model = MagicMock(return_value=mock_model_function) + minimizer._resolve_fitclass = MagicMock(return_value=MagicMock(id='amoeba')) + + # Then Expect + with pytest.raises(FitError): + minimizer.fit(x=1.0, y=2.0, weights=1) + + assert parameter.value == 1.0 + def test_gen_fit_results_uses_nit_for_budget_check(self, minimizer: Bumps, monkeypatch): mock_domain_fit_results = MagicMock() mock_FitResults = MagicMock(return_value=mock_domain_fit_results) diff --git a/tests/unit/fitting/minimizers/test_minimizer_dfo.py b/tests/unit/fitting/minimizers/test_minimizer_dfo.py index 4585607c..7d67ba1c 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_dfo.py +++ b/tests/unit/fitting/minimizers/test_minimizer_dfo.py @@ -9,6 +9,7 @@ import easyscience.fitting.minimizers.minimizer_dfo from easyscience.fitting.minimizers.minimizer_dfo import DFO +from easyscience.fitting.minimizers.minimizer_dfo import DFOCallbackState from easyscience.fitting.minimizers.utils import FitError from easyscience.variable import Parameter @@ -67,11 +68,105 @@ def test_fit(self, minimizer: DFO) -> None: # Expect assert result == 'gen_fit_results' minimizer._dfo_fit.assert_called_once_with(cached_pars, mock_model) - minimizer._make_model.assert_called_once_with(parameters=None) + minimizer._make_model.assert_called_once_with( + parameters=None, + callback=None, + callback_every=1, + callback_on_improvement_only=False, + ) minimizer._set_parameter_fit_result.assert_called_once_with('fit', False) minimizer._gen_fit_results.assert_called_once_with('fit', 1) mock_model_function.assert_called_once_with(1.0, 2.0, 1) + def 
test_fit_passes_callback_to_model_builder(self, minimizer: DFO) -> None: + from easyscience import global_object + + global_object.stack.enabled = False + + mock_model = MagicMock() + mock_model_function = MagicMock(return_value=mock_model) + minimizer._make_model = MagicMock(return_value=mock_model_function) + minimizer._dfo_fit = MagicMock(return_value='fit') + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + + cached_par = MagicMock() + cached_par.value = 1 + minimizer._cached_pars = {'mock_parm_1': cached_par} + + callback = MagicMock() + + minimizer.fit(x=1.0, y=2.0, weights=1, callback=callback) + + minimizer._make_model.assert_called_once_with( + parameters=None, + callback=callback, + callback_every=1, + callback_on_improvement_only=False, + ) + + def test_fit_wraps_supplied_model_with_explicit_callback(self, minimizer: DFO) -> None: + from easyscience import global_object + + global_object.stack.enabled = False + + supplied_model = MagicMock() + wrapped_model = MagicMock() + explicit_callback = MagicMock() + + minimizer._make_model = MagicMock() + minimizer._wrap_model_with_callback = MagicMock(return_value=wrapped_model) + minimizer._get_callback_parameter_names = MagicMock(return_value=['palpha']) + minimizer._dfo_fit = MagicMock(return_value='fit') + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + minimizer._cached_pars = {'alpha': MagicMock(value=1.0)} + + result = minimizer.fit( + x=np.array([1.0]), + y=np.array([2.0]), + weights=np.array([1.0]), + model=supplied_model, + callback=explicit_callback, + ) + + assert result == 'gen_fit_results' + minimizer._make_model.assert_not_called() + minimizer._wrap_model_with_callback.assert_called_once_with( + supplied_model, + ['palpha'], + explicit_callback, + 1, + False, + ) + minimizer._dfo_fit.assert_called_once_with(minimizer._cached_pars, wrapped_model) + 
+ def test_fit_uses_supplied_model_without_callback(self, minimizer: DFO) -> None: + from easyscience import global_object + + global_object.stack.enabled = False + + supplied_model = MagicMock() + + minimizer._make_model = MagicMock() + minimizer._wrap_model_with_callback = MagicMock() + minimizer._dfo_fit = MagicMock(return_value='fit') + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + minimizer._cached_pars = {'alpha': MagicMock(value=1.0)} + + result = minimizer.fit( + x=np.array([1.0]), + y=np.array([2.0]), + weights=np.array([1.0]), + model=supplied_model, + ) + + assert result == 'gen_fit_results' + minimizer._make_model.assert_not_called() + minimizer._wrap_model_with_callback.assert_not_called() + minimizer._dfo_fit.assert_called_once_with(minimizer._cached_pars, supplied_model) + def test_generate_fit_function(self, minimizer: DFO) -> None: # When minimizer._original_fit_function = MagicMock(return_value='fit_function_result') @@ -143,6 +238,106 @@ def test_make_model(self, minimizer: DFO) -> None: 'pmock_parm_2': 2222, } + def test_make_model_callback(self, minimizer: DFO) -> None: + mock_fit_function = MagicMock(return_value=np.array([11, 22])) + minimizer._generate_fit_function = MagicMock(return_value=mock_fit_function) + + mock_parm_1 = MagicMock() + mock_parm_1.unique_name = 'mock_parm_1' + mock_parm_1.value = 1000.0 + mock_parm_2 = MagicMock() + mock_parm_2.unique_name = 'mock_parm_2' + mock_parm_2.value = 2000.0 + + callback = MagicMock() + + model = minimizer._make_model(parameters=[mock_parm_1, mock_parm_2], callback=callback) + residuals_for_model = model( + x=np.array([1, 2]), + y=np.array([10, 20]), + weights=np.array([1 / 100, 1 / 200]), + ) + + residuals = residuals_for_model(np.array([1111, 2222])) + + assert all(np.array([-0.01, -0.01]) == residuals) + callback.assert_called_once() + state = callback.call_args[0][0] + assert isinstance(state, DFOCallbackState) + 
assert state.evaluation == 1 + assert state.improved == True + assert state.objective == pytest.approx(0.0002) + assert all(state.xk == np.array([1111, 2222])) + assert all(state.residuals == np.array([-0.01, -0.01])) + assert state.parameters == { + 'pmock_parm_1': 1111.0, + 'pmock_parm_2': 2222.0, + } + assert all(state.best_xk == np.array([1111, 2222])) + assert state.best_parameters == { + 'pmock_parm_1': 1111.0, + 'pmock_parm_2': 2222.0, + } + + def test_make_model_callback_every(self, minimizer: DFO) -> None: + mock_fit_function = MagicMock(return_value=np.array([11, 22])) + minimizer._generate_fit_function = MagicMock(return_value=mock_fit_function) + + mock_parm_1 = MagicMock() + mock_parm_1.unique_name = 'mock_parm_1' + mock_parm_1.value = 1000.0 + mock_parm_2 = MagicMock() + mock_parm_2.unique_name = 'mock_parm_2' + mock_parm_2.value = 2000.0 + + callback = MagicMock() + + model = minimizer._make_model( + parameters=[mock_parm_1, mock_parm_2], + callback=callback, + callback_every=2, + ) + residuals_for_model = model( + x=np.array([1, 2]), + y=np.array([10, 20]), + weights=np.array([1 / 100, 1 / 200]), + ) + + residuals_for_model(np.array([1111, 2222])) + residuals_for_model(np.array([1222, 2333])) + + callback.assert_called_once() + state = callback.call_args[0][0] + assert state.evaluation == 2 + assert all(state.xk == np.array([1222, 2333])) + + def test_make_model_without_parameters_uses_cached_parameters(self, minimizer: DFO) -> None: + mock_fit_function = MagicMock(return_value=np.array([11.0])) + minimizer._generate_fit_function = MagicMock(return_value=mock_fit_function) + minimizer._cached_pars = {'alpha': MagicMock(value=1000.0)} + + model = minimizer._make_model() + residuals_for_model = model( + x=np.array([1.0]), + y=np.array([10.0]), + weights=np.array([0.5]), + ) + + residuals_for_model(np.array([1111.0])) + + assert mock_fit_function.call_args.kwargs == {'palpha': 1111.0} + + @pytest.mark.parametrize('callback_every', [0, 1.3]) + def 
test_fit_callback_every_must_be_positive(self, minimizer: DFO, callback_every) -> None:
+        with pytest.raises(ValueError, match='callback_every must be a positive integer'):
+            minimizer.fit(
+                x=np.array([1.0]),
+                y=np.array([1.0]),
+                weights=np.array([1.0]),
+                callback=MagicMock(),
+                callback_every=callback_every,
+            )
+
     def test_set_parameter_fit_result_no_stack_status(self, minimizer: DFO):
         # When
         minimizer._cached_pars = {
@@ -261,6 +456,49 @@ def test_gen_fit_results_maxfun_warning_sets_success_false_and_warns(
         assert domain_fit_results.n_evaluations == 50
         assert domain_fit_results.message == 'Objective has been called MAXFUN times'
 
+    def test_gen_fit_results_maxfun_warning_preserves_message(self, minimizer: DFO, monkeypatch):
+        mock_domain_fit_results = MagicMock()
+        mock_FitResults = MagicMock(return_value=mock_domain_fit_results)
+        monkeypatch.setattr(
+            easyscience.fitting.minimizers.minimizer_dfo, 'FitResults', mock_FitResults
+        )
+
+        mock_fit_result = MagicMock()
+        mock_fit_result.EXIT_SUCCESS = 0
+        mock_fit_result.EXIT_MAXFUN_WARNING = 1
+        mock_fit_result.flag = 1  # MAXFUN_WARNING
+        mock_fit_result.nf = 50
+        mock_fit_result.msg = 'Objective has been called MAXFUN times'
+
+        mock_cached_model = MagicMock()
+        mock_cached_model.x = 'x'
+        mock_cached_model.y = 'y'
+        minimizer._cached_model = mock_cached_model
+
+        mock_cached_par_1 = MagicMock()
+        mock_cached_par_1.value = 'v1'
+        minimizer._cached_pars = {'par_1': mock_cached_par_1}
+        minimizer._p_0 = 'p_0'
+        minimizer.evaluate = MagicMock(return_value='evaluate')
+
+        with pytest.warns(UserWarning, match='Objective has been called MAXFUN times'):
+            domain_fit_results = minimizer._gen_fit_results(mock_fit_result, 'weights')
+
+        assert domain_fit_results.success == False
+        assert domain_fit_results.n_evaluations == 50
+        assert domain_fit_results.message == 'Objective has been called MAXFUN times'
+
+    def test_dfo_fit_allows_maxfun_warning(self, minimizer: DFO, monkeypatch) -> None:
+        mock_result = MagicMock()
+        
mock_result.EXIT_SUCCESS = 0
+        mock_result.EXIT_MAXFUN_WARNING = 1
+        mock_result.flag = 1  # MAXFUN warning: tolerated, must not raise FitError
+        mock_solve = MagicMock(return_value=mock_result)
+        monkeypatch.setattr(
+            easyscience.fitting.minimizers.minimizer_dfo.dfols, 'solve', mock_solve
+        )
+        minimizer._dfo_fit({'par': MagicMock(value=1.0, min=-10.0, max=10.0)}, MagicMock())
+
     def test_gen_fit_results_success_does_not_warn(self, minimizer: DFO, monkeypatch):
         mock_domain_fit_results = MagicMock()
         mock_FitResults = MagicMock(return_value=mock_domain_fit_results)
         monkeypatch.setattr(
             easyscience.fitting.minimizers.minimizer_dfo, 'FitResults', mock_FitResults
         )
@@ -407,7 +645,7 @@ def test_fit_generic_exception_resets_parameters_and_raises_fit_error(
         parameter values to cached originals and re-raise as FitError."""
         from easyscience import global_object
 
-        global_object.stack.enabled = False
+        global_object.stack.enabled = True
 
         mock_model = MagicMock()
         mock_model_function = MagicMock(return_value=mock_model)
@@ -426,6 +664,7 @@ def test_fit_generic_exception_resets_parameters_and_raises_fit_error(
 
         assert cached_par_1.value == 1.0
         assert cached_par_2.value == 2.0
+        assert global_object.stack.enabled is True
 
     def test_fit_fit_error_resets_parameters_and_reraises(self, minimizer: DFO) -> None:
         """When _dfo_fit raises FitError, fit() must reset parameter values and re-raise it."""
@@ -469,3 +708,143 @@ def test_dfo_fit_exception(self, minimizer: DFO, monkeypatch):
         # Then Expect
         with pytest.raises(FitError):
             minimizer._dfo_fit(pars, 'model', **kwargs)
+
+    def test_progress_callback_creates_adapter_when_no_explicit_callback(
+        self, minimizer: DFO
+    ) -> None:
+        """When progress_callback is provided without an explicit callback,
+        fit() should auto-create a DFO callback adapter."""
+        from easyscience import global_object
+
+        global_object.stack.enabled = False
+
+        mock_model = MagicMock()
+        mock_model_function = MagicMock(return_value=mock_model)
+        minimizer._make_model = MagicMock(return_value=mock_model_function)
+        minimizer._dfo_fit = MagicMock(return_value='fit')
+        minimizer._set_parameter_fit_result = MagicMock()
+        minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results')
+
+        cached_par = 
MagicMock() + cached_par.value = 1 + minimizer._cached_pars = {'mock_parm_1': cached_par} + + progress_cb = MagicMock() + + minimizer.fit( + x=np.array([1.0, 2.0, 3.0]), + y=np.array([1.0, 2.0, 3.0]), + weights=np.array([1.0, 1.0, 1.0]), + progress_callback=progress_cb, + ) + + # The adapter should have been passed as callback to _make_model + call_kwargs = minimizer._make_model.call_args[1] + assert call_kwargs['callback'] is not None + assert callable(call_kwargs['callback']) + + def test_progress_callback_not_used_when_explicit_callback_given(self, minimizer: DFO) -> None: + """When both progress_callback and callback are given, the explicit + callback takes precedence and progress_callback is ignored.""" + from easyscience import global_object + + global_object.stack.enabled = False + + mock_model = MagicMock() + mock_model_function = MagicMock(return_value=mock_model) + minimizer._make_model = MagicMock(return_value=mock_model_function) + minimizer._dfo_fit = MagicMock(return_value='fit') + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + + cached_par = MagicMock() + cached_par.value = 1 + minimizer._cached_pars = {'mock_parm_1': cached_par} + + progress_cb = MagicMock() + explicit_cb = MagicMock() + + minimizer.fit( + x=np.array([1.0, 2.0, 3.0]), + y=np.array([1.0, 2.0, 3.0]), + weights=np.array([1.0, 1.0, 1.0]), + progress_callback=progress_cb, + callback=explicit_cb, + ) + + call_kwargs = minimizer._make_model.call_args[1] + assert call_kwargs['callback'] is explicit_cb + + @pytest.mark.parametrize( + ('parameters', 'expected_names'), + [ + ([MagicMock(unique_name='alpha')], ['palpha']), + (None, ['pbeta']), + ], + ) + def test_get_callback_parameter_names_optional_parameters( + self, minimizer: DFO, parameters, expected_names + ) -> None: + minimizer._cached_pars = {'beta': MagicMock(value=1.0)} + + parameter_names = minimizer._get_callback_parameter_names(parameters) + + assert 
parameter_names == expected_names + + def test_wrap_model_with_callback_improvement_only(self, minimizer: DFO) -> None: + callback = MagicMock() + wrapped_model = minimizer._wrap_model_with_callback( + lambda pars_values: np.asarray([pars_values[0] - 1.0]), + ['palpha'], + callback, + callback_every=1, + callback_on_improvement_only=True, + ) + + wrapped_model([0.5]) + + callback.assert_called_once() + assert callback.call_args.args[0].improved is True + + def test_prepare_kwargs_with_optional_arguments(self, minimizer: DFO) -> None: + kwargs = minimizer._prepare_kwargs(tolerance=0.05, max_evaluations=11, keep=True) + + assert kwargs == { + 'keep': True, + 'maxfun': 11, + 'rhoend': 0.05, + } + + def test_prepare_kwargs_rejects_large_tolerance(self, minimizer: DFO) -> None: + with pytest.raises(ValueError, match='Tolerance must be equal or smaller than 0.1'): + minimizer._prepare_kwargs(tolerance=0.2) + + def test_make_progress_adapter_payload_format(self) -> None: + """The adapter must produce the standard progress payload dict.""" + progress_cb = MagicMock() + dof = 5 + + adapter = DFO._make_progress_adapter(progress_cb, dof) + + state = DFOCallbackState( + evaluation=10, + xk=np.array([1.0, 2.0]), + residuals=np.array([0.1, 0.2]), + objective=0.05, + parameters={'pmock_parm_1': 1.0, 'pmock_parm_2': 2.0}, + best_xk=np.array([1.0, 2.0]), + best_objective=0.04, + best_parameters={'pmock_parm_1': 1.0, 'pmock_parm_2': 2.0}, + improved=True, + ) + + adapter(state) + + progress_cb.assert_called_once() + payload = progress_cb.call_args[0][0] + assert payload['iteration'] == 10 + assert payload['chi2'] == 0.04 # best_objective + assert payload['reduced_chi2'] == pytest.approx(0.04 / 5) + assert payload['parameter_values'] == {'mock_parm_1': 1.0, 'mock_parm_2': 2.0} + assert payload['refresh_plots'] is False + assert payload['finished'] is False diff --git a/tests/unit/fitting/minimizers/test_minimizer_lmfit.py b/tests/unit/fitting/minimizers/test_minimizer_lmfit.py index 
53f3ec9d..448cc7ea 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_lmfit.py +++ b/tests/unit/fitting/minimizers/test_minimizer_lmfit.py @@ -130,7 +130,7 @@ def test_fit(self, minimizer: LMFit) -> None: # Expect assert result == 'gen_fit_results' mock_model.fit.assert_called_once_with( - 2.0, x=1.0, weights=1, max_nfev=None, fit_kws={}, method='leastsq' + 2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={}, method='leastsq' ) minimizer._make_model.assert_called_once_with() minimizer._set_parameter_fit_result.assert_called_once_with('fit', False) @@ -149,7 +149,7 @@ def test_fit_model(self, minimizer: LMFit) -> None: # Expect mock_model.fit.assert_called_once_with( - 2.0, x=1.0, weights=1, max_nfev=None, fit_kws={}, method='leastsq' + 2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={}, method='leastsq' ) minimizer._make_model.assert_not_called() @@ -168,7 +168,7 @@ def test_fit_method(self, minimizer: LMFit) -> None: # Expect mock_model.fit.assert_called_once_with( - 2.0, x=1.0, weights=1, max_nfev=None, fit_kws={}, method='method_passed' + 2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={}, method='method_passed' ) minimizer.supported_methods.assert_called_once_with() @@ -195,11 +195,74 @@ def test_fit_kwargs(self, minimizer: LMFit) -> None: x=1.0, weights=1, max_nfev=None, + iter_cb=None, fit_kws={'minimizer_key': 'minimizer_val'}, method='leastsq', engine_key='engine_val', ) + def test_fit_progress_callback(self, minimizer: LMFit) -> None: + # When + progress_callback = MagicMock(return_value=True) + mock_model = MagicMock() + mock_model.fit = MagicMock(return_value='fit') + minimizer._make_model = MagicMock(return_value=mock_model) + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + + # Then + minimizer.fit(x=1.0, y=2.0, weights=1, progress_callback=progress_callback) + + # Expect + assert mock_model.fit.call_count == 1 + iter_cb = 
mock_model.fit.call_args.kwargs['iter_cb'] + assert callable(iter_cb) + + def test_create_iter_callback_no_callback(self, minimizer: LMFit) -> None: + # When Then Expect + assert minimizer._create_iter_callback(None) is None + + def test_create_iter_callback_invokes_progress(self, minimizer: LMFit) -> None: + # When + progress_callback = MagicMock(return_value=False) + iter_cb = minimizer._create_iter_callback(progress_callback) + + # Then + result = iter_cb(MagicMock(), 5, np.array([1.0, -2.0])) + + # Expect — progress callback is notified, but its return value is ignored + progress_callback.assert_called_once() + assert result is False + + def test_build_progress_payload(self, minimizer: LMFit) -> None: + # When + parameter_a = MagicMock(Parameter) + parameter_a.value = 1.5 + parameter_b = MagicMock(Parameter) + parameter_b.value = 2.5 + minimizer._cached_pars = {'alpha': parameter_a, 'beta': parameter_b} + + mock_param_alpha = MagicMock() + mock_param_alpha.value = 1.0 + mock_param_alpha.vary = True + mock_param_beta = MagicMock() + mock_param_beta.value = 2.0 + mock_param_beta.vary = False + params = {'palpha': mock_param_alpha, 'pbeta': mock_param_beta} + + # Then + payload = minimizer._build_progress_payload(params, 7, np.array([3.0, 4.0])) + + # Expect + assert payload == { + 'iteration': 7, + 'chi2': 25.0, + 'reduced_chi2': 25.0, + 'parameter_values': {'alpha': 1.0, 'beta': 2.0}, + 'refresh_plots': False, + 'finished': False, + } + def test_fit_exception(self, minimizer: LMFit) -> None: # When minimizer._make_model = MagicMock(side_effect=Exception('Exception')) diff --git a/tests/unit/fitting/test_fitter.py b/tests/unit/fitting/test_fitter.py index 73f5a12a..702f5e59 100644 --- a/tests/unit/fitting/test_fitter.py +++ b/tests/unit/fitting/test_fitter.py @@ -212,6 +212,31 @@ def test_fit(self, fitter: Fitter): assert fitter._dependent_dims == 'dims' assert fitter._fit_function == self.mock_fit_function + def test_fit_progress_callback(self, fitter: Fitter): 
+ # When + fitter._precompute_reshaping = MagicMock( + return_value=('x_fit', 'x_new', 'y_new', 'weights', 'dims') + ) + fitter._fit_function_wrapper = MagicMock(return_value='wrapped_fit_function') + fitter._post_compute_reshaping = MagicMock(return_value='fit_result') + fitter._minimizer = MagicMock() + fitter._minimizer.fit = MagicMock(return_value='result') + progress_callback = MagicMock() + + # Then + result = fitter.fit('x', 'y', 'weights', 'vectorized', progress_callback=progress_callback) + + # Expect + assert result == 'fit_result' + fitter._minimizer.fit.assert_called_once_with( + 'x_fit', + 'y_new', + weights='weights', + tolerance=None, + max_evaluations=None, + progress_callback=progress_callback, + ) + def test_post_compute_reshaping(self, fitter: Fitter): # When fit_result = MagicMock() diff --git a/tests/unit/fitting/test_multi_fitter.py b/tests/unit/fitting/test_multi_fitter.py new file mode 100644 index 00000000..a54a927b --- /dev/null +++ b/tests/unit/fitting/test_multi_fitter.py @@ -0,0 +1,63 @@ +# SPDX-FileCopyrightText: 2026 EasyScience contributors +# SPDX-License-Identifier: BSD-3-Clause + +from unittest.mock import MagicMock + +import pytest + +from easyscience import ObjBase +from easyscience import Parameter +from easyscience.fitting.fitter import Fitter +from easyscience.fitting.multi_fitter import MultiFitter + + +class Line(ObjBase): + m: Parameter + c: Parameter + + def __init__(self, m_val: float, c_val: float): + m = Parameter('m', m_val) + c = Parameter('c', c_val) + super().__init__('line', m=m, c=c) + + def __call__(self, x): + return self.m.value * x + self.c.value + + +class TestMultiFitter: + @pytest.fixture + def multi_fitter(self, monkeypatch): + monkeypatch.setattr(Fitter, '_update_minimizer', MagicMock()) + fit_object_1 = Line(1.0, 0.5) + fit_object_2 = Line(2.0, 1.5) + return MultiFitter([fit_object_1, fit_object_2], [fit_object_1, fit_object_2]) + + def test_fit_progress_callback(self, multi_fitter: MultiFitter): + # 
When + multi_fitter._precompute_reshaping = MagicMock( + return_value=('x_fit', 'x_new', 'y_new', 'weights', 'dims') + ) + multi_fitter._fit_function_wrapper = MagicMock(return_value='wrapped_fit_function') + multi_fitter._post_compute_reshaping = MagicMock(return_value='fit_result') + multi_fitter._minimizer = MagicMock() + multi_fitter._minimizer.fit = MagicMock(return_value='result') + progress_callback = MagicMock() + + # Then + result = multi_fitter.fit( + ['x_1', 'x_2'], + ['y_1', 'y_2'], + ['weights_1', 'weights_2'], + progress_callback=progress_callback, + ) + + # Expect + assert result == 'fit_result' + multi_fitter._minimizer.fit.assert_called_once_with( + 'x_fit', + 'y_new', + weights='weights', + tolerance=None, + max_evaluations=None, + progress_callback=progress_callback, + )