From 1335d9c3ce29d563070a6359656b1a161b554898 Mon Sep 17 00:00:00 2001
From: James Krach <69264125+jameskrach@users.noreply.github.com>
Date: Thu, 10 Jul 2025 18:25:33 -0400
Subject: [PATCH] Allow saving and loading optimizer state without
 probes/registrations

---
 bayes_opt/bayesian_optimization.py  | 15 ++-------------
 tests/test_bayesian_optimization.py | 14 ++++++++------
 2 files changed, 10 insertions(+), 19 deletions(-)

diff --git a/bayes_opt/bayesian_optimization.py b/bayes_opt/bayesian_optimization.py
index 0df53b6d0..8afaedaff 100644
--- a/bayes_opt/bayesian_optimization.py
+++ b/bayes_opt/bayesian_optimization.py
@@ -352,19 +352,7 @@ def save_state(self, path: str | PathLike[str]) -> None:
         ----------
         path : str or PathLike
             Path to save the optimization state
-
-        Raises
-        ------
-        ValueError
-            If attempting to save state before collecting any samples.
         """
-        if len(self._space) == 0:
-            msg = (
-                "Cannot save optimizer state before collecting any samples. "
-                "Please probe or register at least one point before saving."
-            )
-            raise ValueError(msg)
-
         random_state = None
         if self._random_state is not None:
             state_tuple = self._random_state.get_state()
@@ -443,7 +431,8 @@ def load_state(self, path: str | PathLike[str]) -> None:
         # Set the GP parameters
         self.set_gp_params(**gp_params)
 
-        self._gp.fit(self._space.params, self._space.target)
+        if len(self._space):
+            self._gp.fit(self._space.params, self._space.target)
 
         if state["random_state"] is not None:
             random_state_tuple = (
diff --git a/tests/test_bayesian_optimization.py b/tests/test_bayesian_optimization.py
index 158b7478e..e1d39b31d 100644
--- a/tests/test_bayesian_optimization.py
+++ b/tests/test_bayesian_optimization.py
@@ -372,14 +372,16 @@ def test_save_load_unused_optimizer(tmp_path):
     """Test saving and loading optimizer state with unused optimizer."""
     optimizer = BayesianOptimization(f=target_func, pbounds=PBOUNDS, random_state=1, verbose=0)
 
-    # Test that saving without samples raises an error
-    with pytest.raises(ValueError, match="Cannot save optimizer state before collecting any samples"):
-        optimizer.save_state(tmp_path / "optimizer_state.json")
+    # Test that saving without samples does not raise an error
+    optimizer.save_state(tmp_path / "unprobed_optimizer_state.json")
 
-    # Add a sample point
-    optimizer.probe(params={"p1": 1, "p2": 2}, lazy=False)
+    # Check that we load the original state
+    first_suggestion = optimizer.suggest()
+    optimizer.load_state(tmp_path / "unprobed_optimizer_state.json")
+    assert optimizer.suggest() == first_suggestion
 
-    # Now saving should work
+    # Save an optimizer state with a probed point
+    optimizer.probe(params={"p1": 1, "p2": 2}, lazy=False)
     optimizer.save_state(tmp_path / "optimizer_state.json")
 
     new_optimizer = BayesianOptimization(f=target_func, pbounds=PBOUNDS, random_state=1, verbose=0)
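
For reference, a minimal sketch of the round-trip this patch enables, using
only calls that appear in the diff above (save_state, load_state, suggest,
probe). The objective function, bounds, and file name below are illustrative
stand-ins, not part of the change:

    from bayes_opt import BayesianOptimization


    def black_box(p1, p2):
        # Illustrative objective; any function over the bounded params works.
        return -(p1**2) - (p2 - 1) ** 2


    optimizer = BayesianOptimization(
        f=black_box, pbounds={"p1": (-2, 2), "p2": (-3, 3)}, random_state=1, verbose=0
    )

    # Before this patch, saving an optimizer with no probed or registered
    # points raised ValueError; now the fresh state round-trips.
    optimizer.save_state("unprobed_state.json")

    # Loading restores the saved random state, so the first suggestion
    # is reproducible even though no GP has been fit yet.
    first_suggestion = optimizer.suggest()
    optimizer.load_state("unprobed_state.json")
    assert optimizer.suggest() == first_suggestion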