diff --git a/bayes_opt/logger.py b/bayes_opt/logger.py
index 69f40d19..7198febf 100644
--- a/bayes_opt/logger.py
+++ b/bayes_opt/logger.py
@@ -271,7 +271,18 @@ def log_optimization_step(
         result : dict[str, Any]
             The result dictionary for the most recent step.
 
-        params_config : Mapping[str, ParamsType]
+        # Read the data logged so far
+        with self._path.open("r") as f:
+            file_data = json.load(f)
+
+        # Append the newest data point
+        file_data.append(data)
+
+        # Write the updated contents back to the file
+        with self._path.open("w") as f:
+            json.dump(file_data, f)
+
+        params_config : Mapping[str, ParamsType]
             The configuration to map the key to the parameter for correct formatting.
 
         current_max : dict[str, Any] | None
diff --git a/bayes_opt/util.py b/bayes_opt/util.py
index ea68889f..5a7edf63 100644
--- a/bayes_opt/util.py
+++ b/bayes_opt/util.py
@@ -2,8 +2,62 @@
 
 from __future__ import annotations
 
+import json
+from collections.abc import Iterable
+from os import PathLike
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 import numpy as np
 
+if TYPE_CHECKING:
+    # Imported for type hints only, which avoids a circular import at runtime.
+    from bayes_opt.bayesian_optimization import BayesianOptimization
+
+
+def load_logs(
+    optimizer: BayesianOptimization, logs: str | PathLike[str] | Iterable[str | PathLike[str]]
+) -> BayesianOptimization:
+    """Load previously logged observations and register them with the optimizer.
+
+    Parameters
+    ----------
+    optimizer : BayesianOptimization
+        Optimizer to register the previous observations with.
+
+    logs : str or os.PathLike or iterable of str or os.PathLike
+        File or files to load the logs from.
+
+    Returns
+    -------
+    The optimizer with the state loaded.
+
+    """
+    if isinstance(logs, (str, PathLike)):
+        logs = [logs]
+
+    for log in logs:
+        try:
+            # Each log file is expected to contain a JSON array of observations.
+            with Path(log).open("r") as f:
+                file_data = json.load(f)
+        except json.JSONDecodeError:
+            print(f"ERROR: JSON decode error when decoding '{log}'")
+            continue
+
+        for iteration in file_data:
+            # Skip already-registered points so register() does not raise on duplicates.
+            if not optimizer._allow_duplicate_points and iteration["params"] in optimizer:
+                continue
+
+            optimizer.register(
+                params=iteration["params"],
+                target=iteration["target"],
+                constraint_value=(iteration["constraint"] if optimizer.is_constrained else None),
+            )
+
+    return optimizer
+
 
 def ensure_rng(random_state: int | np.random.RandomState | None = None) -> np.random.RandomState:
     """Create a random number generator based on an optional seed.
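
For reviewers, here is a minimal sketch of how the new `load_logs` helper could be exercised once this patch is applied. The toy objective, the bounds, and the `./logs.json` path are placeholders invented for illustration; only `BayesianOptimization` and the `load_logs` import path (`bayes_opt.util`, as added above) come from the library and this patch.

```python
# Illustrative only: the objective, bounds, and "./logs.json" path are made up.
from bayes_opt import BayesianOptimization
from bayes_opt.util import load_logs  # import path as added by this patch


def black_box(x: float, y: float) -> float:
    """Toy objective used purely for demonstration."""
    return -(x**2) - (y - 1) ** 2 + 1


optimizer = BayesianOptimization(
    f=black_box,
    pbounds={"x": (-2.0, 2.0), "y": (-3.0, 3.0)},
    random_state=1,
)

# Replay the observations stored by an earlier run into the fresh optimizer.
load_logs(optimizer, logs="./logs.json")
print(f"Optimizer is now aware of {len(optimizer.res)} points.")
```

The sketch assumes the log file holds a JSON array of records with `params`, `target`, and (when constrained) `constraint` keys, which is the shape the updated logger appends and `load_logs` reads back.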