diff --git a/freqtrade/optimize/hyperopt/hyperopt.py b/freqtrade/optimize/hyperopt/hyperopt.py
index 3eecb3128..acc1be7f2 100644
--- a/freqtrade/optimize/hyperopt/hyperopt.py
+++ b/freqtrade/optimize/hyperopt/hyperopt.py
@@ -14,7 +14,8 @@ from pathlib import Path
 from typing import Any
 
 import rapidjson
-from joblib import Parallel, cpu_count, delayed, wrap_non_picklable_objects
+from joblib import Parallel, cpu_count
+from inspect import unwrap
 
 from freqtrade.constants import FTHYPT_FILEVERSION, LAST_BT_RESULT_FN, Config
 from freqtrade.enums import HyperoptState
@@ -158,7 +159,8 @@ class Hyperopt:
             return self.hyperopter.generate_optimizer(*args, **kwargs)
 
-        return parallel(delayed(wrap_non_picklable_objects(optimizer_wrapper))(v) for v in asked)
+        # return parallel(delayed(wrap_non_picklable_objects(optimizer_wrapper))(v) for v in asked)
+        return parallel(optimizer_wrapper(v) for v in asked)
 
     def _set_random_state(self, random_state: int | None) -> int:
         return random_state or random.randint(1, 2**16 - 1)  # noqa: S311
 
@@ -283,7 +285,9 @@ class Hyperopt:
                     asked, is_random = self.get_asked_points(
                         n_points=1, dimensions=self.hyperopter.o_dimensions
                     )
-                    f_val0 = self.hyperopter.generate_optimizer(asked[0].params)
+                    f_val0 = unwrap(self.hyperopter.generate_optimizer)(
+                        self.hyperopter, asked[0].params
+                    )
                     self.opt.tell(asked[0], [f_val0["loss"]])
                     self.evaluate_result(f_val0, 1, is_random[0])
                     pbar.update(task, advance=1)
diff --git a/freqtrade/optimize/hyperopt/hyperopt_optimizer.py b/freqtrade/optimize/hyperopt/hyperopt_optimizer.py
index 33b6a9ba1..76284d330 100644
--- a/freqtrade/optimize/hyperopt/hyperopt_optimizer.py
+++ b/freqtrade/optimize/hyperopt/hyperopt_optimizer.py
@@ -11,7 +11,7 @@ from pathlib import Path
 from typing import Any
 
 import optuna
-from joblib import dump, load
+from joblib import dump, load, delayed, wrap_non_picklable_objects
 from joblib.externals import cloudpickle
 from optuna.exceptions import ExperimentalWarning
 from pandas import DataFrame
@@ -248,6 +248,8 @@ class HyperOptimizer:
                 # noinspection PyProtectedMember
                 attr.value = params_dict[attr_name]
 
+    @delayed
+    @wrap_non_picklable_objects
     def generate_optimizer(self, raw_params: dict[str, Any]) -> dict[str, Any]:
         """
         Used Optimize function.
diff --git a/tests/optimize/test_hyperopt.py b/tests/optimize/test_hyperopt.py
index 76825cc03..97bd989bb 100644
--- a/tests/optimize/test_hyperopt.py
+++ b/tests/optimize/test_hyperopt.py
@@ -605,7 +605,9 @@ def test_generate_optimizer(mocker, hyperopt_conf) -> None:
     hyperopt.hyperopter.min_date = dt_utc(2017, 12, 10)
     hyperopt.hyperopter.max_date = dt_utc(2017, 12, 13)
     hyperopt.hyperopter.init_spaces()
-    generate_optimizer_value = hyperopt.hyperopter.generate_optimizer(optimizer_param)
+    generate_optimizer_value = hyperopt.hyperopter.generate_optimizer._obj(
+        hyperopt.hyperopter, raw_params=optimizer_param
+    )
     assert generate_optimizer_value == response_expected
 
 
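
The patch moves joblib's delayed and wrap_non_picklable_objects from the Parallel call site onto the generate_optimizer definition, so every call to the method now returns a ready-made joblib task tuple instead of a result. Below is a minimal sketch of that pattern; the Worker class and compute method are illustrative stand-ins rather than freqtrade code, and the behaviour assumed is joblib's documented decorator semantics.

from joblib import Parallel, delayed, wrap_non_picklable_objects


class Worker:
    @delayed
    @wrap_non_picklable_objects
    def compute(self, x: int) -> int:
        # Undecorated body; this is what eventually runs in a joblib worker.
        return x * x


if __name__ == "__main__":
    worker = Worker()
    # Each call now yields (callable, args, kwargs) rather than the result, so
    # the generator feeds ready-made tasks straight to Parallel - the same shape
    # as "parallel(optimizer_wrapper(v) for v in asked)" in the patch.
    results = Parallel(n_jobs=2)(worker.compute(v) for v in range(4))
    print(results)  # [0, 1, 4, 9]

The trade-off is that the decorated method no longer returns its result when called directly, which is why the remaining hunks go through unwrap and _obj for the single-call paths.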
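
For the single in-process evaluation (the analyze-per-epoch path in hyperopt.py) and for the unit test, the diff bypasses the decorators. This relies on joblib internals: delayed applies functools.wraps to its wrapper, so __wrapped__ (what inspect.unwrap follows) points at the cloudpickle wrapper, and the wrapper's _obj attribute, holding the raw function, is copied onto the decorated function. Both routes hand back an unbound callable, so the instance has to be passed explicitly. A sketch under those assumptions, reusing the illustrative Worker class from the previous snippet:

from inspect import unwrap

worker = Worker()

# Route 1 (hyperopt.py): unwrap() follows __wrapped__ down to the cloudpickle
# wrapper around the raw function. The binding to worker is lost on the way,
# so the instance is supplied as the first argument.
plain = unwrap(worker.compute)
assert plain(worker, 3) == 9

# Route 2 (test_hyperopt.py): _obj is the raw, undecorated function object.
assert worker.compute._obj(worker, x=3) == 9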