remove backtesting from generate_optimizer

This commit is contained in:
viotemp1
2025-04-27 21:26:06 +03:00
parent fb64ac942b
commit 8d0ca7f5c1
3 changed files with 21 additions and 20 deletions

View File

@@ -20,7 +20,6 @@ from freqtrade.constants import FTHYPT_FILEVERSION, LAST_BT_RESULT_FN, Config
 from freqtrade.enums import HyperoptState
 from freqtrade.exceptions import OperationalException
 from freqtrade.misc import file_dump_json, plural
-from freqtrade.optimize.backtesting import Backtesting
 from freqtrade.optimize.hyperopt.hyperopt_logger import logging_mp_handle, logging_mp_setup
 from freqtrade.optimize.hyperopt.hyperopt_optimizer import HyperOptimizer
 from freqtrade.optimize.hyperopt.hyperopt_output import HyperoptOutput
@@ -149,7 +148,7 @@ class Hyperopt:
     )
     def run_optimizer_parallel(
-        self, parallel: Parallel, backtesting: Backtesting, asked: list[list]
+        self, parallel: Parallel, asked: list[list]
     ) -> list[dict[str, Any]]:
         """Start optimizer in a parallel way"""
@@ -162,7 +161,7 @@ class Hyperopt:
             return self.hyperopter.generate_optimizer(*args, **kwargs)
         return parallel(
-            delayed(wrap_non_picklable_objects(optimizer_wrapper))(backtesting, v) for v in asked
+            delayed(wrap_non_picklable_objects(optimizer_wrapper))(v) for v in asked
         )
     def _set_random_state(self, random_state: int | None) -> int:
@@ -289,7 +288,7 @@ class Hyperopt:
                     n_points=1, dimensions=self.hyperopter.o_dimensions
                 )
                 f_val0 = self.hyperopter.generate_optimizer(
-                    self.hyperopter.backtesting, asked[0].params
+                    asked[0].params
                 )
                 self.opt.tell(asked[0], [f_val0["loss"]])
                 self.evaluate_result(f_val0, 1, is_random[0])
@@ -309,7 +308,7 @@ class Hyperopt:
                     f_val = self.run_optimizer_parallel(
                         parallel,
-                        self.hyperopter.backtesting,
+                        # self.hyperopter.backtesting,
                         [asked1.params for asked1 in asked],
                     )
                     f_val_loss = [v["loss"] for v in f_val]

View File

@@ -267,9 +267,7 @@ class HyperOptimizer:
             # noinspection PyProtectedMember
             attr.value = params_dict[attr_name]
-    def generate_optimizer(
-        self, backtesting: Backtesting, raw_params: dict[str, Any]
-    ) -> dict[str, Any]:
+    def generate_optimizer(self, raw_params: dict[str, Any]) -> dict[str, Any]:
         """
         Used Optimize function.
         Called once per epoch to optimize whatever is configured.
@@ -281,26 +279,30 @@ class HyperOptimizer:
         # Apply parameters
         if HyperoptTools.has_space(self.config, "buy"):
-            self.assign_params(backtesting, params_dict, "buy")
+            self.assign_params(self.backtesting, params_dict, "buy")
         if HyperoptTools.has_space(self.config, "sell"):
-            self.assign_params(backtesting, params_dict, "sell")
+            self.assign_params(self.backtesting, params_dict, "sell")
         if HyperoptTools.has_space(self.config, "protection"):
-            self.assign_params(backtesting, params_dict, "protection")
+            self.assign_params(self.backtesting, params_dict, "protection")
         if HyperoptTools.has_space(self.config, "roi"):
-            backtesting.strategy.minimal_roi = self.custom_hyperopt.generate_roi_table(params_dict)
+            self.backtesting.strategy.minimal_roi = self.custom_hyperopt.generate_roi_table(
+                params_dict
+            )
         if HyperoptTools.has_space(self.config, "stoploss"):
-            backtesting.strategy.stoploss = params_dict["stoploss"]
+            self.backtesting.strategy.stoploss = params_dict["stoploss"]
         if HyperoptTools.has_space(self.config, "trailing"):
             d = self.custom_hyperopt.generate_trailing_params(params_dict)
-            backtesting.strategy.trailing_stop = d["trailing_stop"]
-            backtesting.strategy.trailing_stop_positive = d["trailing_stop_positive"]
-            backtesting.strategy.trailing_stop_positive_offset = d["trailing_stop_positive_offset"]
-            backtesting.strategy.trailing_only_offset_is_reached = d[
+            self.backtesting.strategy.trailing_stop = d["trailing_stop"]
+            self.backtesting.strategy.trailing_stop_positive = d["trailing_stop_positive"]
+            self.backtesting.strategy.trailing_stop_positive_offset = d[
+                "trailing_stop_positive_offset"
+            ]
+            self.backtesting.strategy.trailing_only_offset_is_reached = d[
                 "trailing_only_offset_is_reached"
             ]
@@ -319,7 +321,7 @@ class HyperOptimizer:
             self.config.update({"max_open_trades": updated_max_open_trades})
-            backtesting.strategy.max_open_trades = updated_max_open_trades
+            self.backtesting.strategy.max_open_trades = updated_max_open_trades
         with self.data_pickle_file.open("rb") as f:
             processed = load(f, mmap_mode="r")
@@ -327,7 +329,7 @@ class HyperOptimizer:
             # Data is not yet analyzed, rerun populate_indicators.
             processed = self.advise_and_trim(processed)
-        bt_results = backtesting.backtest(
+        bt_results = self.backtesting.backtest(
             processed=processed, start_date=self.min_date, end_date=self.max_date
         )
         backtest_end_time = datetime.now(timezone.utc)

View File

@@ -606,7 +606,7 @@ def test_generate_optimizer(mocker, hyperopt_conf) -> None:
    hyperopt.hyperopter.max_date = dt_utc(2017, 12, 13)
    hyperopt.hyperopter.init_spaces()
    generate_optimizer_value = hyperopt.hyperopter.generate_optimizer(
-        hyperopt.hyperopter.backtesting, optimizer_param
+        optimizer_param
    )
    assert generate_optimizer_value == response_expected