fix increasing memory usage during hyperopt runs.

Pass the Backtesting instance into generate_optimizer() and the parallel
workers as an explicit argument instead of reaching it through self, keep
the ticker-data pickle path as a plain string on HyperOptimizer, feed the
optimizer from a precomputed loss list, scope the optuna
ExperimentalWarning filter to sampler creation, and run gc.collect()
after each batch of epochs.

viotemp1
2025-04-02 18:45:49 +03:00
parent 3fcf6559ab
commit 90aaaa50fc
3 changed files with 65 additions and 41 deletions

View File

@@ -4,6 +4,7 @@
 This module contains the hyperopt logic
 """
+import gc
 import logging
 import random
 from datetime import datetime
@@ -19,6 +20,7 @@ from freqtrade.constants import FTHYPT_FILEVERSION, LAST_BT_RESULT_FN, Config
 from freqtrade.enums import HyperoptState
 from freqtrade.exceptions import OperationalException
 from freqtrade.misc import file_dump_json, plural
+from freqtrade.optimize.backtesting import Backtesting
 from freqtrade.optimize.hyperopt.hyperopt_logger import logging_mp_handle, logging_mp_setup
 from freqtrade.optimize.hyperopt.hyperopt_optimizer import HyperOptimizer
 from freqtrade.optimize.hyperopt.hyperopt_output import HyperoptOutput
@@ -30,6 +32,9 @@ from freqtrade.optimize.hyperopt_tools import (
 from freqtrade.util import get_progress_tracker

+# import multiprocessing as mp
+# mp.set_start_method('fork', force=True)  # spawn fork forkserver
+
 logger = logging.getLogger(__name__)
@@ -90,6 +95,7 @@ class Hyperopt:
         self.print_json = self.config.get("print_json", False)

         self.hyperopter = HyperOptimizer(self.config)
+        self.hyperopter.data_pickle_file = self.data_pickle_file

     @staticmethod
     def get_lock_filename(config: Config) -> str:
@@ -146,7 +152,9 @@
             self.print_all,
         )

-    def run_optimizer_parallel(self, parallel: Parallel, asked: list[list]) -> list[dict[str, Any]]:
+    def run_optimizer_parallel(
+        self, parallel: Parallel, backtesting: Backtesting, asked: list[list]
+    ) -> list[dict[str, Any]]:
         """Start optimizer in a parallel way"""

         def optimizer_wrapper(*args, **kwargs):
@@ -157,7 +165,9 @@
             return self.hyperopter.generate_optimizer(*args, **kwargs)

-        return parallel(delayed(wrap_non_picklable_objects(optimizer_wrapper))(v) for v in asked)
+        return parallel(
+            delayed(wrap_non_picklable_objects(optimizer_wrapper))(backtesting, v) for v in asked
+        )

     def _set_random_state(self, random_state: int | None) -> int:
         return random_state or random.randint(1, 2**16 - 1)  # noqa: S311
@@ -282,7 +292,9 @@
                     asked, is_random = self.get_asked_points(
                         n_points=1, dimensions=self.hyperopter.o_dimensions
                     )
-                    f_val0 = self.hyperopter.generate_optimizer(asked[0].params)
+                    f_val0 = self.hyperopter.generate_optimizer(
+                        self.hyperopter.backtesting, asked[0].params
+                    )
                     self.opt.tell(asked[0], [f_val0["loss"]])
                     self.evaluate_result(f_val0, 1, is_random[0])
                     pbar.update(task, advance=1)
@@ -299,13 +311,17 @@
                         n_points=current_jobs, dimensions=self.hyperopter.o_dimensions
                     )
                     # asked_params = [asked1.params for asked1 in asked]
-                    # logger.info(f"asked iteration {i}: {asked_params}")
+                    # logger.info(f"asked iteration {i}: {asked} {asked_params}")
                     f_val = self.run_optimizer_parallel(
-                        parallel, [asked1.params for asked1 in asked]
+                        parallel,
+                        self.hyperopter.backtesting,
+                        [asked1.params for asked1 in asked],
                     )
-                    for o_ask, v in zip(asked, f_val, strict=False):
-                        self.opt.tell(o_ask, v["loss"])
                     # self.opt.tell(asked, [v["loss"] for v in f_val])
+                    f_val_loss = [v["loss"] for v in f_val]
+                    for o_ask, v in zip(asked, f_val_loss, strict=False):
+                        self.opt.tell(o_ask, v)
+                    # logger.info(f"result iteration {i}: {asked} {f_val_loss}")

                     for j, val in enumerate(f_val):
                         # Use human-friendly indexes here (starting from 1)
@@ -314,6 +330,7 @@
                         self.evaluate_result(val, current, is_random[j])
                         pbar.update(task, advance=1)
                     logging_mp_handle(log_queue)
+                    gc.collect()
         except KeyboardInterrupt:
             print("User interrupted..")
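The shape of these hunks: the heavyweight Backtesting object travels into the joblib workers as an explicit delayed(...) argument instead of being reached through self, and gc.collect() runs once per batch so objects released after tell() are actually reclaimed. A minimal, self-contained sketch of that pattern, assuming nothing from freqtrade (the evaluate function and shared_state dict are hypothetical stand-ins for generate_optimizer and the Backtesting instance):

    import gc

    from joblib import Parallel, delayed, wrap_non_picklable_objects


    def evaluate(shared_state, params):
        # State arrives as an argument rather than via a captured `self`,
        # so pickling the task does not drag the whole optimizer along.
        return {"loss": sum(params), "tag": shared_state["tag"]}


    shared_state = {"tag": "toy-backtesting"}  # stand-in for Backtesting

    with Parallel(n_jobs=2) as parallel:
        for batch in ([[1, 2], [3, 4]], [[5, 6], [7, 8]]):
            results = parallel(
                delayed(wrap_non_picklable_objects(evaluate))(shared_state, params)
                for params in batch
            )
            print([r["loss"] for r in results])
            gc.collect()  # mirror the per-batch collection added above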

View File

@@ -7,12 +7,14 @@ import logging
 import sys
 import warnings
 from datetime import datetime, timezone
+from pathlib import Path
 from typing import Any

 from joblib import dump, load
 from joblib.externals import cloudpickle
 from pandas import DataFrame

+# from memory_profiler import profile
 from freqtrade.constants import DATETIME_PRINT_FORMAT, Config
 from freqtrade.data.converter import trim_dataframes
 from freqtrade.data.history import get_timerange
@@ -31,10 +33,12 @@ from freqtrade.resolvers.hyperopt_resolver import HyperOptLossResolver
 from freqtrade.util.dry_run_wallet import get_dry_run_wallet

 # Suppress scikit-learn FutureWarnings from skopt
+# Suppress optuna ExperimentalWarning from skopt
 with warnings.catch_warnings():
+    from optuna.exceptions import ExperimentalWarning
     warnings.filterwarnings("ignore", category=FutureWarning)
     # from skopt import Optimizer
     # warnings.filterwarnings("ignore", category=ExperimentalWarning)

 import optuna
 from skopt.space import Dimension
@@ -102,12 +106,8 @@
             self.config
         )
         self.calculate_loss = self.custom_hyperoptloss.hyperopt_loss_function
-        self.data_pickle_file = (
-            self.config["user_data_dir"] / "hyperopt_results" / "hyperopt_tickerdata.pkl"
-        )
         self.market_change = 0.0
+        self.data_pickle_file = ""

         if HyperoptTools.has_space(self.config, "sell"):
             # Make sure use_exit_signal is enabled
@@ -260,16 +260,23 @@
             + self.max_open_trades_space
         )

-    def assign_params(self, params_dict: dict[str, Any], category: str) -> None:
+    def assign_params(
+        self, backtesting: Backtesting, params_dict: dict[str, Any], category: str
+    ) -> None:
         """
         Assign hyperoptable parameters
         """
-        for attr_name, attr in self.backtesting.strategy.enumerate_parameters(category):
+        for attr_name, attr in backtesting.strategy.enumerate_parameters(category):
             if attr.optimize:
                 # noinspection PyProtectedMember
                 attr.value = params_dict[attr_name]

-    def generate_optimizer(self, raw_params: dict[str, Any]) -> dict[str, Any]:  # list[Any]
+    # @profile
+    # fp=open('memory_profiler.log','w+')
+    # @profile(stream=fp)
+    def generate_optimizer(
+        self, backtesting: Backtesting, raw_params: dict[str, Any]
+    ) -> dict[str, Any]:  # list[Any]
         """
         Used Optimize function.
         Called once per epoch to optimize whatever is configured.
@@ -281,30 +288,26 @@
         # Apply parameters
         if HyperoptTools.has_space(self.config, "buy"):
-            self.assign_params(params_dict, "buy")
+            self.assign_params(backtesting, params_dict, "buy")

         if HyperoptTools.has_space(self.config, "sell"):
-            self.assign_params(params_dict, "sell")
+            self.assign_params(backtesting, params_dict, "sell")

         if HyperoptTools.has_space(self.config, "protection"):
-            self.assign_params(params_dict, "protection")
+            self.assign_params(backtesting, params_dict, "protection")

         if HyperoptTools.has_space(self.config, "roi"):
-            self.backtesting.strategy.minimal_roi = self.custom_hyperopt.generate_roi_table(
-                params_dict
-            )
+            backtesting.strategy.minimal_roi = self.custom_hyperopt.generate_roi_table(params_dict)

         if HyperoptTools.has_space(self.config, "stoploss"):
-            self.backtesting.strategy.stoploss = params_dict["stoploss"]
+            backtesting.strategy.stoploss = params_dict["stoploss"]

         if HyperoptTools.has_space(self.config, "trailing"):
             d = self.custom_hyperopt.generate_trailing_params(params_dict)
-            self.backtesting.strategy.trailing_stop = d["trailing_stop"]
-            self.backtesting.strategy.trailing_stop_positive = d["trailing_stop_positive"]
-            self.backtesting.strategy.trailing_stop_positive_offset = d[
-                "trailing_stop_positive_offset"
-            ]
-            self.backtesting.strategy.trailing_only_offset_is_reached = d[
+            backtesting.strategy.trailing_stop = d["trailing_stop"]
+            backtesting.strategy.trailing_stop_positive = d["trailing_stop_positive"]
+            backtesting.strategy.trailing_stop_positive_offset = d["trailing_stop_positive_offset"]
+            backtesting.strategy.trailing_only_offset_is_reached = d[
                 "trailing_only_offset_is_reached"
             ]
@@ -323,15 +326,15 @@
             self.config.update({"max_open_trades": updated_max_open_trades})
-            self.backtesting.strategy.max_open_trades = updated_max_open_trades
+            backtesting.strategy.max_open_trades = updated_max_open_trades

-        with self.data_pickle_file.open("rb") as f:
+        with Path(self.data_pickle_file).open("rb") as f:
             processed = load(f, mmap_mode="r")

-            if self.analyze_per_epoch:
-                # Data is not yet analyzed, rerun populate_indicators.
-                processed = self.advise_and_trim(processed)
+        if self.analyze_per_epoch:
+            # Data is not yet analyzed, rerun populate_indicators.
+            processed = self.advise_and_trim(processed)

-        bt_results = self.backtesting.backtest(
+        bt_results = backtesting.backtest(
             processed=processed, start_date=self.min_date, end_date=self.max_date
         )
         backtest_end_time = datetime.now(timezone.utc)
@@ -341,10 +344,10 @@
                 "backtest_end_time": int(backtest_end_time.timestamp()),
             }
         )

-        return self._get_results_dict(
+        result = self._get_results_dict(
             bt_results, self.min_date, self.max_date, params_dict, processed=processed
         )
+        return result

     def _get_results_dict(
         self,
@@ -443,7 +446,9 @@
         if isinstance(o_sampler, str):
             if o_sampler not in optuna_samplers_dict.keys():
                 raise OperationalException(f"Optuna Sampler {o_sampler} not supported.")
-            sampler = optuna_samplers_dict[o_sampler](seed=random_state)
+            with warnings.catch_warnings():
+                warnings.filterwarnings(action="ignore", category=ExperimentalWarning)
+                sampler = optuna_samplers_dict[o_sampler](seed=random_state)
         else:
             sampler = o_sampler
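The load(f, mmap_mode="r") call above is the other half of the memory story: joblib memory-maps the numpy payload of the pickle, so each epoch reads the ticker data from the page cache instead of materializing a private copy per worker. A small sketch of that mechanism; the demo file name and array payload are made up for illustration:

    from pathlib import Path
    from tempfile import gettempdir

    import numpy as np
    from joblib import dump, load

    # Hypothetical demo file; stands in for hyperopt_tickerdata.pkl.
    pickle_file = Path(gettempdir()) / "tickerdata_demo.pkl"
    dump({"BTC/USDT": np.random.rand(100_000, 5)}, pickle_file)

    with pickle_file.open("rb") as f:
        processed = load(f, mmap_mode="r")

    # The array comes back as a read-only numpy.memmap backed by the file.
    print(type(processed["BTC/USDT"]).__name__)

The sampler hunk narrows the ExperimentalWarning suppression to the single statement that triggers it, rather than filtering at import time. The same pattern outside freqtrade, using optuna's experimental QMCSampler as an assumed example of a warning-emitting constructor:

    import warnings

    import optuna
    from optuna.exceptions import ExperimentalWarning

    # Constructing an experimental sampler emits ExperimentalWarning;
    # scoping the filter keeps every other warning visible.
    with warnings.catch_warnings():
        warnings.filterwarnings(action="ignore", category=ExperimentalWarning)
        sampler = optuna.samplers.QMCSampler(seed=42)

    print(type(sampler).__name__)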

View File

@@ -608,7 +608,9 @@ def test_generate_optimizer(mocker, hyperopt_conf) -> None:
     hyperopt.hyperopter.min_date = dt_utc(2017, 12, 10)
     hyperopt.hyperopter.max_date = dt_utc(2017, 12, 13)
     hyperopt.hyperopter.init_spaces()
-    generate_optimizer_value = hyperopt.hyperopter.generate_optimizer(optimizer_param)
+    generate_optimizer_value = hyperopt.hyperopter.generate_optimizer(
+        hyperopt.hyperopter.backtesting, optimizer_param
+    )
     # list(optimizer_param.values())
     assert generate_optimizer_value == response_expected
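A fix like this is easiest to trust with a memory check alongside the functional test. A hypothetical harness, not part of this commit, showing the kind of flat-RSS observation one could run around repeated epochs (psutil-based; the synthetic per-epoch garbage stands in for real backtest allocations):

    import gc
    import os

    import psutil

    proc = psutil.Process(os.getpid())


    def rss_mb() -> float:
        # Resident set size of the current process, in MiB.
        return proc.memory_info().rss / 1024**2


    baseline = rss_mb()
    for epoch in range(5):
        junk = [bytearray(1_000_000) for _ in range(50)]  # per-epoch garbage
        del junk
        gc.collect()
        print(f"epoch {epoch}: {rss_mb() - baseline:+.1f} MiB vs baseline")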