chore: clean up some code

commit ca5ccc8799
parent 057cc2538e
Author: Matthias
Date:   2025-04-23 20:07:40 +02:00

2 changed files with 3 additions and 21 deletions


@@ -306,8 +306,6 @@ class Hyperopt:
         asked, is_random = self.get_asked_points(
             n_points=current_jobs, dimensions=self.hyperopter.o_dimensions
         )
-        # asked_params = [asked1.params for asked1 in asked]
-        # logger.info(f"asked iteration {i}: {asked} {asked_params}")
         f_val = self.run_optimizer_parallel(
             parallel,
@@ -317,7 +315,6 @@ class Hyperopt:
         f_val_loss = [v["loss"] for v in f_val]
         for o_ask, v in zip(asked, f_val_loss, strict=False):
             self.opt.tell(o_ask, v)
-        # logger.info(f"result iteration {i}: {asked} {f_val_loss}")
         for j, val in enumerate(f_val):
             # Use human-friendly indexes here (starting from 1)
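
The two hunks above drop leftover debug logging around the optimizer's ask/tell loop: candidate points are asked in a batch, evaluated in parallel, and each loss is told back to the optimizer. A minimal standalone sketch of that pattern with Optuna (the search space and loss function are made up for illustration):

    import optuna

    study = optuna.create_study(direction="minimize")
    search_space = {"x": optuna.distributions.FloatDistribution(-10, 10)}

    for i in range(20):
        trial = study.ask(search_space)         # sample one candidate point
        loss = (trial.params["x"] - 2.0) ** 2   # stand-in for a backtest loss
        study.tell(trial, loss)                 # report the result back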


@@ -147,11 +147,8 @@ class HyperOptimizer:
         self.hyperopt_pickle_magic(modules.__bases__)

     def _get_params_dict(
-        self,
-        dimensions: list[DimensionProtocol],
-        raw_params: dict[str, Any],
+        self, dimensions: list[DimensionProtocol], raw_params: dict[str, Any]
     ) -> dict[str, Any]:
-        # logger.info(f"_get_params_dict: {raw_params}")
         # Ensure the number of dimensions match
         # the number of parameters in the list.
         if len(raw_params) != len(dimensions):
@@ -159,9 +156,6 @@ class HyperOptimizer:
         # Return a dict where the keys are the names of the dimensions
         # and the values are taken from the list of parameters.
-        # result = {d.name: v for d, v in zip(dimensions, raw_params, strict=False)}
-        # logger.info(f"d_get_params_dict: {result}")
-        # return {d.name: v for d, v in zip(dimensions, raw_params.params, strict=False)}
         return raw_params

     def _get_params_details(self, params: dict) -> dict:
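
The comments deleted above are remnants of the old skopt-style flow, where parameters arrived as a positional list and had to be re-keyed by dimension name; in the current flow the raw params already come in as a name-to-value dict, so the method only validates the length and passes them through. A rough sketch of the difference, using a hypothetical stand-in for DimensionProtocol:

    from dataclasses import dataclass

    @dataclass
    class Dim:  # hypothetical stand-in for DimensionProtocol
        name: str

    dimensions = [Dim("roi"), Dim("rsi_period")]

    # Legacy skopt-style: values arrive as a positional list, re-keyed by name.
    legacy = {d.name: v for d, v in zip(dimensions, [0.02, 14], strict=True)}

    # Optuna-style: params already arrive keyed by name and can pass through
    # unchanged after the length check, which is all the method does now.
    assert legacy == {"roi": 0.02, "rsi_period": 14}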
@@ -273,12 +267,9 @@ class HyperOptimizer:
                 # noinspection PyProtectedMember
                 attr.value = params_dict[attr_name]

-    # @profile
-    # fp=open('memory_profiler.log','w+')
-    # @profile(stream=fp)
     def generate_optimizer(
         self, backtesting: Backtesting, raw_params: dict[str, Any]
-    ) -> dict[str, Any]:  # list[Any]
+    ) -> dict[str, Any]:
         """
         Used Optimize function.
         Called once per epoch to optimize whatever is configured.
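
The deleted @profile comments were leftovers from the memory_profiler package, whose decorator can write a line-by-line memory report to a custom stream, which is what the commented-out pattern sketched. For reference (the profiled function here is illustrative):

    from memory_profiler import profile

    fp = open("memory_profiler.log", "w+")

    @profile(stream=fp)        # per-line memory stats go to the log, not stdout
    def run_epoch(n: int) -> int:
        data = list(range(n))  # this allocation shows up in the report
        return sum(data)

    run_epoch(100_000)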
@@ -330,7 +321,7 @@
         backtesting.strategy.max_open_trades = updated_max_open_trades

-        with Path(self.data_pickle_file).open("rb") as f:
+        with self.data_pickle_file.open("rb") as f:
             processed = load(f, mmap_mode="r")
             if self.analyze_per_epoch:
                 # Data is not yet analyzed, rerun populate_indicators.
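
Dropping the Path() wrapper works because self.data_pickle_file is evidently already a pathlib.Path. The load here is joblib's, and mmap_mode="r" memory-maps array data inside the pickle rather than copying it, which keeps memory flat when the same preprocessed data is read once per epoch. A small sketch (file name and contents are made up):

    from pathlib import Path

    import numpy as np
    from joblib import dump, load

    data_pickle_file = Path("processed.pkl")  # hypothetical path
    dump({"close": np.random.rand(1_000)}, data_pickle_file)

    with data_pickle_file.open("rb") as f:
        processed = load(f, mmap_mode="r")    # arrays come back memory-mapped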
@@ -419,7 +410,6 @@
                 f"Unknown search space {original_dim.name} - {original_dim} / \
                     {type(original_dim)}"
             )
-        # logger.info(f"convert_dimensions_to_optuna_space: {s_dimensions} - {o_dimensions}")
         return o_dimensions

     def get_optimizer(
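
convert_dimensions_to_optuna_space turns the strategy's search-space dimensions into Optuna distribution objects, raising on anything unrecognized (the error path in the hunk above). A rough sketch of what such a mapping can look like, with hypothetical Integer/Real/Categorical dimension classes:

    from optuna.distributions import (
        CategoricalDistribution,
        FloatDistribution,
        IntDistribution,
    )

    def to_optuna_space(dimensions) -> dict:
        o_dimensions = {}
        for dim in dimensions:
            kind = type(dim).__name__  # hypothetical dimension classes
            if kind == "Integer":
                o_dimensions[dim.name] = IntDistribution(dim.low, dim.high)
            elif kind == "Real":
                o_dimensions[dim.name] = FloatDistribution(dim.low, dim.high)
            elif kind == "Categorical":
                o_dimensions[dim.name] = CategoricalDistribution(dim.categories)
            else:
                raise ValueError(f"Unknown search space {dim.name} - {dim} / {type(dim)}")
        return o_dimensions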
@@ -431,11 +421,6 @@
         )
         self.o_dimensions = self.convert_dimensions_to_optuna_space(self.dimensions)
-        # for save/restore
-        # with open("sampler.pkl", "wb") as fout:
-        #     pickle.dump(study.sampler, fout)
-        # restored_sampler = pickle.load(open("sampler.pkl", "rb"))
         if isinstance(o_sampler, str):
             if o_sampler not in optuna_samplers_dict.keys():
                 raise OperationalException(f"Optuna Sampler {o_sampler} not supported.")