fix: move logging setup to hyperopter

restores hyperopt logging functionality
Matthias
2025-10-19 09:09:59 +02:00
parent a0c4b520fc
commit be16ffea23
2 changed files with 25 additions and 25 deletions

freqtrade/optimize/hyperopt/hyperopt.py

@@ -9,7 +9,6 @@ import logging
 import random
 from datetime import datetime
 from math import ceil
-from multiprocessing import Manager
 from pathlib import Path
 from typing import Any
@@ -21,7 +20,6 @@ from freqtrade.constants import FTHYPT_FILEVERSION, LAST_BT_RESULT_FN, Config
 from freqtrade.enums import HyperoptState
 from freqtrade.exceptions import OperationalException
 from freqtrade.misc import file_dump_json, plural
-from freqtrade.optimize.hyperopt.hyperopt_logger import logging_mp_handle, logging_mp_setup
 from freqtrade.optimize.hyperopt.hyperopt_optimizer import INITIAL_POINTS, HyperOptimizer
 from freqtrade.optimize.hyperopt.hyperopt_output import HyperoptOutput
 from freqtrade.optimize.hyperopt_tools import (
@@ -35,9 +33,6 @@ from freqtrade.util import get_progress_tracker
 logger = logging.getLogger(__name__)
 
-log_queue: Any
-
-
 class Hyperopt:
     """
     Hyperopt class, this class contains all the logic to run a hyperopt simulation
@@ -149,15 +144,7 @@ class Hyperopt:
     def run_optimizer_parallel(self, parallel: Parallel, asked: list[list]) -> list[dict[str, Any]]:
         """Start optimizer in a parallel way"""
 
-        def optimizer_wrapper(*args, **kwargs):
-            # global log queue. This must happen in the file that initializes Parallel
-            logging_mp_setup(
-                log_queue, logging.INFO if self.config["verbosity"] < 1 else logging.DEBUG
-            )
-
-            return self.hyperopter.generate_optimizer_wrapped(*args, **kwargs)
-
-        return parallel(optimizer_wrapper(v) for v in asked)
+        return parallel(self.hyperopter.generate_optimizer_wrapped(v) for v in asked)
 
     def _set_random_state(self, random_state: int | None) -> int:
         return random_state or random.randint(1, 2**16 - 1)  # noqa: S311
@@ -236,15 +223,6 @@ class Hyperopt:
         self._save_result(val)
 
-    def _setup_logging_mp_workaround(self) -> None:
-        """
-        Workaround for logging in child processes.
-        local_queue must be a global in the file that initializes Parallel.
-        """
-        global log_queue
-        m = Manager()
-        log_queue = m.Queue()
-
     def start(self) -> None:
         self.random_state = self._set_random_state(self.config.get("hyperopt_random_state"))
         logger.info(f"Using optimizer random state: {self.random_state}")
@@ -257,7 +235,6 @@ class Hyperopt:
         logger.info(f"Number of parallel jobs set as: {config_jobs}")
 
         self.opt = self.hyperopter.get_optimizer(self.random_state)
-        self._setup_logging_mp_workaround()
         try:
             with Parallel(n_jobs=config_jobs) as parallel:
                 jobs = parallel._effective_n_jobs()
@@ -307,7 +284,7 @@ class Hyperopt:
                             self.evaluate_result(val, current, is_random[j])
                             pbar.update(task, advance=1)
 
-                    logging_mp_handle(log_queue)
+                    self.hyperopter.handle_mp_logging()
                     gc.collect()
 
                     if (
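
Both halves of this change are an instance of the standard queue-based pattern for logging from joblib worker processes: the parent creates a Manager queue, each worker installs a logging.handlers.QueueHandler pointing at that queue, and the parent periodically drains the queue through its own handlers. Below is a minimal, self-contained sketch of the pattern, not freqtrade's actual code: mp_setup and mp_handle are hypothetical stand-ins for logging_mp_setup and logging_mp_handle, and the queue is passed as an explicit argument rather than a module-level global to keep the example short.

import logging
import logging.handlers
from multiprocessing import Manager

from joblib import Parallel, delayed


def mp_setup(queue, level):
    # Runs in the worker: replace its handlers with a QueueHandler so every
    # record is shipped back to the parent instead of being emitted locally.
    root = logging.getLogger()
    root.setLevel(level)
    root.handlers[:] = [logging.handlers.QueueHandler(queue)]


def mp_handle(queue):
    # Runs in the parent: drain the queue and replay each record through
    # the parent's normally configured handlers.
    while not queue.empty():
        record = queue.get()
        logging.getLogger(record.name).handle(record)


def work(queue, n):
    mp_setup(queue, logging.INFO)
    logging.getLogger("worker").info("processing %d", n)
    return n * n


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    log_queue = Manager().Queue()  # proxy object, picklable across processes
    results = Parallel(n_jobs=2)(delayed(work)(log_queue, i) for i in range(4))
    mp_handle(log_queue)  # worker records appear only once the parent drains
    print(results)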

freqtrade/optimize/hyperopt/hyperopt_optimizer.py

@@ -7,6 +7,7 @@ import logging
 import sys
 import warnings
 from datetime import UTC, datetime
+from multiprocessing import Manager
 from pathlib import Path
 from typing import Any
@@ -29,6 +30,7 @@ from freqtrade.optimize.backtesting import Backtesting
 
 # Import IHyperOptLoss to allow unpickling classes from these modules
 from freqtrade.optimize.hyperopt.hyperopt_auto import HyperOptAuto
+from freqtrade.optimize.hyperopt.hyperopt_logger import logging_mp_handle, logging_mp_setup
 from freqtrade.optimize.hyperopt_loss.hyperopt_loss_interface import IHyperOptLoss
 from freqtrade.optimize.hyperopt_tools import HyperoptStateContainer, HyperoptTools
 from freqtrade.optimize.optimize_reports import generate_strategy_stats
@@ -58,6 +60,8 @@ optuna_samplers_dict = {
"QMCSampler": optuna.samplers.QMCSampler,
}
log_queue: Any
class HyperOptimizer:
"""
@@ -113,6 +117,24 @@ class HyperOptimizer:
         if HyperoptTools.has_space(self.config, "sell"):
             # Make sure use_exit_signal is enabled
             self.config["use_exit_signal"] = True
+        self._setup_logging_mp_workaround()
+
+    def _setup_logging_mp_workaround(self) -> None:
+        """
+        Workaround for logging in child processes.
+        local_queue must be a global and passed to the child process via inheritance.
+        """
+        global log_queue
+        m = Manager()
+        log_queue = m.Queue()
+        logger.info(f"manager queue {type(log_queue)}")
+
+    def handle_mp_logging(self) -> None:
+        """
+        Handle logging from child processes.
+        Must be called in the parent process to handle log messages from the child process.
+        """
+        logging_mp_handle(log_queue)
 
     def prepare_hyperopt(self) -> None:
         # Initialize spaces ...
@@ -264,6 +286,7 @@ class HyperOptimizer:
     @delayed
     @wrap_non_picklable_objects
     def generate_optimizer_wrapped(self, params_dict: dict[str, Any]) -> dict[str, Any]:
+        logging_mp_setup(log_queue, logging.INFO if self.config["verbosity"] < 1 else logging.DEBUG)
         return self.generate_optimizer(params_dict)
 
     def generate_optimizer(self, params_dict: dict[str, Any]) -> dict[str, Any]:
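
A note on why relocating the logging_mp_setup call appears to restore worker logging: generate_optimizer_wrapped is decorated with @delayed, and calling a @delayed function does not execute it; the call only records a (function, args, kwargs) task that joblib runs later in a worker. The removed optimizer_wrapper therefore invoked logging_mp_setup in the parent while the task list was being built, and the workers never installed their queue handler. A tiny sketch of that joblib behavior, with hypothetical names task and wrapper:

from joblib import Parallel, delayed


@delayed
def task(x):
    # The body executes in a worker process.
    return x + 1


def wrapper(x):
    # This runs in the parent while Parallel consumes the generator below,
    # so any per-worker setup performed here never reaches a child process.
    print("building task", x)
    return task(x)  # returns a (function, args, kwargs) task; runs nothing


if __name__ == "__main__":
    print(Parallel(n_jobs=2)(wrapper(i) for i in range(3)))  # [1, 2, 3]

With the setup call inside the @delayed body it runs in the worker instead. And since @wrap_non_picklable_objects hands the function to cloudpickle, which serializes functions it cannot import by value together with the globals they reference, the log_queue proxy created in the parent can travel to the worker, which is presumably why the module-level log_queue now lives in hyperopt_optimizer.py next to the only function that reads it.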