fix: move logging setup to hyperopter

restores hyperopt logging functionality
This commit is contained in:
Matthias
2025-10-19 09:09:59 +02:00
parent a0c4b520fc
commit be16ffea23
2 changed files with 25 additions and 25 deletions

View File

@@ -9,7 +9,6 @@ import logging
 import random
 from datetime import datetime
 from math import ceil
-from multiprocessing import Manager
 from pathlib import Path
 from typing import Any
@@ -21,7 +20,6 @@ from freqtrade.constants import FTHYPT_FILEVERSION, LAST_BT_RESULT_FN, Config
 from freqtrade.enums import HyperoptState
 from freqtrade.exceptions import OperationalException
 from freqtrade.misc import file_dump_json, plural
-from freqtrade.optimize.hyperopt.hyperopt_logger import logging_mp_handle, logging_mp_setup
 from freqtrade.optimize.hyperopt.hyperopt_optimizer import INITIAL_POINTS, HyperOptimizer
 from freqtrade.optimize.hyperopt.hyperopt_output import HyperoptOutput
 from freqtrade.optimize.hyperopt_tools import (
@@ -35,9 +33,6 @@ from freqtrade.util import get_progress_tracker
 logger = logging.getLogger(__name__)

-log_queue: Any
-

 class Hyperopt:
     """
     Hyperopt class, this class contains all the logic to run a hyperopt simulation
@@ -149,15 +144,7 @@ class Hyperopt:
     def run_optimizer_parallel(self, parallel: Parallel, asked: list[list]) -> list[dict[str, Any]]:
         """Start optimizer in a parallel way"""
-
-        def optimizer_wrapper(*args, **kwargs):
-            # global log queue. This must happen in the file that initializes Parallel
-            logging_mp_setup(
-                log_queue, logging.INFO if self.config["verbosity"] < 1 else logging.DEBUG
-            )
-            return self.hyperopter.generate_optimizer_wrapped(*args, **kwargs)
-
-        return parallel(optimizer_wrapper(v) for v in asked)
+        return parallel(self.hyperopter.generate_optimizer_wrapped(v) for v in asked)

     def _set_random_state(self, random_state: int | None) -> int:
         return random_state or random.randint(1, 2**16 - 1)  # noqa: S311
@@ -236,15 +223,6 @@ class Hyperopt:
self._save_result(val) self._save_result(val)
def _setup_logging_mp_workaround(self) -> None:
"""
Workaround for logging in child processes.
local_queue must be a global in the file that initializes Parallel.
"""
global log_queue
m = Manager()
log_queue = m.Queue()
def start(self) -> None: def start(self) -> None:
self.random_state = self._set_random_state(self.config.get("hyperopt_random_state")) self.random_state = self._set_random_state(self.config.get("hyperopt_random_state"))
logger.info(f"Using optimizer random state: {self.random_state}") logger.info(f"Using optimizer random state: {self.random_state}")
@@ -257,7 +235,6 @@ class Hyperopt:
logger.info(f"Number of parallel jobs set as: {config_jobs}") logger.info(f"Number of parallel jobs set as: {config_jobs}")
self.opt = self.hyperopter.get_optimizer(self.random_state) self.opt = self.hyperopter.get_optimizer(self.random_state)
self._setup_logging_mp_workaround()
try: try:
with Parallel(n_jobs=config_jobs) as parallel: with Parallel(n_jobs=config_jobs) as parallel:
jobs = parallel._effective_n_jobs() jobs = parallel._effective_n_jobs()
@@ -307,7 +284,7 @@ class Hyperopt:
self.evaluate_result(val, current, is_random[j]) self.evaluate_result(val, current, is_random[j])
pbar.update(task, advance=1) pbar.update(task, advance=1)
logging_mp_handle(log_queue) self.hyperopter.handle_mp_logging()
gc.collect() gc.collect()
if ( if (

View File

@@ -7,6 +7,7 @@ import logging
 import sys
 import warnings
 from datetime import UTC, datetime
+from multiprocessing import Manager
 from pathlib import Path
 from typing import Any
@@ -29,6 +30,7 @@ from freqtrade.optimize.backtesting import Backtesting

 # Import IHyperOptLoss to allow unpickling classes from these modules
 from freqtrade.optimize.hyperopt.hyperopt_auto import HyperOptAuto
+from freqtrade.optimize.hyperopt.hyperopt_logger import logging_mp_handle, logging_mp_setup
 from freqtrade.optimize.hyperopt_loss.hyperopt_loss_interface import IHyperOptLoss
 from freqtrade.optimize.hyperopt_tools import HyperoptStateContainer, HyperoptTools
 from freqtrade.optimize.optimize_reports import generate_strategy_stats
@@ -58,6 +60,8 @@ optuna_samplers_dict = {
     "QMCSampler": optuna.samplers.QMCSampler,
 }

+log_queue: Any
+

 class HyperOptimizer:
     """
@@ -113,6 +117,24 @@ class HyperOptimizer:
         if HyperoptTools.has_space(self.config, "sell"):
             # Make sure use_exit_signal is enabled
             self.config["use_exit_signal"] = True
+        self._setup_logging_mp_workaround()
+
+    def _setup_logging_mp_workaround(self) -> None:
+        """
+        Workaround for logging in child processes.
+        local_queue must be a global and passed to the child process via inheritance.
+        """
+        global log_queue
+        m = Manager()
+        log_queue = m.Queue()
+        logger.info(f"manager queue {type(log_queue)}")
+
+    def handle_mp_logging(self) -> None:
+        """
+        Handle logging from child processes.
+        Must be called in the parent process to handle log messages from the child process.
+        """
+        logging_mp_handle(log_queue)

     def prepare_hyperopt(self) -> None:
         # Initialize spaces ...
@@ -264,6 +286,7 @@ class HyperOptimizer:
@delayed @delayed
@wrap_non_picklable_objects @wrap_non_picklable_objects
def generate_optimizer_wrapped(self, params_dict: dict[str, Any]) -> dict[str, Any]: def generate_optimizer_wrapped(self, params_dict: dict[str, Any]) -> dict[str, Any]:
logging_mp_setup(log_queue, logging.INFO if self.config["verbosity"] < 1 else logging.DEBUG)
return self.generate_optimizer(params_dict) return self.generate_optimizer(params_dict)
def generate_optimizer(self, params_dict: dict[str, Any]) -> dict[str, Any]: def generate_optimizer(self, params_dict: dict[str, Any]) -> dict[str, Any]: