Merge pull request #11736 from viotemp1/optuna_addons

add early stopping for hyperopt
Matthias
2025-05-20 06:45:15 +02:00
committed by GitHub
8 changed files with 85 additions and 0 deletions

View File

@@ -16,6 +16,7 @@ usage: freqtrade hyperopt [-h] [-v] [--no-color] [--logfile FILE] [-V]
                          [--random-state INT] [--min-trades INT]
                          [--hyperopt-loss NAME] [--disable-param-export]
                          [--ignore-missing-spaces] [--analyze-per-epoch]
                          [--early-stop INT]

options:
  -h, --help            show this help message and exit
@@ -87,6 +88,8 @@ options:
                        Suppress errors for any requested Hyperopt spaces that
                        do not contain any parameters.
  --analyze-per-epoch   Run populate_indicators once per epoch.
  --early-stop INT      Early stop hyperopt if no improvement after (default:
                        0) epochs.

Common arguments:
  -v, --verbose         Verbose mode (-vv for more, -vvv to get all messages).

View File

@@ -490,6 +490,8 @@ freqtrade hyperopt --config config.json --hyperopt-loss <hyperoptlossname> --str
```
The `-e` option will set how many evaluations hyperopt will do. Since hyperopt uses Bayesian search, running too many epochs at once may not produce greater results. Experience has shown that best results are usually not improving much after 500-1000 epochs.
The `--early-stop` option sets the number of epochs without improvement after which hyperopt will stop. A good value is 20-30% of the total epochs. Any value greater than 0 and lower than 20 will be replaced by 20. Early stopping is disabled by default (`--early-stop=0`).
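As an illustration of the rule above (not part of this PR), a hypothetical `effective_early_stop` helper shows which value is actually used for a given `--early-stop` input:

```python
def effective_early_stop(early_stop: int) -> int:
    """Illustrative sketch of the documented --early-stop behaviour."""
    if early_stop <= 0:
        return 0  # 0 (the default) keeps early stopping disabled
    return max(early_stop, 20)  # values 1-19 are raised to the minimum of 20


assert effective_early_stop(0) == 0      # disabled (default)
assert effective_early_stop(5) == 20     # too low, replaced with 20
assert effective_early_stop(250) == 250  # e.g. ~25% of a 1000-epoch run
```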
Doing multiple runs (executions) with a few 1000 epochs and different random state will most likely produce different results.
The `--spaces all` option determines that all possible parameters should be optimized. Possibilities are listed below.

View File

@@ -78,6 +78,7 @@ ARGS_HYPEROPT = [
"disableparamexport",
"hyperopt_ignore_missing_space",
"analyze_per_epoch",
"early_stop",
]
ARGS_EDGE = [*ARGS_COMMON_OPTIMIZE, "stoploss_range"]

View File

@@ -262,6 +262,13 @@ AVAILABLE_CLI_OPTIONS = {
metavar="INT",
default=constants.HYPEROPT_EPOCH,
),
"early_stop": Arg(
"--early-stop",
help="Early stop hyperopt if no improvement after (default: %(default)d) epochs.",
type=check_int_positive,
metavar="INT",
default=0, # 0 to disable by default
),
"spaces": Arg(
"--spaces",
help="Specify which parameters to hyperopt. Space-separated list.",

View File

@@ -334,6 +334,19 @@ class Configuration:
("print_all", "Parameter --print-all detected ..."),
]
self._args_to_config_loop(config, configurations)
es_epochs = self.args.get("early_stop", 0)
if es_epochs > 0:
if es_epochs < 20:
logger.warning(
f"Early stop epochs {es_epochs} lower than 20. It will be replaced with 20."
)
config.update({"early_stop": 20})
else:
config.update({"early_stop": self.args["early_stop"]})
logger.info(
f"Parameter --early-stop detected ... Will early stop hyperopt if no improvement "
f"after {config.get('early_stop')} epochs ..."
)
configurations = [
("print_json", "Parameter --print-json detected ..."),

View File

@@ -317,6 +317,13 @@ class Hyperopt:
                    logging_mp_handle(log_queue)
                    gc.collect()

                    if (
                        self.hyperopter.es_epochs > 0
                        and self.hyperopter.es_terminator.should_terminate(self.opt)
                    ):
                        logger.info(f"Early stopping after {(i + 1) * jobs} epochs")
                        break

        except KeyboardInterrupt:
            print("User interrupted..")

View File

@@ -14,6 +14,7 @@ import optuna
from joblib import delayed, dump, load, wrap_non_picklable_objects
from joblib.externals import cloudpickle
from optuna.exceptions import ExperimentalWarning
from optuna.terminator import BestValueStagnationEvaluator, Terminator
from pandas import DataFrame
from freqtrade.constants import DATETIME_PRINT_FORMAT, Config
@@ -104,6 +105,10 @@ class HyperOptimizer:
        self.market_change = 0.0

        self.es_epochs = config.get("early_stop", 0)
        if self.es_epochs > 0 and self.es_epochs < 0.2 * config.get("epochs", 0):
            logger.warning(f"Early stop epochs {self.es_epochs} lower than 20% of total epochs")

        if HyperoptTools.has_space(self.config, "sell"):
            # Make sure use_exit_signal is enabled
            self.config["use_exit_signal"] = True
@@ -424,6 +429,11 @@ class HyperOptimizer:
        else:
            sampler = o_sampler

        if self.es_epochs > 0:
            with warnings.catch_warnings():
                warnings.filterwarnings(action="ignore", category=ExperimentalWarning)
                self.es_terminator = Terminator(BestValueStagnationEvaluator(self.es_epochs))

        logger.info(f"Using optuna sampler {o_sampler}.")
        return optuna.create_study(sampler=sampler, direction="minimize")
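The imports added above come from optuna's experimental terminator API; a self-contained example (separate from freqtrade) of how `Terminator`, `BestValueStagnationEvaluator`, and `should_terminate` fit together might look like the sketch below. Note that, in current optuna versions, `Terminator` also refuses to terminate before a minimum number of trials (`min_n_trials=20` by default), which lines up with the 20-epoch floor enforced in the configuration step.

```python
# Standalone illustration (not freqtrade code) of optuna's experimental terminator API.
import warnings

import optuna
from optuna.exceptions import ExperimentalWarning
from optuna.terminator import BestValueStagnationEvaluator, Terminator


def objective(trial: optuna.Trial) -> float:
    x = trial.suggest_float("x", -10, 10)
    return (x - 2) ** 2


with warnings.catch_warnings():
    warnings.filterwarnings(action="ignore", category=ExperimentalWarning)
    # Terminate once the best value has not improved for 25 consecutive trials.
    terminator = Terminator(BestValueStagnationEvaluator(25))

study = optuna.create_study(direction="minimize")
for _ in range(1000):
    study.optimize(objective, n_trials=1)
    if terminator.should_terminate(study):
        print(f"Stopped after {len(study.trials)} trials without improvement.")
        break
```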

View File

@@ -170,6 +170,48 @@ def test_setup_hyperopt_configuration_stake_amount(mocker, default_conf) -> None
        setup_optimize_configuration(get_args(args), RunMode.HYPEROPT)


def test_setup_hyperopt_early_stop_setup(mocker, default_conf, caplog) -> None:
    patched_configuration_load_config_file(mocker, default_conf)

    args = [
        "hyperopt",
        "--config",
        "config.json",
        "--strategy",
        "HyperoptableStrategy",
        "--early-stop",
        "1",
    ]

    conf = setup_optimize_configuration(get_args(args), RunMode.HYPEROPT)
    assert isinstance(conf, dict)
    assert conf["early_stop"] == 20
    msg = (
        r"Parameter --early-stop detected ... "
        r"Will early stop hyperopt if no improvement after (20|25) epochs ..."
    )
    msg_adjust = r"Early stop epochs .* lower than 20. It will be replaced with 20."
    assert log_has_re(msg_adjust, caplog)
    assert log_has_re(msg, caplog)
    caplog.clear()

    args = [
        "hyperopt",
        "--config",
        "config.json",
        "--strategy",
        CURRENT_TEST_STRATEGY,
        "--early-stop",
        "25",
    ]
    conf1 = setup_optimize_configuration(get_args(args), RunMode.HYPEROPT)
    assert isinstance(conf1, dict)
    assert conf1["early_stop"] == 25
    assert not log_has_re(msg_adjust, caplog)
    assert log_has_re(msg, caplog)


def test_start_not_installed(mocker, default_conf, import_fails) -> None:
    start_mock = MagicMock()
    patched_configuration_load_config_file(mocker, default_conf)