Merge pull request #10748 from freqtrade/maint/bump_ruff_minpython

Bump ruff target version to 3.9
Authored by Matthias on 2024-10-06 08:56:50 +02:00, committed by GitHub
180 changed files with 1204 additions and 1216 deletions
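The bulk of the diff below is a mechanical typing cleanup that becomes possible once ruff targets Python 3.9: PEP 585 lets the built-in dict, list, tuple, and type be used as generic annotations directly, so the typing.Dict / List / Tuple / Type aliases and their imports can be dropped. Optional and Union still come from typing, since the X | Y union syntax only arrives with Python 3.10. The rewrite is presumably driven by bumping target-version to "py39" in pyproject.toml and letting ruff's pyupgrade rules (UP006 and UP035) do the work; the following short sketch is a hypothetical illustration of the before/after pattern, not a file from this PR.

from typing import Any, Optional  # Dict/List/Tuple no longer need to be imported

# Before (Python 3.8-compatible spelling):
# def load_config(files: List[str]) -> Dict[str, Any]: ...

# After (PEP 585 built-in generics, valid at runtime from Python 3.9):
def load_config(files: list[str], fallback: Optional[dict[str, Any]] = None) -> dict[str, Any]:
    """Merge hypothetical config file names into one dict (illustration only)."""
    merged: dict[str, Any] = dict(fallback or {})
    for file in files:
        merged.setdefault("config_files", []).append(file)
    return merged

# Type aliases use the same lowercase spelling:
PairWithTimeframe = tuple[str, str, str]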

View File

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Any, Dict
from typing import Any
from freqtrade.enums import RunMode
from freqtrade.exceptions import ConfigurationError, OperationalException
@@ -9,7 +9,7 @@ from freqtrade.exceptions import ConfigurationError, OperationalException
logger = logging.getLogger(__name__)
def setup_analyze_configuration(args: Dict[str, Any], method: RunMode) -> Dict[str, Any]:
def setup_analyze_configuration(args: dict[str, Any], method: RunMode) -> dict[str, Any]:
"""
Prepare the configuration for the entry/exit reason analysis module
:param args: Cli args from Arguments()
@@ -48,7 +48,7 @@ def setup_analyze_configuration(args: Dict[str, Any], method: RunMode) -> Dict[s
return config
def start_analysis_entries_exits(args: Dict[str, Any]) -> None:
def start_analysis_entries_exits(args: dict[str, Any]) -> None:
"""
Start analysis script
:param args: Cli args from Arguments()

View File

@@ -5,7 +5,7 @@ This module contains the argument manager class
from argparse import ArgumentParser, Namespace, _ArgumentGroup
from functools import partial
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
from typing import Any, Optional, Union
from freqtrade.commands.cli_options import AVAILABLE_CLI_OPTIONS
from freqtrade.constants import DEFAULT_CONFIG
@@ -23,7 +23,7 @@ ARGS_STRATEGY = [
ARGS_TRADE = ["db_url", "sd_notify", "dry_run", "dry_run_wallet", "fee"]
ARGS_WEBSERVER: List[str] = []
ARGS_WEBSERVER: list[str] = []
ARGS_COMMON_OPTIMIZE = [
"timeframe",
@@ -277,11 +277,11 @@ class Arguments:
Arguments Class. Manage the arguments received by the cli
"""
def __init__(self, args: Optional[List[str]]) -> None:
def __init__(self, args: Optional[list[str]]) -> None:
self.args = args
self._parsed_arg: Optional[Namespace] = None
def get_parsed_arg(self) -> Dict[str, Any]:
def get_parsed_arg(self) -> dict[str, Any]:
"""
Return the list of arguments
:return: List[str] List of arguments
@@ -322,7 +322,7 @@ class Arguments:
return parsed_arg
def _build_args(
self, optionlist: List[str], parser: Union[ArgumentParser, _ArgumentGroup]
self, optionlist: list[str], parser: Union[ArgumentParser, _ArgumentGroup]
) -> None:
for val in optionlist:
opt = AVAILABLE_CLI_OPTIONS[val]

View File

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Any, Dict
from typing import Any
from freqtrade.enums import RunMode
from freqtrade.exceptions import OperationalException
@@ -9,7 +9,7 @@ from freqtrade.exceptions import OperationalException
logger = logging.getLogger(__name__)
def start_new_config(args: Dict[str, Any]) -> None:
def start_new_config(args: dict[str, Any]) -> None:
"""
Create a new strategy from a template
Asking the user questions to fill out the template accordingly.
@@ -37,7 +37,7 @@ def start_new_config(args: Dict[str, Any]) -> None:
deploy_new_config(config_path, selections)
def start_show_config(args: Dict[str, Any]) -> None:
def start_show_config(args: dict[str, Any]) -> None:
from freqtrade.configuration import sanitize_config
from freqtrade.configuration.config_setup import setup_utils_configuration

View File

@@ -1,7 +1,7 @@
import logging
import sys
from collections import defaultdict
from typing import Any, Dict
from typing import Any
from freqtrade.constants import DATETIME_PRINT_FORMAT, DL_DATA_TIMEFRAMES, Config
from freqtrade.enums import CandleType, RunMode, TradingMode
@@ -26,7 +26,7 @@ def _check_data_config_download_sanity(config: Config) -> None:
)
def start_download_data(args: Dict[str, Any]) -> None:
def start_download_data(args: dict[str, Any]) -> None:
"""
Download data (former download_backtest_data.py script)
"""
@@ -44,7 +44,7 @@ def start_download_data(args: Dict[str, Any]) -> None:
sys.exit("SIGINT received, aborting ...")
def start_convert_trades(args: Dict[str, Any]) -> None:
def start_convert_trades(args: dict[str, Any]) -> None:
from freqtrade.configuration import TimeRange, setup_utils_configuration
from freqtrade.data.converter import convert_trades_to_ohlcv
from freqtrade.resolvers import ExchangeResolver
@@ -87,7 +87,7 @@ def start_convert_trades(args: Dict[str, Any]) -> None:
)
def start_convert_data(args: Dict[str, Any], ohlcv: bool = True) -> None:
def start_convert_data(args: dict[str, Any], ohlcv: bool = True) -> None:
"""
Convert data from one format to another
"""
@@ -113,7 +113,7 @@ def start_convert_data(args: Dict[str, Any], ohlcv: bool = True) -> None:
)
def start_list_data(args: Dict[str, Any]) -> None:
def start_list_data(args: dict[str, Any]) -> None:
"""
List available OHLCV data
"""
@@ -179,7 +179,7 @@ def start_list_data(args: Dict[str, Any]) -> None:
)
def start_list_trades_data(args: Dict[str, Any]) -> None:
def start_list_trades_data(args: dict[str, Any]) -> None:
"""
List available Trades data
"""

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any
from freqtrade.enums import RunMode
@@ -7,7 +7,7 @@ from freqtrade.enums import RunMode
logger = logging.getLogger(__name__)
def start_convert_db(args: Dict[str, Any]) -> None:
def start_convert_db(args: dict[str, Any]) -> None:
from sqlalchemy import func, select
from sqlalchemy.orm import make_transient

View File

@@ -1,7 +1,7 @@
import logging
import sys
from pathlib import Path
from typing import Any, Dict
from typing import Any
from freqtrade.constants import USERPATH_STRATEGIES
from freqtrade.enums import RunMode
@@ -15,7 +15,7 @@ logger = logging.getLogger(__name__)
req_timeout = 30
def start_create_userdir(args: Dict[str, Any]) -> None:
def start_create_userdir(args: dict[str, Any]) -> None:
"""
Create "user_data" directory to contain user data strategies, hyperopt, ...)
:param args: Cli args from Arguments()
@@ -80,7 +80,7 @@ def deploy_new_strategy(strategy_name: str, strategy_path: Path, subtemplate: st
strategy_path.write_text(strategy_text)
def start_new_strategy(args: Dict[str, Any]) -> None:
def start_new_strategy(args: dict[str, Any]) -> None:
from freqtrade.configuration import setup_utils_configuration
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
@@ -99,7 +99,7 @@ def start_new_strategy(args: Dict[str, Any]) -> None:
raise ConfigurationError("`new-strategy` requires --strategy to be set.")
def start_install_ui(args: Dict[str, Any]) -> None:
def start_install_ui(args: dict[str, Any]) -> None:
from freqtrade.commands.deploy_ui import (
clean_ui_subdir,
download_and_install_ui,

View File

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Optional, Tuple
from typing import Optional
import requests
@@ -52,7 +52,7 @@ def download_and_install_ui(dest_folder: Path, dl_url: str, version: str):
f.write(version)
def get_ui_download_url(version: Optional[str] = None) -> Tuple[str, str]:
def get_ui_download_url(version: Optional[str] = None) -> tuple[str, str]:
base_url = "https://api.github.com/repos/freqtrade/frequi/"
# Get base UI Repo path

View File

@@ -1,6 +1,6 @@
import logging
from operator import itemgetter
from typing import Any, Dict
from typing import Any
from freqtrade.enums import RunMode
from freqtrade.exceptions import OperationalException
@@ -9,7 +9,7 @@ from freqtrade.exceptions import OperationalException
logger = logging.getLogger(__name__)
def start_hyperopt_list(args: Dict[str, Any]) -> None:
def start_hyperopt_list(args: dict[str, Any]) -> None:
"""
List hyperopt epochs previously evaluated
"""
@@ -56,7 +56,7 @@ def start_hyperopt_list(args: Dict[str, Any]) -> None:
HyperoptTools.export_csv_file(config, epochs, export_csv)
def start_hyperopt_show(args: Dict[str, Any]) -> None:
def start_hyperopt_show(args: dict[str, Any]) -> None:
"""
Show details of a hyperopt epoch previously evaluated
"""

View File

@@ -1,7 +1,7 @@
import csv
import logging
import sys
from typing import Any, Dict, List, Union
from typing import Any, Union
from freqtrade.enums import RunMode
from freqtrade.exceptions import ConfigurationError, OperationalException
@@ -11,7 +11,7 @@ from freqtrade.ft_types import ValidExchangesType
logger = logging.getLogger(__name__)
def start_list_exchanges(args: Dict[str, Any]) -> None:
def start_list_exchanges(args: dict[str, Any]) -> None:
"""
Print available exchanges
:param args: Cli args from Arguments()
@@ -23,7 +23,7 @@ def start_list_exchanges(args: Dict[str, Any]) -> None:
from freqtrade.exchange import list_available_exchanges
available_exchanges: List[ValidExchangesType] = list_available_exchanges(
available_exchanges: list[ValidExchangesType] = list_available_exchanges(
args["list_exchanges_all"]
)
@@ -81,13 +81,13 @@ def start_list_exchanges(args: Dict[str, Any]) -> None:
console.print(table)
def _print_objs_tabular(objs: List, print_colorized: bool) -> None:
def _print_objs_tabular(objs: list, print_colorized: bool) -> None:
from rich.console import Console
from rich.table import Table
from rich.text import Text
names = [s["name"] for s in objs]
objs_to_print: List[Dict[str, Union[Text, str]]] = [
objs_to_print: list[dict[str, Union[Text, str]]] = [
{
"name": Text(s["name"] if s["name"] else "--"),
"location": s["location_rel"],
@@ -125,7 +125,7 @@ def _print_objs_tabular(objs: List, print_colorized: bool) -> None:
console.print(table)
def start_list_strategies(args: Dict[str, Any]) -> None:
def start_list_strategies(args: dict[str, Any]) -> None:
"""
Print files with Strategy custom classes available in the directory
"""
@@ -151,7 +151,7 @@ def start_list_strategies(args: Dict[str, Any]) -> None:
_print_objs_tabular(strategy_objs, config.get("print_colorized", False))
def start_list_freqAI_models(args: Dict[str, Any]) -> None:
def start_list_freqAI_models(args: dict[str, Any]) -> None:
"""
Print files with FreqAI models custom classes available in the directory
"""
@@ -169,7 +169,7 @@ def start_list_freqAI_models(args: Dict[str, Any]) -> None:
_print_objs_tabular(model_objs, config.get("print_colorized", False))
def start_list_timeframes(args: Dict[str, Any]) -> None:
def start_list_timeframes(args: dict[str, Any]) -> None:
"""
Print timeframes available on Exchange
"""
@@ -192,7 +192,7 @@ def start_list_timeframes(args: Dict[str, Any]) -> None:
)
def start_list_markets(args: Dict[str, Any], pairs_only: bool = False) -> None:
def start_list_markets(args: dict[str, Any], pairs_only: bool = False) -> None:
"""
Print pairs/markets on the exchange
:param args: Cli args from Arguments()
@@ -312,7 +312,7 @@ def start_list_markets(args: Dict[str, Any], pairs_only: bool = False) -> None:
print(f"{summary_str}.")
def start_show_trades(args: Dict[str, Any]) -> None:
def start_show_trades(args: dict[str, Any]) -> None:
"""
Show trades
"""

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any
from freqtrade import constants
from freqtrade.enums import RunMode
@@ -9,7 +9,7 @@ from freqtrade.exceptions import ConfigurationError, OperationalException
logger = logging.getLogger(__name__)
def setup_optimize_configuration(args: Dict[str, Any], method: RunMode) -> Dict[str, Any]:
def setup_optimize_configuration(args: dict[str, Any], method: RunMode) -> dict[str, Any]:
"""
Prepare the configuration for the Hyperopt module
:param args: Cli args from Arguments()
@@ -42,7 +42,7 @@ def setup_optimize_configuration(args: Dict[str, Any], method: RunMode) -> Dict[
return config
def start_backtesting(args: Dict[str, Any]) -> None:
def start_backtesting(args: dict[str, Any]) -> None:
"""
Start Backtesting script
:param args: Cli args from Arguments()
@@ -61,7 +61,7 @@ def start_backtesting(args: Dict[str, Any]) -> None:
backtesting.start()
def start_backtesting_show(args: Dict[str, Any]) -> None:
def start_backtesting_show(args: dict[str, Any]) -> None:
"""
Show previous backtest result
"""
@@ -78,7 +78,7 @@ def start_backtesting_show(args: Dict[str, Any]) -> None:
show_sorted_pairlist(config, results)
def start_hyperopt(args: Dict[str, Any]) -> None:
def start_hyperopt(args: dict[str, Any]) -> None:
"""
Start hyperopt script
:param args: Cli args from Arguments()
@@ -123,7 +123,7 @@ def start_hyperopt(args: Dict[str, Any]) -> None:
# Same in Edge and Backtesting start() functions.
def start_edge(args: Dict[str, Any]) -> None:
def start_edge(args: dict[str, Any]) -> None:
"""
Start Edge script
:param args: Cli args from Arguments()
@@ -140,7 +140,7 @@ def start_edge(args: Dict[str, Any]) -> None:
edge_cli.start()
def start_lookahead_analysis(args: Dict[str, Any]) -> None:
def start_lookahead_analysis(args: dict[str, Any]) -> None:
"""
Start the backtest bias tester script
:param args: Cli args from Arguments()
@@ -153,7 +153,7 @@ def start_lookahead_analysis(args: Dict[str, Any]) -> None:
LookaheadAnalysisSubFunctions.start(config)
def start_recursive_analysis(args: Dict[str, Any]) -> None:
def start_recursive_analysis(args: dict[str, Any]) -> None:
"""
Start the backtest recursive tester script
:param args: Cli args from Arguments()

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any
import rapidjson
@@ -9,7 +9,7 @@ from freqtrade.enums import RunMode
logger = logging.getLogger(__name__)
def start_test_pairlist(args: Dict[str, Any]) -> None:
def start_test_pairlist(args: dict[str, Any]) -> None:
"""
Test Pairlist configuration
"""

View File

@@ -1,10 +1,10 @@
from typing import Any, Dict
from typing import Any
from freqtrade.enums import RunMode
from freqtrade.exceptions import ConfigurationError
def validate_plot_args(args: Dict[str, Any]) -> None:
def validate_plot_args(args: dict[str, Any]) -> None:
if not args.get("datadir") and not args.get("config"):
raise ConfigurationError(
"You need to specify either `--datadir` or `--config` "
@@ -12,7 +12,7 @@ def validate_plot_args(args: Dict[str, Any]) -> None:
)
def start_plot_dataframe(args: Dict[str, Any]) -> None:
def start_plot_dataframe(args: dict[str, Any]) -> None:
"""
Entrypoint for dataframe plotting
"""
@@ -26,7 +26,7 @@ def start_plot_dataframe(args: Dict[str, Any]) -> None:
load_and_plot_trades(config)
def start_plot_profit(args: Dict[str, Any]) -> None:
def start_plot_profit(args: dict[str, Any]) -> None:
"""
Entrypoint for plot_profit
"""

View File

@@ -1,7 +1,7 @@
import logging
import time
from pathlib import Path
from typing import Any, Dict
from typing import Any
from freqtrade.enums import RunMode
@@ -9,7 +9,7 @@ from freqtrade.enums import RunMode
logger = logging.getLogger(__name__)
def start_strategy_update(args: Dict[str, Any]) -> None:
def start_strategy_update(args: dict[str, Any]) -> None:
"""
Start the strategy updating script
:param args: Cli args from Arguments()

View File

@@ -1,12 +1,12 @@
import logging
import signal
from typing import Any, Dict
from typing import Any
logger = logging.getLogger(__name__)
def start_trading(args: Dict[str, Any]) -> int:
def start_trading(args: dict[str, Any]) -> int:
"""
Main entry point for trading mode
"""

View File

@@ -1,9 +1,9 @@
from typing import Any, Dict
from typing import Any
from freqtrade.enums import RunMode
def start_webserver(args: Dict[str, Any]) -> None:
def start_webserver(args: dict[str, Any]) -> None:
"""
Main entry point for webserver mode
"""

View File

@@ -1,5 +1,4 @@
# Required json-schema for user specified config
from typing import Dict
from freqtrade.constants import (
AVAILABLE_DATAHANDLERS,
@@ -23,7 +22,7 @@ from freqtrade.constants import (
from freqtrade.enums import RPCMessageType
__MESSAGE_TYPE_DICT: Dict[str, Dict[str, str]] = {x: {"type": "object"} for x in RPCMessageType}
__MESSAGE_TYPE_DICT: dict[str, dict[str, str]] = {x: {"type": "object"} for x in RPCMessageType}
__IN_STRATEGY = "\nUsually specified in the strategy and missing in the configuration."

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any
from freqtrade.enums import RunMode
@@ -11,8 +11,8 @@ logger = logging.getLogger(__name__)
def setup_utils_configuration(
args: Dict[str, Any], method: RunMode, *, set_dry: bool = True
) -> Dict[str, Any]:
args: dict[str, Any], method: RunMode, *, set_dry: bool = True
) -> dict[str, Any]:
"""
Prepare the configuration for utils subcommands
:param args: Cli args from Arguments()

View File

@@ -1,7 +1,7 @@
import logging
from collections import Counter
from copy import deepcopy
from typing import Any, Dict
from typing import Any
from jsonschema import Draft4Validator, validators
from jsonschema.exceptions import ValidationError, best_match
@@ -43,7 +43,7 @@ def _extend_validator(validator_class):
FreqtradeValidator = _extend_validator(Draft4Validator)
def validate_config_schema(conf: Dict[str, Any], preliminary: bool = False) -> Dict[str, Any]:
def validate_config_schema(conf: dict[str, Any], preliminary: bool = False) -> dict[str, Any]:
"""
Validate the configuration follow the Config Schema
:param conf: Config in JSON format
@@ -69,7 +69,7 @@ def validate_config_schema(conf: Dict[str, Any], preliminary: bool = False) -> D
raise ValidationError(best_match(Draft4Validator(conf_schema).iter_errors(conf)).message)
def validate_config_consistency(conf: Dict[str, Any], *, preliminary: bool = False) -> None:
def validate_config_consistency(conf: dict[str, Any], *, preliminary: bool = False) -> None:
"""
Validate the configuration consistency.
Should be ran after loading both configuration and strategy,
@@ -97,7 +97,7 @@ def validate_config_consistency(conf: Dict[str, Any], *, preliminary: bool = Fal
validate_config_schema(conf, preliminary=preliminary)
def _validate_unlimited_amount(conf: Dict[str, Any]) -> None:
def _validate_unlimited_amount(conf: dict[str, Any]) -> None:
"""
If edge is disabled, either max_open_trades or stake_amount need to be set.
:raise: ConfigurationError if config validation failed
@@ -110,7 +110,7 @@ def _validate_unlimited_amount(conf: Dict[str, Any]) -> None:
raise ConfigurationError("`max_open_trades` and `stake_amount` cannot both be unlimited.")
def _validate_price_config(conf: Dict[str, Any]) -> None:
def _validate_price_config(conf: dict[str, Any]) -> None:
"""
When using market orders, price sides must be using the "other" side of the price
"""
@@ -126,7 +126,7 @@ def _validate_price_config(conf: Dict[str, Any]) -> None:
raise ConfigurationError('Market exit orders require exit_pricing.price_side = "other".')
def _validate_trailing_stoploss(conf: Dict[str, Any]) -> None:
def _validate_trailing_stoploss(conf: dict[str, Any]) -> None:
if conf.get("stoploss") == 0.0:
raise ConfigurationError(
"The config stoploss needs to be different from 0 to avoid problems with sell orders."
@@ -159,7 +159,7 @@ def _validate_trailing_stoploss(conf: Dict[str, Any]) -> None:
)
def _validate_edge(conf: Dict[str, Any]) -> None:
def _validate_edge(conf: dict[str, Any]) -> None:
"""
Edge and Dynamic whitelist should not both be enabled, since edge overrides dynamic whitelists.
"""
@@ -173,7 +173,7 @@ def _validate_edge(conf: Dict[str, Any]) -> None:
)
def _validate_whitelist(conf: Dict[str, Any]) -> None:
def _validate_whitelist(conf: dict[str, Any]) -> None:
"""
Dynamic whitelist does not require pair_whitelist to be set - however StaticWhitelist does.
"""
@@ -194,7 +194,7 @@ def _validate_whitelist(conf: Dict[str, Any]) -> None:
raise ConfigurationError("StaticPairList requires pair_whitelist to be set.")
def _validate_ask_orderbook(conf: Dict[str, Any]) -> None:
def _validate_ask_orderbook(conf: dict[str, Any]) -> None:
ask_strategy = conf.get("exit_pricing", {})
ob_min = ask_strategy.get("order_book_min")
ob_max = ask_strategy.get("order_book_max")
@@ -214,7 +214,7 @@ def _validate_ask_orderbook(conf: Dict[str, Any]) -> None:
)
def validate_migrated_strategy_settings(conf: Dict[str, Any]) -> None:
def validate_migrated_strategy_settings(conf: dict[str, Any]) -> None:
_validate_time_in_force(conf)
_validate_order_types(conf)
_validate_unfilledtimeout(conf)
@@ -222,7 +222,7 @@ def validate_migrated_strategy_settings(conf: Dict[str, Any]) -> None:
_strategy_settings(conf)
def _validate_time_in_force(conf: Dict[str, Any]) -> None:
def _validate_time_in_force(conf: dict[str, Any]) -> None:
time_in_force = conf.get("order_time_in_force", {})
if "buy" in time_in_force or "sell" in time_in_force:
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
@@ -243,7 +243,7 @@ def _validate_time_in_force(conf: Dict[str, Any]) -> None:
)
def _validate_order_types(conf: Dict[str, Any]) -> None:
def _validate_order_types(conf: dict[str, Any]) -> None:
order_types = conf.get("order_types", {})
old_order_types = [
"buy",
@@ -278,7 +278,7 @@ def _validate_order_types(conf: Dict[str, Any]) -> None:
process_deprecated_setting(conf, "order_types", o, "order_types", n)
def _validate_unfilledtimeout(conf: Dict[str, Any]) -> None:
def _validate_unfilledtimeout(conf: dict[str, Any]) -> None:
unfilledtimeout = conf.get("unfilledtimeout", {})
if any(x in unfilledtimeout for x in ["buy", "sell"]):
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
@@ -297,7 +297,7 @@ def _validate_unfilledtimeout(conf: Dict[str, Any]) -> None:
process_deprecated_setting(conf, "unfilledtimeout", o, "unfilledtimeout", n)
def _validate_pricing_rules(conf: Dict[str, Any]) -> None:
def _validate_pricing_rules(conf: dict[str, Any]) -> None:
if conf.get("ask_strategy") or conf.get("bid_strategy"):
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
raise ConfigurationError("Please migrate your pricing settings to use the new wording.")
@@ -327,7 +327,7 @@ def _validate_pricing_rules(conf: Dict[str, Any]) -> None:
del conf["ask_strategy"]
def _validate_freqai_hyperopt(conf: Dict[str, Any]) -> None:
def _validate_freqai_hyperopt(conf: dict[str, Any]) -> None:
freqai_enabled = conf.get("freqai", {}).get("enabled", False)
analyze_per_epoch = conf.get("analyze_per_epoch", False)
if analyze_per_epoch and freqai_enabled:
@@ -336,7 +336,7 @@ def _validate_freqai_hyperopt(conf: Dict[str, Any]) -> None:
)
def _validate_freqai_include_timeframes(conf: Dict[str, Any], preliminary: bool) -> None:
def _validate_freqai_include_timeframes(conf: dict[str, Any], preliminary: bool) -> None:
freqai_enabled = conf.get("freqai", {}).get("enabled", False)
if freqai_enabled:
main_tf = conf.get("timeframe", "5m")
@@ -367,7 +367,7 @@ def _validate_freqai_include_timeframes(conf: Dict[str, Any], preliminary: bool)
)
def _validate_freqai_backtest(conf: Dict[str, Any]) -> None:
def _validate_freqai_backtest(conf: dict[str, Any]) -> None:
if conf.get("runmode", RunMode.OTHER) == RunMode.BACKTEST:
freqai_enabled = conf.get("freqai", {}).get("enabled", False)
timerange = conf.get("timerange")
@@ -390,7 +390,7 @@ def _validate_freqai_backtest(conf: Dict[str, Any]) -> None:
)
def _validate_consumers(conf: Dict[str, Any]) -> None:
def _validate_consumers(conf: dict[str, Any]) -> None:
emc_conf = conf.get("external_message_consumer", {})
if emc_conf.get("enabled", False):
if len(emc_conf.get("producers", [])) < 1:
@@ -410,7 +410,7 @@ def _validate_consumers(conf: Dict[str, Any]) -> None:
)
def _validate_orderflow(conf: Dict[str, Any]) -> None:
def _validate_orderflow(conf: dict[str, Any]) -> None:
if conf.get("exchange", {}).get("use_public_trades"):
if "orderflow" not in conf:
raise ConfigurationError(
@@ -418,7 +418,7 @@ def _validate_orderflow(conf: Dict[str, Any]) -> None:
)
def _strategy_settings(conf: Dict[str, Any]) -> None:
def _strategy_settings(conf: dict[str, Any]) -> None:
process_deprecated_setting(conf, None, "use_sell_signal", None, "use_exit_signal")
process_deprecated_setting(conf, None, "sell_profit_only", None, "exit_profit_only")
process_deprecated_setting(conf, None, "sell_profit_offset", None, "exit_profit_offset")

View File

@@ -7,7 +7,7 @@ import logging
import warnings
from copy import deepcopy
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Tuple
from typing import Any, Callable, Optional
from freqtrade import constants
from freqtrade.configuration.deprecated_settings import process_temporary_deprecated_settings
@@ -37,7 +37,7 @@ class Configuration:
Reuse this class for the bot, backtesting, hyperopt and every script that required configuration
"""
def __init__(self, args: Dict[str, Any], runmode: Optional[RunMode] = None) -> None:
def __init__(self, args: dict[str, Any], runmode: Optional[RunMode] = None) -> None:
self.args = args
self.config: Optional[Config] = None
self.runmode = runmode
@@ -53,7 +53,7 @@ class Configuration:
return self.config
@staticmethod
def from_files(files: List[str]) -> Dict[str, Any]:
def from_files(files: list[str]) -> dict[str, Any]:
"""
Iterate through the config files passed in, loading all of them
and merging their contents.
@@ -68,7 +68,7 @@ class Configuration:
c = Configuration({"config": files}, RunMode.OTHER)
return c.get_config()
def load_config(self) -> Dict[str, Any]:
def load_config(self) -> dict[str, Any]:
"""
Extract information for sys.argv and load the bot configuration
:return: Configuration dictionary
@@ -421,7 +421,7 @@ class Configuration:
]
self._args_to_config_loop(config, configurations)
def _args_to_config_loop(self, config, configurations: List[Tuple[str, str]]) -> None:
def _args_to_config_loop(self, config, configurations: list[tuple[str, str]]) -> None:
for argname, logstring in configurations:
self._args_to_config(config, argname=argname, logstring=logstring)

View File

@@ -1,7 +1,7 @@
import logging
import secrets
from pathlib import Path
from typing import Any, Dict, List
from typing import Any
from questionary import Separator, prompt
@@ -41,7 +41,7 @@ def ask_user_overwrite(config_path: Path) -> bool:
return answers["overwrite"]
def ask_user_config() -> Dict[str, Any]:
def ask_user_config() -> dict[str, Any]:
"""
Ask user a few questions to build the configuration.
Interactive questions built using https://github.com/tmbo/questionary
@@ -51,7 +51,7 @@ def ask_user_config() -> Dict[str, Any]:
from freqtrade.configuration.detect_environment import running_in_docker
from freqtrade.exchange import available_exchanges
questions: List[Dict[str, Any]] = [
questions: list[dict[str, Any]] = [
{
"type": "confirm",
"name": "dry_run",
@@ -216,7 +216,7 @@ def ask_user_config() -> Dict[str, Any]:
return answers
def deploy_new_config(config_path: Path, selections: Dict[str, Any]) -> None:
def deploy_new_config(config_path: Path, selections: dict[str, Any]) -> None:
"""
Applies selections to the template and writes the result to config_path
:param config_path: Path object for new config file. Should not exist yet

View File

@@ -1,6 +1,6 @@
import logging
import os
from typing import Any, Dict
from typing import Any
from freqtrade.constants import ENV_VAR_PREFIX
from freqtrade.misc import deep_merge_dicts
@@ -24,7 +24,7 @@ def _get_var_typed(val):
return val
def _flat_vars_to_nested_dict(env_dict: Dict[str, Any], prefix: str) -> Dict[str, Any]:
def _flat_vars_to_nested_dict(env_dict: dict[str, Any], prefix: str) -> dict[str, Any]:
"""
Environment variables must be prefixed with FREQTRADE.
FREQTRADE__{section}__{key}
@@ -33,7 +33,7 @@ def _flat_vars_to_nested_dict(env_dict: Dict[str, Any], prefix: str) -> Dict[str
:return: Nested dict based on available and relevant variables.
"""
no_convert = ["CHAT_ID", "PASSWORD"]
relevant_vars: Dict[str, Any] = {}
relevant_vars: dict[str, Any] = {}
for env_var, val in sorted(env_dict.items()):
if env_var.startswith(prefix):
@@ -51,7 +51,7 @@ def _flat_vars_to_nested_dict(env_dict: Dict[str, Any], prefix: str) -> Dict[str
return relevant_vars
def enironment_vars_to_dict() -> Dict[str, Any]:
def enironment_vars_to_dict() -> dict[str, Any]:
"""
Read environment variables and return a nested dict for relevant variables
Relevant variables must follow the FREQTRADE__{section}__{key} pattern

View File

@@ -7,7 +7,7 @@ import re
import sys
from copy import deepcopy
from pathlib import Path
from typing import Any, Dict, List, Optional
from typing import Any, Optional
import rapidjson
@@ -42,7 +42,7 @@ def log_config_error_range(path: str, errmsg: str) -> str:
return ""
def load_file(path: Path) -> Dict[str, Any]:
def load_file(path: Path) -> dict[str, Any]:
try:
with path.open("r") as file:
config = rapidjson.load(file, parse_mode=CONFIG_PARSE_MODE)
@@ -51,7 +51,7 @@ def load_file(path: Path) -> Dict[str, Any]:
return config
def load_config_file(path: str) -> Dict[str, Any]:
def load_config_file(path: str) -> dict[str, Any]:
"""
Loads a config file from the given path
:param path: path as str
@@ -78,8 +78,8 @@ def load_config_file(path: str) -> Dict[str, Any]:
def load_from_files(
files: List[str], base_path: Optional[Path] = None, level: int = 0
) -> Dict[str, Any]:
files: list[str], base_path: Optional[Path] = None, level: int = 0
) -> dict[str, Any]:
"""
Recursively load configuration files if specified.
Sub-files are assumed to be relative to the initial config.

View File

@@ -4,7 +4,7 @@
bot constants
"""
from typing import Any, Dict, List, Literal, Optional, Tuple
from typing import Any, Literal, Optional
from freqtrade.enums import CandleType, PriceType
@@ -187,14 +187,14 @@ CANCEL_REASON = {
}
# List of pairs with their timeframes
PairWithTimeframe = Tuple[str, str, CandleType]
ListPairsWithTimeframes = List[PairWithTimeframe]
PairWithTimeframe = tuple[str, str, CandleType]
ListPairsWithTimeframes = list[PairWithTimeframe]
# Type for trades list
TradeList = List[List]
TradeList = list[list]
# ticks, pair, timeframe, CandleType
TickWithTimeframe = Tuple[str, str, CandleType, Optional[int], Optional[int]]
ListTicksWithTimeframes = List[TickWithTimeframe]
TickWithTimeframe = tuple[str, str, CandleType, Optional[int], Optional[int]]
ListTicksWithTimeframes = list[TickWithTimeframe]
LongShort = Literal["long", "short"]
EntryExit = Literal["entry", "exit"]
@@ -203,9 +203,9 @@ MakerTaker = Literal["maker", "taker"]
BidAsk = Literal["bid", "ask"]
OBLiteral = Literal["asks", "bids"]
Config = Dict[str, Any]
Config = dict[str, Any]
# Exchange part of the configuration.
ExchangeConfig = Dict[str, Any]
ExchangeConfig = dict[str, Any]
IntOrInf = float

View File

@@ -6,7 +6,7 @@ import logging
from copy import copy
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List, Literal, Optional, Union
from typing import Any, Literal, Optional, Union
import numpy as np
import pandas as pd
@@ -137,7 +137,7 @@ def get_latest_hyperopt_file(
return directory / get_latest_hyperopt_filename(directory)
def load_backtest_metadata(filename: Union[Path, str]) -> Dict[str, Any]:
def load_backtest_metadata(filename: Union[Path, str]) -> dict[str, Any]:
"""
Read metadata dictionary from backtest results file without reading and deserializing entire
file.
@@ -176,7 +176,7 @@ def load_backtest_stats(filename: Union[Path, str]) -> BacktestResultType:
return data
def load_and_merge_backtest_result(strategy_name: str, filename: Path, results: Dict[str, Any]):
def load_and_merge_backtest_result(strategy_name: str, filename: Path, results: dict[str, Any]):
"""
Load one strategy from multi-strategy result and merge it with results
:param strategy_name: Name of the strategy contained in the result
@@ -195,12 +195,12 @@ def load_and_merge_backtest_result(strategy_name: str, filename: Path, results:
break
def _get_backtest_files(dirname: Path) -> List[Path]:
def _get_backtest_files(dirname: Path) -> list[Path]:
# Weird glob expression here avoids including .meta.json files.
return list(reversed(sorted(dirname.glob("backtest-result-*-[0-9][0-9].json"))))
def _extract_backtest_result(filename: Path) -> List[BacktestHistoryEntryType]:
def _extract_backtest_result(filename: Path) -> list[BacktestHistoryEntryType]:
metadata = load_backtest_metadata(filename)
return [
{
@@ -220,14 +220,14 @@ def _extract_backtest_result(filename: Path) -> List[BacktestHistoryEntryType]:
]
def get_backtest_result(filename: Path) -> List[BacktestHistoryEntryType]:
def get_backtest_result(filename: Path) -> list[BacktestHistoryEntryType]:
"""
Get backtest result read from metadata file
"""
return _extract_backtest_result(filename)
def get_backtest_resultlist(dirname: Path) -> List[BacktestHistoryEntryType]:
def get_backtest_resultlist(dirname: Path) -> list[BacktestHistoryEntryType]:
"""
Get list of backtest results read from metadata files
"""
@@ -249,7 +249,7 @@ def delete_backtest_result(file_abs: Path):
file_abs_meta.unlink()
def update_backtest_metadata(filename: Path, strategy: str, content: Dict[str, Any]):
def update_backtest_metadata(filename: Path, strategy: str, content: dict[str, Any]):
"""
Updates backtest metadata file with new content.
:raises: ValueError if metadata file does not exist, or strategy is not in this file.
@@ -275,8 +275,8 @@ def get_backtest_market_change(filename: Path, include_ts: bool = True) -> pd.Da
def find_existing_backtest_stats(
dirname: Union[Path, str], run_ids: Dict[str, str], min_backtest_date: Optional[datetime] = None
) -> Dict[str, Any]:
dirname: Union[Path, str], run_ids: dict[str, str], min_backtest_date: Optional[datetime] = None
) -> dict[str, Any]:
"""
Find existing backtest stats that match specified run IDs and load them.
:param dirname: pathlib.Path object, or string pointing to the file.
@@ -287,7 +287,7 @@ def find_existing_backtest_stats(
# Copy so we can modify this dict without affecting parent scope.
run_ids = copy(run_ids)
dirname = Path(dirname)
results: Dict[str, Any] = {
results: dict[str, Any] = {
"metadata": {},
"strategy": {},
"strategy_comparison": [],
@@ -438,7 +438,7 @@ def evaluate_result_multi(
return df_final[df_final["open_trades"] > max_open_trades]
def trade_list_to_dataframe(trades: Union[List[Trade], List[LocalTrade]]) -> pd.DataFrame:
def trade_list_to_dataframe(trades: Union[list[Trade], list[LocalTrade]]) -> pd.DataFrame:
"""
Convert list of Trade objects to pandas Dataframe
:param trades: List of trade objects

View File

@@ -3,7 +3,6 @@ Functions to convert data from one format to another
"""
import logging
from typing import Dict
import numpy as np
import pandas as pd
@@ -158,8 +157,8 @@ def trim_dataframe(
def trim_dataframes(
preprocessed: Dict[str, DataFrame], timerange, startup_candles: int
) -> Dict[str, DataFrame]:
preprocessed: dict[str, DataFrame], timerange, startup_candles: int
) -> dict[str, DataFrame]:
"""
Trim startup period from analyzed dataframes
:param preprocessed: Dict of pair: dataframe
@@ -167,7 +166,7 @@ def trim_dataframes(
:param startup_candles: Startup-candles that should be removed
:return: Dict of trimmed dataframes
"""
processed: Dict[str, DataFrame] = {}
processed: dict[str, DataFrame] = {}
for pair, df in preprocessed.items():
trimed_df = trim_dataframe(df, timerange, startup_candles=startup_candles)

View File

@@ -7,7 +7,6 @@ import time
import typing
from collections import OrderedDict
from datetime import datetime
from typing import Tuple
import numpy as np
import pandas as pd
@@ -62,11 +61,11 @@ def _calculate_ohlcv_candle_start_and_end(df: pd.DataFrame, timeframe: str):
def populate_dataframe_with_trades(
cached_grouped_trades: OrderedDict[Tuple[datetime, datetime], pd.DataFrame],
cached_grouped_trades: OrderedDict[tuple[datetime, datetime], pd.DataFrame],
config: Config,
dataframe: pd.DataFrame,
trades: pd.DataFrame,
) -> Tuple[pd.DataFrame, OrderedDict[Tuple[datetime, datetime], pd.DataFrame]]:
) -> tuple[pd.DataFrame, OrderedDict[tuple[datetime, datetime], pd.DataFrame]]:
"""
Populates a dataframe with trades
:param dataframe: Dataframe to populate

View File

@@ -4,7 +4,6 @@ Functions to convert data from one format to another
import logging
from pathlib import Path
from typing import Dict, List
import pandas as pd
from pandas import DataFrame, to_datetime
@@ -34,7 +33,7 @@ def trades_df_remove_duplicates(trades: pd.DataFrame) -> pd.DataFrame:
return trades.drop_duplicates(subset=["timestamp", "id"])
def trades_dict_to_list(trades: List[Dict]) -> TradeList:
def trades_dict_to_list(trades: list[dict]) -> TradeList:
"""
Convert fetch_trades result into a List (to be more memory efficient).
:param trades: List of trades, as returned by ccxt.fetch_trades.
@@ -91,8 +90,8 @@ def trades_to_ohlcv(trades: DataFrame, timeframe: str) -> DataFrame:
def convert_trades_to_ohlcv(
pairs: List[str],
timeframes: List[str],
pairs: list[str],
timeframes: list[str],
datadir: Path,
timerange: TimeRange,
erase: bool,

View File

@@ -8,7 +8,7 @@ Common Interface for bot and strategy to access data.
import logging
from collections import deque
from datetime import datetime, timezone
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Optional
from pandas import DataFrame, Timedelta, Timestamp, to_timedelta
@@ -48,15 +48,15 @@ class DataProvider:
self._exchange = exchange
self._pairlists = pairlists
self.__rpc = rpc
self.__cached_pairs: Dict[PairWithTimeframe, Tuple[DataFrame, datetime]] = {}
self.__cached_pairs: dict[PairWithTimeframe, tuple[DataFrame, datetime]] = {}
self.__slice_index: Optional[int] = None
self.__slice_date: Optional[datetime] = None
self.__cached_pairs_backtesting: Dict[PairWithTimeframe, DataFrame] = {}
self.__producer_pairs_df: Dict[
str, Dict[PairWithTimeframe, Tuple[DataFrame, datetime]]
self.__cached_pairs_backtesting: dict[PairWithTimeframe, DataFrame] = {}
self.__producer_pairs_df: dict[
str, dict[PairWithTimeframe, tuple[DataFrame, datetime]]
] = {}
self.__producer_pairs: Dict[str, List[str]] = {}
self.__producer_pairs: dict[str, list[str]] = {}
self._msg_queue: deque = deque()
self._default_candle_type = self._config.get("candle_type_def", CandleType.SPOT)
@@ -101,7 +101,7 @@ class DataProvider:
self.__cached_pairs[pair_key] = (dataframe, datetime.now(timezone.utc))
# For multiple producers we will want to merge the pairlists instead of overwriting
def _set_producer_pairs(self, pairlist: List[str], producer_name: str = "default"):
def _set_producer_pairs(self, pairlist: list[str], producer_name: str = "default"):
"""
Set the pairs received to later be used.
@@ -109,7 +109,7 @@ class DataProvider:
"""
self.__producer_pairs[producer_name] = pairlist
def get_producer_pairs(self, producer_name: str = "default") -> List[str]:
def get_producer_pairs(self, producer_name: str = "default") -> list[str]:
"""
Get the pairs cached from the producer
@@ -177,7 +177,7 @@ class DataProvider:
timeframe: str,
candle_type: CandleType,
producer_name: str = "default",
) -> Tuple[bool, int]:
) -> tuple[bool, int]:
"""
Append a candle to the existing external dataframe. The incoming dataframe
must have at least 1 candle.
@@ -258,7 +258,7 @@ class DataProvider:
timeframe: Optional[str] = None,
candle_type: Optional[CandleType] = None,
producer_name: str = "default",
) -> Tuple[DataFrame, datetime]:
) -> tuple[DataFrame, datetime]:
"""
Get the pair data from producers.
@@ -377,7 +377,7 @@ class DataProvider:
logger.warning(f"No data found for ({pair}, {timeframe}, {candle_type}).")
return data
def get_analyzed_dataframe(self, pair: str, timeframe: str) -> Tuple[DataFrame, datetime]:
def get_analyzed_dataframe(self, pair: str, timeframe: str) -> tuple[DataFrame, datetime]:
"""
Retrieve the analyzed dataframe. Returns the full dataframe in trade mode (live / dry),
and the last 1000 candles (up to the time evaluated at this moment) in all other modes.
@@ -408,7 +408,7 @@ class DataProvider:
"""
return RunMode(self._config.get("runmode", RunMode.OTHER))
def current_whitelist(self) -> List[str]:
def current_whitelist(self) -> list[str]:
"""
fetch latest available whitelist.
@@ -529,7 +529,7 @@ class DataProvider:
)
return trades_df
def market(self, pair: str) -> Optional[Dict[str, Any]]:
def market(self, pair: str) -> Optional[dict[str, Any]]:
"""
Return market data for the pair
:param pair: Pair to get the data for

View File

@@ -1,6 +1,5 @@
import logging
from pathlib import Path
from typing import Dict, List
import joblib
import pandas as pd
@@ -48,14 +47,14 @@ def _load_signal_candles(backtest_dir: Path):
return _load_backtest_analysis_data(backtest_dir, "signals")
def _load_exit_signal_candles(backtest_dir: Path) -> Dict[str, Dict[str, pd.DataFrame]]:
def _load_exit_signal_candles(backtest_dir: Path) -> dict[str, dict[str, pd.DataFrame]]:
return _load_backtest_analysis_data(backtest_dir, "exited")
def _process_candles_and_indicators(
pairlist, strategy_name, trades, signal_candles, date_col: str = "open_date"
):
analysed_trades_dict: Dict[str, Dict] = {strategy_name: {}}
analysed_trades_dict: dict[str, dict] = {strategy_name: {}}
try:
logger.info(f"Processing {strategy_name} : {len(pairlist)} pairs")
@@ -261,8 +260,8 @@ def prepare_results(
def print_results(
res_df: pd.DataFrame,
exit_df: pd.DataFrame,
analysis_groups: List[str],
indicator_list: List[str],
analysis_groups: list[str],
indicator_list: list[str],
entry_only: bool,
exit_only: bool,
csv_path: Path,
@@ -307,7 +306,7 @@ def print_results(
def _merge_dfs(
entry_df: pd.DataFrame,
exit_df: pd.DataFrame,
available_inds: List[str],
available_inds: list[str],
entry_only: bool,
exit_only: bool,
):
@@ -438,7 +437,7 @@ def _generate_dfs(
pairlist: list,
enter_reason_list: list,
exit_reason_list: list,
signal_candles: Dict,
signal_candles: dict,
strategy_name: str,
timerange: TimeRange,
trades: pd.DataFrame,

View File

@@ -10,7 +10,7 @@ from abc import ABC, abstractmethod
from copy import deepcopy
from datetime import datetime, timezone
from pathlib import Path
from typing import List, Optional, Tuple, Type
from typing import Optional
from pandas import DataFrame, to_datetime
@@ -71,7 +71,7 @@ class IDataHandler(ABC):
]
@classmethod
def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> List[str]:
def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> list[str]:
"""
Returns a list of all pairs with ohlcv data available in this datadir
for the specified timeframe
@@ -107,7 +107,7 @@ class IDataHandler(ABC):
def ohlcv_data_min_max(
self, pair: str, timeframe: str, candle_type: CandleType
) -> Tuple[datetime, datetime, int]:
) -> tuple[datetime, datetime, int]:
"""
Returns the min and max timestamp for the given pair and timeframe.
:param pair: Pair to get min/max for
@@ -168,7 +168,7 @@ class IDataHandler(ABC):
"""
@classmethod
def trades_get_available_data(cls, datadir: Path, trading_mode: TradingMode) -> List[str]:
def trades_get_available_data(cls, datadir: Path, trading_mode: TradingMode) -> list[str]:
"""
Returns a list of all pairs with ohlcv data available in this datadir
:param datadir: Directory to search for ohlcv files
@@ -191,7 +191,7 @@ class IDataHandler(ABC):
self,
pair: str,
trading_mode: TradingMode,
) -> Tuple[datetime, datetime, int]:
) -> tuple[datetime, datetime, int]:
"""
Returns the min and max timestamp for the given pair's trades data.
:param pair: Pair to get min/max for
@@ -212,7 +212,7 @@ class IDataHandler(ABC):
)
@classmethod
def trades_get_pairs(cls, datadir: Path) -> List[str]:
def trades_get_pairs(cls, datadir: Path) -> list[str]:
"""
Returns a list of all pairs for which trade data is available in this
:param datadir: Directory to search for ohlcv files
@@ -532,7 +532,7 @@ class IDataHandler(ABC):
Path(old_name).rename(new_name)
def get_datahandlerclass(datatype: str) -> Type[IDataHandler]:
def get_datahandlerclass(datatype: str) -> type[IDataHandler]:
"""
Get datahandler class.
Could be done using Resolvers, but since this may be called often and resolvers

View File

@@ -2,7 +2,7 @@ import logging
import operator
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, List, Optional, Tuple
from typing import Optional
from pandas import DataFrame, concat
@@ -77,7 +77,7 @@ def load_pair_history(
def load_data(
datadir: Path,
timeframe: str,
pairs: List[str],
pairs: list[str],
*,
timerange: Optional[TimeRange] = None,
fill_up_missing: bool = True,
@@ -86,7 +86,7 @@ def load_data(
data_format: str = "feather",
candle_type: CandleType = CandleType.SPOT,
user_futures_funding_rate: Optional[int] = None,
) -> Dict[str, DataFrame]:
) -> dict[str, DataFrame]:
"""
Load ohlcv history data for a list of pairs.
@@ -101,7 +101,7 @@ def load_data(
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: dict(<pair>:<Dataframe>)
"""
result: Dict[str, DataFrame] = {}
result: dict[str, DataFrame] = {}
if startup_candles > 0 and timerange:
logger.info(f"Using indicator startup period: {startup_candles} ...")
@@ -135,7 +135,7 @@ def refresh_data(
*,
datadir: Path,
timeframe: str,
pairs: List[str],
pairs: list[str],
exchange: Exchange,
data_format: Optional[str] = None,
timerange: Optional[TimeRange] = None,
@@ -172,7 +172,7 @@ def _load_cached_data_for_updating(
data_handler: IDataHandler,
candle_type: CandleType,
prepend: bool = False,
) -> Tuple[DataFrame, Optional[int], Optional[int]]:
) -> tuple[DataFrame, Optional[int], Optional[int]]:
"""
Load cached data to download more data.
If timerange is passed in, checks whether data from an before the stored data will be
@@ -318,8 +318,8 @@ def _download_pair_history(
def refresh_backtest_ohlcv_data(
exchange: Exchange,
pairs: List[str],
timeframes: List[str],
pairs: list[str],
timeframes: list[str],
datadir: Path,
trading_mode: str,
timerange: Optional[TimeRange] = None,
@@ -327,7 +327,7 @@ def refresh_backtest_ohlcv_data(
erase: bool = False,
data_format: Optional[str] = None,
prepend: bool = False,
) -> List[str]:
) -> list[str]:
"""
Refresh stored ohlcv data for backtesting and hyperopt operations.
Used by freqtrade download-data subcommand.
@@ -489,14 +489,14 @@ def _download_trades_history(
def refresh_backtest_trades_data(
exchange: Exchange,
pairs: List[str],
pairs: list[str],
datadir: Path,
timerange: TimeRange,
trading_mode: TradingMode,
new_pairs_days: int = 30,
erase: bool = False,
data_format: str = "feather",
) -> List[str]:
) -> list[str]:
"""
Refresh stored trades data for backtesting and hyperopt operations.
Used by freqtrade download-data subcommand.
@@ -531,7 +531,7 @@ def refresh_backtest_trades_data(
return pairs_not_available
def get_timerange(data: Dict[str, DataFrame]) -> Tuple[datetime, datetime]:
def get_timerange(data: dict[str, DataFrame]) -> tuple[datetime, datetime]:
"""
Get the maximum common timerange for the given backtest data.
@@ -588,7 +588,7 @@ def download_data_main(config: Config) -> None:
# Remove stake-currency to skip checks which are not relevant for datadownload
config["stake_currency"] = ""
pairs_not_available: List[str] = []
pairs_not_available: list[str] = []
# Init exchange
from freqtrade.resolvers.exchange_resolver import ExchangeResolver

View File

@@ -2,7 +2,6 @@ import logging
import math
from dataclasses import dataclass
from datetime import datetime
from typing import Dict, Tuple
import numpy as np
import pandas as pd
@@ -11,7 +10,7 @@ import pandas as pd
logger = logging.getLogger(__name__)
def calculate_market_change(data: Dict[str, pd.DataFrame], column: str = "close") -> float:
def calculate_market_change(data: dict[str, pd.DataFrame], column: str = "close") -> float:
"""
Calculate market change based on "column".
Calculation is done by taking the first non-null and the last non-null element of each column
@@ -32,7 +31,7 @@ def calculate_market_change(data: Dict[str, pd.DataFrame], column: str = "close"
def combine_dataframes_by_column(
data: Dict[str, pd.DataFrame], column: str = "close"
data: dict[str, pd.DataFrame], column: str = "close"
) -> pd.DataFrame:
"""
Combine multiple dataframes "column"
@@ -50,7 +49,7 @@ def combine_dataframes_by_column(
def combined_dataframes_with_rel_mean(
data: Dict[str, pd.DataFrame], fromdt: datetime, todt: datetime, column: str = "close"
data: dict[str, pd.DataFrame], fromdt: datetime, todt: datetime, column: str = "close"
) -> pd.DataFrame:
"""
Combine multiple dataframes "column"
@@ -70,7 +69,7 @@ def combined_dataframes_with_rel_mean(
def combine_dataframes_with_mean(
data: Dict[str, pd.DataFrame], column: str = "close"
data: dict[str, pd.DataFrame], column: str = "close"
) -> pd.DataFrame:
"""
Combine multiple dataframes "column"
@@ -222,7 +221,7 @@ def calculate_max_drawdown(
)
def calculate_csum(trades: pd.DataFrame, starting_balance: float = 0) -> Tuple[float, float]:
def calculate_csum(trades: pd.DataFrame, starting_balance: float = 0) -> tuple[float, float]:
"""
Calculate min/max cumsum of trades, to show if the wallet/stake amount ratio is sane
:param trades: DataFrame containing trades (requires columns close_date and profit_percent)
@@ -255,7 +254,7 @@ def calculate_cagr(days_passed: int, starting_balance: float, final_balance: flo
return (final_balance / starting_balance) ** (1 / (days_passed / 365)) - 1
def calculate_expectancy(trades: pd.DataFrame) -> Tuple[float, float]:
def calculate_expectancy(trades: pd.DataFrame) -> tuple[float, float]:
"""
Calculate expectancy
:param trades: DataFrame containing trades (requires columns close_date and profit_abs)

View File

@@ -5,7 +5,7 @@ import logging
from collections import defaultdict
from copy import deepcopy
from datetime import timedelta
from typing import Any, Dict, List, NamedTuple
from typing import Any, NamedTuple
import numpy as np
import utils_find_1st as utf1st
@@ -44,7 +44,7 @@ class Edge:
Author: https://github.com/mishaker
"""
_cached_pairs: Dict[str, Any] = {} # Keeps a list of pairs
_cached_pairs: dict[str, Any] = {} # Keeps a list of pairs
def __init__(self, config: Config, exchange, strategy) -> None:
self.config = config
@@ -52,7 +52,7 @@ class Edge:
self.strategy: IStrategy = strategy
self.edge_config = self.config.get("edge", {})
self._cached_pairs: Dict[str, Any] = {} # Keeps a list of pairs
self._cached_pairs: dict[str, Any] = {} # Keeps a list of pairs
self._final_pairs: list = []
# checking max_open_trades. it should be -1 as with Edge
@@ -93,7 +93,7 @@ class Edge:
except IndexError:
self.fee = None
def calculate(self, pairs: List[str]) -> bool:
def calculate(self, pairs: list[str]) -> bool:
if self.fee is None and pairs:
self.fee = self.exchange.get_fee(pairs[0])
@@ -104,7 +104,7 @@ class Edge:
):
return False
data: Dict[str, Any] = {}
data: dict[str, Any] = {}
logger.info("Using stake_currency: %s ...", self.config["stake_currency"])
logger.info("Using local backtesting data (using whitelist in given config) ...")
@@ -231,7 +231,7 @@ class Edge:
)
return self.strategy.stoploss
def adjust(self, pairs: List[str]) -> list:
def adjust(self, pairs: list[str]) -> list:
"""
Filters out and sorts "pairs" according to Edge calculated pairs
"""
@@ -260,7 +260,7 @@ class Edge:
return self._final_pairs
def accepted_pairs(self) -> List[Dict[str, Any]]:
def accepted_pairs(self) -> list[dict[str, Any]]:
"""
return a list of accepted pairs along with their winrate, expectancy and stoploss
"""
@@ -322,7 +322,7 @@ class Edge:
return result
def _process_expectancy(self, results: DataFrame) -> Dict[str, Any]:
def _process_expectancy(self, results: DataFrame) -> dict[str, Any]:
"""
This calculates WinRate, Required Risk Reward, Risk Reward and Expectancy of all pairs
The calculation will be done per pair and per strategy.

View File

@@ -3,7 +3,7 @@
import logging
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional, Tuple
from typing import Optional
import ccxt
@@ -46,14 +46,14 @@ class Binance(Exchange):
"ws_enabled": False,
}
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
_supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
# TradingMode.SPOT always supported and not required in this list
# (TradingMode.MARGIN, MarginMode.CROSS),
# (TradingMode.FUTURES, MarginMode.CROSS),
(TradingMode.FUTURES, MarginMode.ISOLATED)
]
def get_tickers(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Tickers:
def get_tickers(self, symbols: Optional[list[str]] = None, cached: bool = False) -> Tickers:
tickers = super().get_tickers(symbols=symbols, cached=cached)
if self.trading_mode == TradingMode.FUTURES:
# Binance's future result has no bid/ask values.
@@ -145,8 +145,8 @@ class Binance(Exchange):
return open_date.minute == 0 and open_date.second < 15
def fetch_funding_rates(
self, symbols: Optional[List[str]] = None
) -> Dict[str, Dict[str, float]]:
self, symbols: Optional[list[str]] = None
) -> dict[str, dict[str, float]]:
"""
Fetch funding rates for the given symbols.
:param symbols: List of symbols to fetch funding rates for
@@ -253,7 +253,7 @@ class Binance(Exchange):
"Freqtrade only supports isolated futures for leverage trading"
)
def load_leverage_tiers(self) -> Dict[str, List[Dict]]:
def load_leverage_tiers(self) -> dict[str, list[dict]]:
if self.trading_mode == TradingMode.FUTURES:
if self._config["dry_run"]:
leverage_tiers_path = Path(__file__).parent / "binance_leverage_tiers.json"

View File

@@ -2,7 +2,7 @@
import logging
from datetime import datetime, timezone
from typing import Dict, List, Optional
from typing import Optional
from freqtrade.exchange import Exchange
@@ -17,8 +17,8 @@ class Bitpanda(Exchange):
"""
def get_trades_for_order(
self, order_id: str, pair: str, since: datetime, params: Optional[Dict] = None
) -> List:
self, order_id: str, pair: str, since: datetime, params: Optional[dict] = None
) -> list:
"""
Fetch Orders using the "fetch_my_trades" endpoint and filter them by order-id.
The "since" argument passed in is coming from the database and is in UTC,

View File

@@ -2,7 +2,7 @@
import logging
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Optional
import ccxt
@@ -53,14 +53,14 @@ class Bybit(Exchange):
},
}
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
_supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
# TradingMode.SPOT always supported and not required in this list
# (TradingMode.FUTURES, MarginMode.CROSS),
(TradingMode.FUTURES, MarginMode.ISOLATED)
]
@property
def _ccxt_config(self) -> Dict:
def _ccxt_config(self) -> dict:
# Parameters to add directly to ccxt sync/async initialization.
# ccxt defaults to swap mode.
config = {}
@@ -69,7 +69,7 @@ class Bybit(Exchange):
config.update(super()._ccxt_config)
return config
def market_is_future(self, market: Dict[str, Any]) -> bool:
def market_is_future(self, market: dict[str, Any]) -> bool:
main = super().market_is_future(market)
# For ByBit, we'll only support USDT markets for now.
return main and market["settle"] == "USDT"
@@ -126,7 +126,7 @@ class Bybit(Exchange):
leverage: float,
reduceOnly: bool,
time_in_force: str = "GTC",
) -> Dict:
) -> dict:
params = super()._get_params(
side=side,
ordertype=ordertype,
@@ -220,7 +220,7 @@ class Bybit(Exchange):
logger.warning(f"Could not update funding fees for {pair}.")
return 0.0
def fetch_orders(self, pair: str, since: datetime, params: Optional[Dict] = None) -> List[Dict]:
def fetch_orders(self, pair: str, since: datetime, params: Optional[dict] = None) -> list[dict]:
"""
Fetch all orders for a pair "since"
:param pair: Pair for the query
@@ -237,7 +237,7 @@ class Bybit(Exchange):
return orders
def fetch_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
def fetch_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> dict:
if self.exchange_has("fetchOrder"):
# Set acknowledged to True to avoid ccxt exception
params = {"acknowledged": True}
@@ -255,7 +255,7 @@ class Bybit(Exchange):
return order
@retrier
def get_leverage_tiers(self) -> Dict[str, List[Dict]]:
def get_leverage_tiers(self) -> dict[str, list[dict]]:
"""
Cache leverage tiers for 1 day, since they are not expected to change often, and
bybit requires pagination to fetch all tiers.

View File

@@ -2,7 +2,7 @@ import asyncio
import logging
import time
from functools import wraps
from typing import Any, Callable, Dict, List, Optional, TypeVar, cast, overload
from typing import Any, Callable, Optional, TypeVar, cast, overload
from freqtrade.constants import ExchangeConfig
from freqtrade.exceptions import DDosProtection, RetryableOrderError, TemporaryError
@@ -62,7 +62,7 @@ SUPPORTED_EXCHANGES = [
]
# either the main, or replacement methods (array) is required
EXCHANGE_HAS_REQUIRED: Dict[str, List[str]] = {
EXCHANGE_HAS_REQUIRED: dict[str, list[str]] = {
# Required / private
"fetchOrder": ["fetchOpenOrder", "fetchClosedOrder"],
"fetchL2OrderBook": ["fetchTicker"],

View File

@@ -7,11 +7,12 @@ import asyncio
import inspect
import logging
import signal
from collections.abc import Coroutine
from copy import deepcopy
from datetime import datetime, timedelta, timezone
from math import floor, isnan
from threading import Lock
from typing import Any, Coroutine, Dict, List, Literal, Optional, Tuple, Union
from typing import Any, Literal, Optional, Union
import ccxt
import ccxt.pro as ccxt_pro
@@ -114,10 +115,10 @@ logger = logging.getLogger(__name__)
class Exchange:
# Parameters to add directly to buy/sell calls (like agreeing to trading agreement)
_params: Dict = {}
_params: dict = {}
# Additional parameters - added to the ccxt object
_ccxt_params: Dict = {}
_ccxt_params: dict = {}
# Dict to specify which options each exchange implements
# This defines defaults, which can be selectively overridden by subclasses using _ft_has
@@ -160,7 +161,7 @@ class Exchange:
_ft_has: FtHas = {}
_ft_has_futures: FtHas = {}
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
_supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
# TradingMode.SPOT always supported and not required in this list
]
@@ -181,9 +182,9 @@ class Exchange:
self._api_async: ccxt_pro.Exchange
self._ws_async: ccxt_pro.Exchange = None
self._exchange_ws: Optional[ExchangeWS] = None
self._markets: Dict = {}
self._trading_fees: Dict[str, Any] = {}
self._leverage_tiers: Dict[str, List[Dict]] = {}
self._markets: dict = {}
self._trading_fees: dict[str, Any] = {}
self._leverage_tiers: dict[str, list[dict]] = {}
# Lock event loop. This is necessary to avoid race-conditions when using force* commands
# Due to funding fee fetching.
self._loop_lock = Lock()
@@ -193,7 +194,7 @@ class Exchange:
self._config.update(config)
# Holds last candle refreshed time of each pair
self._pairs_last_refresh_time: Dict[PairWithTimeframe, int] = {}
self._pairs_last_refresh_time: dict[PairWithTimeframe, int] = {}
# Timestamp of last markets refresh
self._last_markets_refresh: int = 0
@@ -208,19 +209,19 @@ class Exchange:
self._entry_rate_cache: TTLCache = TTLCache(maxsize=100, ttl=300)
# Holds candles
self._klines: Dict[PairWithTimeframe, DataFrame] = {}
self._expiring_candle_cache: Dict[Tuple[str, int], PeriodicCache] = {}
self._klines: dict[PairWithTimeframe, DataFrame] = {}
self._expiring_candle_cache: dict[tuple[str, int], PeriodicCache] = {}
# Holds public_trades
self._trades: Dict[PairWithTimeframe, DataFrame] = {}
self._trades: dict[PairWithTimeframe, DataFrame] = {}
# Holds all open sell orders for dry_run
self._dry_run_open_orders: Dict[str, Any] = {}
self._dry_run_open_orders: dict[str, Any] = {}
if config["dry_run"]:
logger.info("Instance is running with dry_run enabled")
logger.info(f"Using CCXT {ccxt.__version__}")
exchange_conf: Dict[str, Any] = exchange_config if exchange_config else config["exchange"]
exchange_conf: dict[str, Any] = exchange_config if exchange_config else config["exchange"]
remove_exchange_credentials(exchange_conf, config.get("dry_run", False))
self.log_responses = exchange_conf.get("log_responses", False)
@@ -339,7 +340,7 @@ class Exchange:
self.validate_freqai(config)
def _init_ccxt(
self, exchange_config: Dict[str, Any], sync: bool, ccxt_kwargs: Dict[str, Any]
self, exchange_config: dict[str, Any], sync: bool, ccxt_kwargs: dict[str, Any]
) -> ccxt.Exchange:
"""
Initialize ccxt with given config and return valid ccxt instance.
@@ -390,7 +391,7 @@ class Exchange:
return api
@property
def _ccxt_config(self) -> Dict:
def _ccxt_config(self) -> dict:
# Parameters to add directly to ccxt sync/async initialization.
if self.trading_mode == TradingMode.MARGIN:
return {"options": {"defaultType": "margin"}}
@@ -410,11 +411,11 @@ class Exchange:
return self._api.id
@property
def timeframes(self) -> List[str]:
def timeframes(self) -> list[str]:
return list((self._api.timeframes or {}).keys())
@property
def markets(self) -> Dict[str, Any]:
def markets(self) -> dict[str, Any]:
"""exchange ccxt markets"""
if not self._markets:
logger.info("Markets were not loaded. Loading them now..")
@@ -471,14 +472,14 @@ class Exchange:
def get_markets(
self,
base_currencies: Optional[List[str]] = None,
quote_currencies: Optional[List[str]] = None,
base_currencies: Optional[list[str]] = None,
quote_currencies: Optional[list[str]] = None,
spot_only: bool = False,
margin_only: bool = False,
futures_only: bool = False,
tradable_only: bool = True,
active_only: bool = False,
) -> Dict[str, Any]:
) -> dict[str, Any]:
"""
Return exchange ccxt markets, filtered out by base currency and quote currency
if this was requested in parameters.
@@ -503,7 +504,7 @@ class Exchange:
markets = {k: v for k, v in markets.items() if market_is_active(v)}
return markets
def get_quote_currencies(self) -> List[str]:
def get_quote_currencies(self) -> list[str]:
"""
Return a list of supported quote currencies
"""
@@ -518,19 +519,19 @@ class Exchange:
"""Return a pair's base currency (base/quote:settlement)"""
return self.markets.get(pair, {}).get("base", "")
def market_is_future(self, market: Dict[str, Any]) -> bool:
def market_is_future(self, market: dict[str, Any]) -> bool:
return (
market.get(self._ft_has["ccxt_futures_name"], False) is True
and market.get("linear", False) is True
)
def market_is_spot(self, market: Dict[str, Any]) -> bool:
def market_is_spot(self, market: dict[str, Any]) -> bool:
return market.get("spot", False) is True
def market_is_margin(self, market: Dict[str, Any]) -> bool:
def market_is_margin(self, market: dict[str, Any]) -> bool:
return market.get("margin", False) is True
def market_is_tradable(self, market: Dict[str, Any]) -> bool:
def market_is_tradable(self, market: dict[str, Any]) -> bool:
"""
Check if the market symbol is tradable by Freqtrade.
Ensures that Configured mode aligns to
@@ -578,7 +579,7 @@ class Exchange:
else:
return 1
def _trades_contracts_to_amount(self, trades: List) -> List:
def _trades_contracts_to_amount(self, trades: list) -> list:
if len(trades) > 0 and "symbol" in trades[0]:
contract_size = self.get_contract_size(trades[0]["symbol"])
if contract_size != 1:
@@ -586,7 +587,7 @@ class Exchange:
trade["amount"] = trade["amount"] * contract_size
return trades
def _order_contracts_to_amount(self, order: Dict) -> Dict:
def _order_contracts_to_amount(self, order: dict) -> dict:
if "symbol" in order and order["symbol"] is not None:
contract_size = self.get_contract_size(order["symbol"])
if contract_size != 1:
@@ -620,7 +621,7 @@ class Exchange:
if self._exchange_ws:
self._exchange_ws.reset_connections()
async def _api_reload_markets(self, reload: bool = False) -> Dict[str, Any]:
async def _api_reload_markets(self, reload: bool = False) -> dict[str, Any]:
try:
return await self._api_async.load_markets(reload=reload, params={})
except ccxt.DDoSProtection as e:
@@ -632,7 +633,7 @@ class Exchange:
except ccxt.BaseError as e:
raise TemporaryError(e) from e
def _load_async_markets(self, reload: bool = False) -> Dict[str, Any]:
def _load_async_markets(self, reload: bool = False) -> dict[str, Any]:
try:
markets = self.loop.run_until_complete(self._api_reload_markets(reload=reload))
@@ -734,7 +735,7 @@ class Exchange:
):
raise ConfigurationError("Timeframes < 1m are currently not supported by Freqtrade.")
def validate_ordertypes(self, order_types: Dict) -> None:
def validate_ordertypes(self, order_types: dict) -> None:
"""
Checks if order-types configured in strategy/config are supported
"""
@@ -743,7 +744,7 @@ class Exchange:
raise ConfigurationError(f"Exchange {self.name} does not support market orders.")
self.validate_stop_ordertypes(order_types)
def validate_stop_ordertypes(self, order_types: Dict) -> None:
def validate_stop_ordertypes(self, order_types: dict) -> None:
"""
Validate stoploss order types
"""
@@ -762,7 +763,7 @@ class Exchange:
f"On exchange stoploss price type is not supported for {self.name}."
)
def validate_pricing(self, pricing: Dict) -> None:
def validate_pricing(self, pricing: dict) -> None:
if pricing.get("use_order_book", False) and not self.exchange_has("fetchL2OrderBook"):
raise ConfigurationError(f"Orderbook not available for {self.name}.")
if not pricing.get("use_order_book", False) and (
@@ -770,7 +771,7 @@ class Exchange:
):
raise ConfigurationError(f"Ticker pricing not available for {self.name}.")
def validate_order_time_in_force(self, order_time_in_force: Dict) -> None:
def validate_order_time_in_force(self, order_time_in_force: dict) -> None:
"""
Checks if order time in force configured in strategy/config are supported
"""
@@ -782,7 +783,7 @@ class Exchange:
f"Time in force policies are not supported for {self.name} yet."
)
def validate_orderflow(self, exchange: Dict) -> None:
def validate_orderflow(self, exchange: dict) -> None:
if exchange.get("use_public_trades", False) and (
not self.exchange_has("fetchTrades") or not self._ft_has["trades_has_history"]
):
@@ -1000,16 +1001,16 @@ class Exchange:
amount: float,
rate: float,
leverage: float,
params: Optional[Dict] = None,
params: Optional[dict] = None,
stop_loss: bool = False,
) -> Dict[str, Any]:
) -> dict[str, Any]:
now = dt_now()
order_id = f"dry_run_{side}_{pair}_{now.timestamp()}"
# Rounding here must respect to contract sizes
_amount = self._contracts_to_amount(
pair, self.amount_to_precision(pair, self._amount_to_contracts(pair, amount))
)
dry_order: Dict[str, Any] = {
dry_order: dict[str, Any] = {
"id": order_id,
"symbol": pair,
"price": rate,
@@ -1071,9 +1072,9 @@ class Exchange:
def add_dry_order_fee(
self,
pair: str,
dry_order: Dict[str, Any],
dry_order: dict[str, Any],
taker_or_maker: MakerTaker,
) -> Dict[str, Any]:
) -> dict[str, Any]:
fee = self.get_fee(pair, taker_or_maker=taker_or_maker)
dry_order.update(
{
@@ -1157,8 +1158,8 @@ class Exchange:
return False
def check_dry_limit_order_filled(
self, order: Dict[str, Any], immediate: bool = False, orderbook: Optional[OrderBook] = None
) -> Dict[str, Any]:
self, order: dict[str, Any], immediate: bool = False, orderbook: Optional[OrderBook] = None
) -> dict[str, Any]:
"""
Check dry-run limit order fill and update fee (if it filled).
"""
@@ -1185,7 +1186,7 @@ class Exchange:
return order
def fetch_dry_run_order(self, order_id) -> Dict[str, Any]:
def fetch_dry_run_order(self, order_id) -> dict[str, Any]:
"""
Return dry-run order
Only call if running in dry-run mode.
@@ -1221,7 +1222,7 @@ class Exchange:
leverage: float,
reduceOnly: bool,
time_in_force: str = "GTC",
) -> Dict:
) -> dict:
params = self._params.copy()
if time_in_force != "GTC" and ordertype != "market":
params.update({"timeInForce": time_in_force.upper()})
@@ -1247,7 +1248,7 @@ class Exchange:
leverage: float,
reduceOnly: bool = False,
time_in_force: str = "GTC",
) -> Dict:
) -> dict:
if self._config["dry_run"]:
dry_order = self.create_dry_run_order(
pair, ordertype, side, amount, self.price_to_precision(pair, rate), leverage
@@ -1305,7 +1306,7 @@ class Exchange:
except ccxt.BaseError as e:
raise OperationalException(e) from e
def stoploss_adjust(self, stop_loss: float, order: Dict, side: str) -> bool:
def stoploss_adjust(self, stop_loss: float, order: dict, side: str) -> bool:
"""
Verify stop_loss against stoploss-order value (limit or price)
Returns True if adjustment is necessary.
@@ -1318,8 +1319,8 @@ class Exchange:
or (side == "buy" and stop_loss < float(order[price_param]))
)
def _get_stop_order_type(self, user_order_type) -> Tuple[str, str]:
available_order_Types: Dict[str, str] = self._ft_has["stoploss_order_types"]
def _get_stop_order_type(self, user_order_type) -> tuple[str, str]:
available_order_Types: dict[str, str] = self._ft_has["stoploss_order_types"]
if user_order_type in available_order_Types.keys():
ordertype = available_order_Types[user_order_type]
@@ -1329,7 +1330,7 @@ class Exchange:
user_order_type = list(available_order_Types.keys())[0]
return ordertype, user_order_type
def _get_stop_limit_rate(self, stop_price: float, order_types: Dict, side: str) -> float:
def _get_stop_limit_rate(self, stop_price: float, order_types: dict, side: str) -> float:
# Limit price threshold: As limit price should always be below stop-price
limit_price_pct = order_types.get("stoploss_on_exchange_limit_ratio", 0.99)
if side == "sell":
@@ -1351,7 +1352,7 @@ class Exchange:
)
return limit_rate
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> dict:
params = self._params.copy()
# Verify if stopPrice works for your exchange, else configure stop_price_param
params.update({self._ft_has["stop_price_param"]: stop_price})
@@ -1363,10 +1364,10 @@ class Exchange:
pair: str,
amount: float,
stop_price: float,
order_types: Dict,
order_types: dict,
side: BuySell,
leverage: float,
) -> Dict:
) -> dict:
"""
creates a stoploss order.
requires `_ft_has['stoploss_order_types']` to be set as a dict mapping limit and market
@@ -1459,7 +1460,7 @@ class Exchange:
except ccxt.BaseError as e:
raise OperationalException(e) from e
def fetch_order_emulated(self, order_id: str, pair: str, params: Dict) -> Dict:
def fetch_order_emulated(self, order_id: str, pair: str, params: dict) -> dict:
"""
Emulated fetch_order if the exchange doesn't support fetch_order, but requires separate
calls for open and closed orders.
@@ -1493,7 +1494,7 @@ class Exchange:
raise OperationalException(e) from e
@retrier(retries=API_FETCH_ORDER_RETRY_COUNT)
def fetch_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
def fetch_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> dict:
if self._config["dry_run"]:
return self.fetch_dry_run_order(order_id)
if params is None:
@@ -1522,12 +1523,12 @@ class Exchange:
except ccxt.BaseError as e:
raise OperationalException(e) from e
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> dict:
return self.fetch_order(order_id, pair, params)
def fetch_order_or_stoploss_order(
self, order_id: str, pair: str, stoploss_order: bool = False
) -> Dict:
) -> dict:
"""
Simple wrapper calling either fetch_order or fetch_stoploss_order depending on
the stoploss_order parameter
@@ -1539,7 +1540,7 @@ class Exchange:
return self.fetch_stoploss_order(order_id, pair)
return self.fetch_order(order_id, pair)
def check_order_canceled_empty(self, order: Dict) -> bool:
def check_order_canceled_empty(self, order: dict) -> bool:
"""
Verify if an order has been cancelled without being partially filled
:param order: Order dict as returned from fetch_order()
@@ -1548,7 +1549,7 @@ class Exchange:
return order.get("status") in NON_OPEN_EXCHANGE_STATES and order.get("filled") == 0.0
@retrier
def cancel_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
def cancel_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> dict:
if self._config["dry_run"]:
try:
order = self.fetch_dry_run_order(order_id)
@@ -1577,8 +1578,8 @@ class Exchange:
raise OperationalException(e) from e
def cancel_stoploss_order(
self, order_id: str, pair: str, params: Optional[Dict] = None
) -> Dict:
self, order_id: str, pair: str, params: Optional[dict] = None
) -> dict:
return self.cancel_order(order_id, pair, params)
def is_cancel_order_result_suitable(self, corder) -> bool:
@@ -1588,7 +1589,7 @@ class Exchange:
required = ("fee", "status", "amount")
return all(corder.get(k, None) is not None for k in required)
def cancel_order_with_result(self, order_id: str, pair: str, amount: float) -> Dict:
def cancel_order_with_result(self, order_id: str, pair: str, amount: float) -> dict:
"""
Cancel order returning a result.
Creates a fake result if cancel order returns a non-usable result
@@ -1619,7 +1620,7 @@ class Exchange:
return order
def cancel_stoploss_order_with_result(self, order_id: str, pair: str, amount: float) -> Dict:
def cancel_stoploss_order_with_result(self, order_id: str, pair: str, amount: float) -> dict:
"""
Cancel stoploss order returning a result.
Creates a fake result if cancel order returns a non-usable result
@@ -1661,7 +1662,7 @@ class Exchange:
raise OperationalException(e) from e
@retrier
def fetch_positions(self, pair: Optional[str] = None) -> List[CcxtPosition]:
def fetch_positions(self, pair: Optional[str] = None) -> list[CcxtPosition]:
"""
Fetch positions from the exchange.
If no pair is given, all positions are returned.
@@ -1673,7 +1674,7 @@ class Exchange:
symbols = []
if pair:
symbols.append(pair)
positions: List[CcxtPosition] = self._api.fetch_positions(symbols)
positions: list[CcxtPosition] = self._api.fetch_positions(symbols)
self._log_exchange_response("fetch_positions", positions)
return positions
except ccxt.DDoSProtection as e:
@@ -1685,7 +1686,7 @@ class Exchange:
except ccxt.BaseError as e:
raise OperationalException(e) from e
def _fetch_orders_emulate(self, pair: str, since_ms: int) -> List[Dict]:
def _fetch_orders_emulate(self, pair: str, since_ms: int) -> list[dict]:
orders = []
if self.exchange_has("fetchClosedOrders"):
orders = self._api.fetch_closed_orders(pair, since=since_ms)
@@ -1695,7 +1696,7 @@ class Exchange:
return orders
@retrier(retries=0)
def fetch_orders(self, pair: str, since: datetime, params: Optional[Dict] = None) -> List[Dict]:
def fetch_orders(self, pair: str, since: datetime, params: Optional[dict] = None) -> list[dict]:
"""
Fetch all orders for a pair "since"
:param pair: Pair for the query
@@ -1711,7 +1712,7 @@ class Exchange:
if not params:
params = {}
try:
orders: List[Dict] = self._api.fetch_orders(pair, since=since_ms, params=params)
orders: list[dict] = self._api.fetch_orders(pair, since=since_ms, params=params)
except ccxt.NotSupported:
# Some exchanges don't support fetchOrders
# attempt to fetch open and closed orders separately
@@ -1731,7 +1732,7 @@ class Exchange:
raise OperationalException(e) from e
@retrier
def fetch_trading_fees(self) -> Dict[str, Any]:
def fetch_trading_fees(self) -> dict[str, Any]:
"""
Fetch user account trading fees
Can be cached, should not update often.
@@ -1743,7 +1744,7 @@ class Exchange:
):
return {}
try:
trading_fees: Dict[str, Any] = self._api.fetch_trading_fees()
trading_fees: dict[str, Any] = self._api.fetch_trading_fees()
self._log_exchange_response("fetch_trading_fees", trading_fees)
return trading_fees
except ccxt.DDoSProtection as e:
@@ -1756,7 +1757,7 @@ class Exchange:
raise OperationalException(e) from e
@retrier
def fetch_bids_asks(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Dict:
def fetch_bids_asks(self, symbols: Optional[list[str]] = None, cached: bool = False) -> dict:
"""
:param symbols: List of symbols to fetch
:param cached: Allow cached result
@@ -1789,7 +1790,7 @@ class Exchange:
raise OperationalException(e) from e
@retrier
def get_tickers(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Tickers:
def get_tickers(self, symbols: Optional[list[str]] = None, cached: bool = False) -> Tickers:
"""
:param cached: Allow cached result
:return: fetch_tickers result
@@ -1849,7 +1850,7 @@ class Exchange:
@staticmethod
def get_next_limit_in_list(
limit: int, limit_range: Optional[List[int]], range_required: bool = True
limit: int, limit_range: Optional[list[int]], range_required: bool = True
):
"""
Get next greater value in the list.
@@ -1890,7 +1891,7 @@ class Exchange:
except ccxt.BaseError as e:
raise OperationalException(e) from e
def _get_price_side(self, side: str, is_short: bool, conf_strategy: Dict) -> BidAsk:
def _get_price_side(self, side: str, is_short: bool, conf_strategy: dict) -> BidAsk:
price_side = conf_strategy["price_side"]
if price_side in ("same", "other"):
@@ -1962,7 +1963,7 @@ class Exchange:
return rate
def _get_rate_from_ticker(
self, side: EntryExit, ticker: Ticker, conf_strategy: Dict[str, Any], price_side: BidAsk
self, side: EntryExit, ticker: Ticker, conf_strategy: dict[str, Any], price_side: BidAsk
) -> Optional[float]:
"""
Get rate from ticker.
@@ -2008,7 +2009,7 @@ class Exchange:
)
return rate
def get_rates(self, pair: str, refresh: bool, is_short: bool) -> Tuple[float, float]:
def get_rates(self, pair: str, refresh: bool, is_short: bool) -> tuple[float, float]:
entry_rate = None
exit_rate = None
if not refresh:
@@ -2042,8 +2043,8 @@ class Exchange:
@retrier
def get_trades_for_order(
self, order_id: str, pair: str, since: datetime, params: Optional[Dict] = None
) -> List:
self, order_id: str, pair: str, since: datetime, params: Optional[dict] = None
) -> list:
"""
Fetch Orders using the "fetch_my_trades" endpoint and filter them by order-id.
The "since" argument passed in is coming from the database and is in UTC,
@@ -2089,7 +2090,7 @@ class Exchange:
except ccxt.BaseError as e:
raise OperationalException(e) from e
def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
def get_order_id_conditional(self, order: dict[str, Any]) -> str:
return order["id"]
@retrier
@@ -2138,7 +2139,7 @@ class Exchange:
raise OperationalException(e) from e
@staticmethod
def order_has_fee(order: Dict) -> bool:
def order_has_fee(order: dict) -> bool:
"""
Verifies if the passed in order dict has the needed keys to extract fees,
and that these keys (currency, cost) are not empty.
@@ -2156,7 +2157,7 @@ class Exchange:
)
def calculate_fee_rate(
self, fee: Dict, symbol: str, cost: float, amount: float
self, fee: dict, symbol: str, cost: float, amount: float
) -> Optional[float]:
"""
Calculate fee rate if it's not given by the exchange.
@@ -2196,8 +2197,8 @@ class Exchange:
return round((fee_cost * fee_to_quote_rate) / cost, 8)
def extract_cost_curr_rate(
self, fee: Dict, symbol: str, cost: float, amount: float
) -> Tuple[float, str, Optional[float]]:
self, fee: dict, symbol: str, cost: float, amount: float
) -> tuple[float, str, Optional[float]]:
"""
Extract tuple of cost, currency, rate.
Requires order_has_fee to run first!
@@ -2277,7 +2278,7 @@ class Exchange:
for since in range(since_ms, until_ms or dt_ts(), one_call)
]
data: List = []
data: list = []
# Chunk requests into batches of 100 to avoid overwhelming ccxt Throttling
for input_coro in chunks(input_coroutines, 100):
results = await asyncio.gather(*input_coro, return_exceptions=True)
@@ -2371,11 +2372,11 @@ class Exchange:
def _build_ohlcv_dl_jobs(
self, pair_list: ListPairsWithTimeframes, since_ms: Optional[int], cache: bool
) -> Tuple[List[Coroutine], List[PairWithTimeframe]]:
) -> tuple[list[Coroutine], list[PairWithTimeframe]]:
"""
Build Coroutines to execute as part of refresh_latest_ohlcv
"""
input_coroutines: List[Coroutine[Any, Any, OHLCVResponse]] = []
input_coroutines: list[Coroutine[Any, Any, OHLCVResponse]] = []
cached_pairs = []
for pair, timeframe, candle_type in set(pair_list):
if timeframe not in self.timeframes and candle_type in (
@@ -2411,7 +2412,7 @@ class Exchange:
pair: str,
timeframe: str,
c_type: CandleType,
ticks: List[List],
ticks: list[list],
cache: bool,
drop_incomplete: bool,
) -> DataFrame:
@@ -2450,7 +2451,7 @@ class Exchange:
since_ms: Optional[int] = None,
cache: bool = True,
drop_incomplete: Optional[bool] = None,
) -> Dict[PairWithTimeframe, DataFrame]:
) -> dict[PairWithTimeframe, DataFrame]:
"""
Refresh in-memory OHLCV asynchronously and set `_klines` with the result
Loops asynchronously over pair_list and downloads all pairs async (semi-parallel).
@@ -2499,8 +2500,8 @@ class Exchange:
return results_df
def refresh_ohlcv_with_cache(
self, pairs: List[PairWithTimeframe], since_ms: int
) -> Dict[PairWithTimeframe, DataFrame]:
self, pairs: list[PairWithTimeframe], since_ms: int
) -> dict[PairWithTimeframe, DataFrame]:
"""
Refresh ohlcv data for all pairs in needed_pairs if necessary.
Caches data with expiring per timeframe.
@@ -2618,7 +2619,7 @@ class Exchange:
timeframe: str,
limit: int,
since_ms: Optional[int] = None,
) -> List[List]:
) -> list[list]:
"""
Fetch funding rate history - used to selectively override this by subclasses.
"""
@@ -2652,7 +2653,7 @@ class Exchange:
pair: str,
timeframe: str,
c_type: CandleType,
ticks: List[List],
ticks: list[list],
cache: bool,
first_required_candle_date: int,
) -> DataFrame:
@@ -2676,13 +2677,13 @@ class Exchange:
async def _build_trades_dl_jobs(
self, pairwt: PairWithTimeframe, data_handler, cache: bool
) -> Tuple[PairWithTimeframe, Optional[DataFrame]]:
) -> tuple[PairWithTimeframe, Optional[DataFrame]]:
"""
Build coroutines to refresh trades for (they're then called through async.gather)
"""
pair, timeframe, candle_type = pairwt
since_ms = None
new_ticks: List = []
new_ticks: list = []
all_stored_ticks_df = DataFrame(columns=DEFAULT_TRADES_COLUMNS + ["date"])
first_candle_ms = self.needed_candle_for_trades_ms(timeframe, candle_type)
# refresh, if
@@ -2767,7 +2768,7 @@ class Exchange:
pair_list: ListPairsWithTimeframes,
*,
cache: bool = True,
) -> Dict[PairWithTimeframe, DataFrame]:
) -> dict[PairWithTimeframe, DataFrame]:
"""
Refresh in-memory TRADES asynchronously and set `_trades` with the result
Loops asynchronously over pair_list and downloads all pairs async (semi-parallel).
@@ -2821,7 +2822,7 @@ class Exchange:
@retrier_async
async def _async_fetch_trades(
self, pair: str, since: Optional[int] = None, params: Optional[dict] = None
) -> Tuple[List[List], Any]:
) -> tuple[list[list], Any]:
"""
Asynchronously gets trade history using fetch_trades.
Handles exchange errors, does one call to the exchange.
@@ -2867,7 +2868,7 @@ class Exchange:
"""
return True
def _get_trade_pagination_next_value(self, trades: List[Dict]):
def _get_trade_pagination_next_value(self, trades: list[dict]):
"""
Extract pagination id for the next "from_id" value
Applies only to fetch_trade_history by id.
@@ -2881,7 +2882,7 @@ class Exchange:
async def _async_get_trade_history_id(
self, pair: str, until: int, since: Optional[int] = None, from_id: Optional[str] = None
) -> Tuple[str, List[List]]:
) -> tuple[str, list[list]]:
"""
Asynchronously gets trade history using fetch_trades
use this when exchange uses id-based iteration (check `self._trades_pagination`)
@@ -2892,7 +2893,7 @@ class Exchange:
returns tuple: (pair, trades-list)
"""
trades: List[List] = []
trades: list[list] = []
# DEFAULT_TRADES_COLUMNS: 0 -> timestamp
# DEFAULT_TRADES_COLUMNS: 1 -> id
has_overlap = self._ft_has.get("trades_pagination_overlap", True)
@@ -2936,7 +2937,7 @@ class Exchange:
async def _async_get_trade_history_time(
self, pair: str, until: int, since: Optional[int] = None
) -> Tuple[str, List[List]]:
) -> tuple[str, list[list]]:
"""
Asynchronously gets trade history using fetch_trades,
when the exchange uses time-based iteration (check `self._trades_pagination`)
@@ -2946,7 +2947,7 @@ class Exchange:
returns tuple: (pair, trades-list)
"""
trades: List[List] = []
trades: list[list] = []
# DEFAULT_TRADES_COLUMNS: 0 -> timestamp
# DEFAULT_TRADES_COLUMNS: 1 -> id
while True:
@@ -2979,7 +2980,7 @@ class Exchange:
since: Optional[int] = None,
until: Optional[int] = None,
from_id: Optional[str] = None,
) -> Tuple[str, List[List]]:
) -> tuple[str, list[list]]:
"""
Async wrapper handling downloading trades using either time or id based methods.
"""
@@ -3010,7 +3011,7 @@ class Exchange:
since: Optional[int] = None,
until: Optional[int] = None,
from_id: Optional[str] = None,
) -> Tuple[str, List]:
) -> tuple[str, list]:
"""
Get trade history data using asyncio.
Handles all async work and returns the list of candles.
@@ -3070,7 +3071,7 @@ class Exchange:
raise OperationalException(e) from e
@retrier
def get_leverage_tiers(self) -> Dict[str, List[Dict]]:
def get_leverage_tiers(self) -> dict[str, list[dict]]:
try:
return self._api.fetch_leverage_tiers()
except ccxt.DDoSProtection as e:
@@ -3083,7 +3084,7 @@ class Exchange:
raise OperationalException(e) from e
@retrier_async
async def get_market_leverage_tiers(self, symbol: str) -> Tuple[str, List[Dict]]:
async def get_market_leverage_tiers(self, symbol: str) -> tuple[str, list[dict]]:
"""Leverage tiers per symbol"""
try:
tier = await self._api_async.fetch_market_leverage_tiers(symbol)
@@ -3098,7 +3099,7 @@ class Exchange:
except ccxt.BaseError as e:
raise OperationalException(e) from e
def load_leverage_tiers(self) -> Dict[str, List[Dict]]:
def load_leverage_tiers(self) -> dict[str, list[dict]]:
if self.trading_mode == TradingMode.FUTURES:
if self.exchange_has("fetchLeverageTiers"):
# Fetch all leverage tiers at once
@@ -3117,7 +3118,7 @@ class Exchange:
)
]
tiers: Dict[str, List[Dict]] = {}
tiers: dict[str, list[dict]] = {}
tiers_cached = self.load_cached_leverage_tiers(self._config["stake_currency"])
if tiers_cached:
@@ -3158,7 +3159,7 @@ class Exchange:
return tiers
return {}
def cache_leverage_tiers(self, tiers: Dict[str, List[Dict]], stake_currency: str) -> None:
def cache_leverage_tiers(self, tiers: dict[str, list[dict]], stake_currency: str) -> None:
filename = self._config["datadir"] / "futures" / f"leverage_tiers_{stake_currency}.json"
if not filename.parent.is_dir():
filename.parent.mkdir(parents=True)
@@ -3170,7 +3171,7 @@ class Exchange:
def load_cached_leverage_tiers(
self, stake_currency: str, cache_time: Optional[timedelta] = None
) -> Optional[Dict[str, List[Dict]]]:
) -> Optional[dict[str, list[dict]]]:
"""
Load cached leverage tiers from disk
:param cache_time: The maximum age of the cache before it is considered outdated
@@ -3205,7 +3206,7 @@ class Exchange:
pair_tiers.append(self.parse_leverage_tier(tier))
self._leverage_tiers[pair] = pair_tiers
def parse_leverage_tier(self, tier) -> Dict:
def parse_leverage_tier(self, tier) -> dict:
info = tier.get("info", {})
return {
"minNotional": tier["minNotional"],
@@ -3345,7 +3346,7 @@ class Exchange:
pair: str,
margin_mode: MarginMode,
accept_fail: bool = False,
params: Optional[Dict] = None,
params: Optional[dict] = None,
):
"""
Sets the margin mode on the exchange to cross or isolated for a specific pair
@@ -3632,7 +3633,7 @@ class Exchange:
self,
pair: str,
notional_value: float,
) -> Tuple[float, Optional[float]]:
) -> tuple[float, Optional[float]]:
"""
Important: Must be fetching data from cached values as this is used by backtesting!
:param pair: Market symbol

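Of the hunks above, the stoploss helpers carry the most implicit arithmetic: `_get_stop_limit_rate` derives the limit price from the stop price via `stoploss_on_exchange_limit_ratio` (default 0.99), keeping the limit below the stop for a sell-side stoploss order. A standalone sketch of just that sell-side calculation; the buy-side branch and the validation of bad ratios are omitted, so this is not the full freqtrade implementation:

```python
def stop_limit_rate_sell(stop_price: float, order_types: dict) -> float:
    """Sell-side sketch: place the limit slightly below the stop trigger so a
    triggered stop-limit order can realistically fill."""
    limit_price_pct = order_types.get("stoploss_on_exchange_limit_ratio", 0.99)
    return stop_price * limit_price_pct


if __name__ == "__main__":
    # With the default 0.99 ratio, a 100.0 stop trigger yields a 99.0 limit.
    print(stop_limit_rate_sell(100.0, {}))                                           # 99.0
    print(stop_limit_rate_sell(100.0, {"stoploss_on_exchange_limit_ratio": 0.95}))   # 95.0
```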
View File

@@ -1,11 +1,11 @@
from typing import Dict, List, Optional, Tuple, TypedDict
from typing import Optional, TypedDict
from freqtrade.enums import CandleType
class FtHas(TypedDict, total=False):
order_time_in_force: List[str]
exchange_has_overrides: Dict[str, bool]
order_time_in_force: list[str]
exchange_has_overrides: dict[str, bool]
marketOrderRequiresPrice: bool
# Stoploss on exchange
@@ -13,16 +13,16 @@ class FtHas(TypedDict, total=False):
stop_price_param: str
stop_price_prop: str
stop_price_type_field: str
stop_price_type_value_mapping: Dict
stoploss_order_types: Dict[str, str]
stop_price_type_value_mapping: dict
stoploss_order_types: dict[str, str]
# ohlcv
ohlcv_params: Dict
ohlcv_params: dict
ohlcv_candle_limit: int
ohlcv_has_history: bool
ohlcv_partial_candle: bool
ohlcv_require_since: bool
ohlcv_volume_currency: str
ohlcv_candle_limit_per_timeframe: Dict[str, int]
ohlcv_candle_limit_per_timeframe: dict[str, int]
# Tickers
tickers_have_quoteVolume: bool
tickers_have_percentage: bool
@@ -35,7 +35,7 @@ class FtHas(TypedDict, total=False):
trades_has_history: bool
trades_pagination_overlap: bool
# Orderbook
l2_limit_range: Optional[List[int]]
l2_limit_range: Optional[list[int]]
l2_limit_range_required: bool
# Futures
ccxt_futures_name: str # usually swap
@@ -44,7 +44,7 @@ class FtHas(TypedDict, total=False):
funding_fee_timeframe: str
floor_leverage: bool
needs_trading_fees: bool
order_props_in_contracts: List[str]
order_props_in_contracts: list[str]
# Websocket control
ws_enabled: bool
@@ -63,13 +63,13 @@ class Ticker(TypedDict):
# Several more - only listing required.
Tickers = Dict[str, Ticker]
Tickers = dict[str, Ticker]
class OrderBook(TypedDict):
symbol: str
bids: List[Tuple[float, float]]
asks: List[Tuple[float, float]]
bids: list[tuple[float, float]]
asks: list[tuple[float, float]]
timestamp: Optional[int]
datetime: Optional[str]
nonce: Optional[int]
@@ -81,7 +81,7 @@ class CcxtBalance(TypedDict):
total: float
CcxtBalances = Dict[str, CcxtBalance]
CcxtBalances = dict[str, CcxtBalance]
class CcxtPosition(TypedDict):
@@ -95,4 +95,4 @@ class CcxtPosition(TypedDict):
# pair, timeframe, candleType, OHLCV, drop last?,
OHLCVResponse = Tuple[str, str, CandleType, List, bool]
OHLCVResponse = tuple[str, str, CandleType, list, bool]

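The exchange type definitions show the same builtin generics working inside `TypedDict` declarations and in type aliases such as `Tickers = dict[str, Ticker]`. A self-contained sketch of that pattern with made-up fields:

```python
from typing import Optional, TypedDict


class BookLevelSketch(TypedDict, total=False):
    """Illustrative TypedDict using PEP 585 generics; total=False makes
    every field optional, as with FtHas above."""
    bids: list[tuple[float, float]]
    asks: list[tuple[float, float]]
    timestamp: Optional[int]


# A type alias built from the TypedDict, mirroring `Tickers = dict[str, Ticker]`.
BooksSketch = dict[str, BookLevelSketch]

book: BookLevelSketch = {"bids": [(100.0, 1.5)], "asks": [(100.5, 2.0)]}
books: BooksSketch = {"BTC/USDT": book}
print(books["BTC/USDT"]["bids"][0])  # (100.0, 1.5)
```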
View File

@@ -5,7 +5,7 @@ Exchange support utils
import inspect
from datetime import datetime, timedelta, timezone
from math import ceil, floor
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Optional
import ccxt
from ccxt import (
@@ -39,14 +39,14 @@ def is_exchange_known_ccxt(
return exchange_name in ccxt_exchanges(ccxt_module)
def ccxt_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> List[str]:
def ccxt_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> list[str]:
"""
Return the list of all exchanges known to ccxt
"""
return ccxt_module.exchanges if ccxt_module is not None else ccxt.exchanges
def available_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> List[str]:
def available_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> list[str]:
"""
Return exchanges available to the bot, i.e. non-bad exchanges in the ccxt list
"""
@@ -54,7 +54,7 @@ def available_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> List[st
return [x for x in exchanges if validate_exchange(x)[0]]
def validate_exchange(exchange: str) -> Tuple[bool, str, Optional[ccxt.Exchange]]:
def validate_exchange(exchange: str) -> tuple[bool, str, Optional[ccxt.Exchange]]:
"""
returns: can_use, reason, exchange_object
with Reason including both missing and missing_opt
@@ -91,7 +91,7 @@ def validate_exchange(exchange: str) -> Tuple[bool, str, Optional[ccxt.Exchange]
def _build_exchange_list_entry(
exchange_name: str, exchangeClasses: Dict[str, Any]
exchange_name: str, exchangeClasses: dict[str, Any]
) -> ValidExchangesType:
valid, comment, ex_mod = validate_exchange(exchange_name)
result: ValidExchangesType = {
@@ -121,7 +121,7 @@ def _build_exchange_list_entry(
return result
def list_available_exchanges(all_exchanges: bool) -> List[ValidExchangesType]:
def list_available_exchanges(all_exchanges: bool) -> list[ValidExchangesType]:
"""
:return: List of exchange entries with name, valid, reason.
"""
@@ -130,7 +130,7 @@ def list_available_exchanges(all_exchanges: bool) -> List[ValidExchangesType]:
subclassed = {e["name"].lower(): e for e in ExchangeResolver.search_all_objects({}, False)}
exchanges_valid: List[ValidExchangesType] = [
exchanges_valid: list[ValidExchangesType] = [
_build_exchange_list_entry(e, subclassed) for e in exchanges
]
@@ -155,7 +155,7 @@ def date_minus_candles(
return new_date
def market_is_active(market: Dict) -> bool:
def market_is_active(market: dict) -> bool:
"""
Return True if the market is active.
"""

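For the utility helpers above, the flow is: start from `ccxt.exchanges` (the full list of exchange ids), then keep only the entries a validation function accepts. A stripped-down sketch of that shape; `looks_usable` is a stand-in predicate, not freqtrade's `validate_exchange`:

```python
import ccxt


def looks_usable(exchange_id: str) -> tuple[bool, str]:
    """Stand-in validator: only checks that ccxt exposes a class for the id;
    the real check inspects capabilities and a bad-exchange list."""
    return (hasattr(ccxt, exchange_id), "")


def usable_exchanges() -> list[str]:
    return [x for x in ccxt.exchanges if looks_usable(x)[0]]


if __name__ == "__main__":
    print(len(usable_exchanges()), "of", len(ccxt.exchanges), "ids resolved")
```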
View File

@@ -4,7 +4,6 @@ import time
from copy import deepcopy
from functools import partial
from threading import Thread
from typing import Dict, Set
import ccxt
@@ -22,12 +21,12 @@ class ExchangeWS:
def __init__(self, config: Config, ccxt_object: ccxt.Exchange) -> None:
self.config = config
self.ccxt_object = ccxt_object
self._background_tasks: Set[asyncio.Task] = set()
self._background_tasks: set[asyncio.Task] = set()
self._klines_watching: Set[PairWithTimeframe] = set()
self._klines_scheduled: Set[PairWithTimeframe] = set()
self.klines_last_refresh: Dict[PairWithTimeframe, float] = {}
self.klines_last_request: Dict[PairWithTimeframe, float] = {}
self._klines_watching: set[PairWithTimeframe] = set()
self._klines_scheduled: set[PairWithTimeframe] = set()
self.klines_last_refresh: dict[PairWithTimeframe, float] = {}
self.klines_last_request: dict[PairWithTimeframe, float] = {}
self._thread = Thread(name="ccxt_ws", target=self._start_forever)
self._thread.start()
self.__cleanup_called = False

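`ExchangeWS` keeps its kline bookkeeping in plain `set`/`dict` containers and holds strong references to its asyncio tasks in `_background_tasks`. The reference-holding part follows the standard asyncio idiom sketched below; this is a generic illustration, not the ExchangeWS implementation:

```python
import asyncio


async def watch_pair(pair: str) -> None:
    # Placeholder for a websocket watch loop.
    await asyncio.sleep(0.01)
    print(f"refreshed {pair}")


async def main() -> None:
    background_tasks: set[asyncio.Task] = set()
    for pair in ("BTC/USDT", "ETH/USDT"):
        task = asyncio.create_task(watch_pair(pair))
        # Keep a strong reference so the task isn't garbage-collected mid-flight,
        # and drop it once the task finishes.
        background_tasks.add(task)
        task.add_done_callback(background_tasks.discard)
    await asyncio.gather(*background_tasks)


if __name__ == "__main__":
    asyncio.run(main())
```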
View File

@@ -2,7 +2,7 @@
import logging
from datetime import datetime
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Optional
from freqtrade.constants import BuySell
from freqtrade.enums import MarginMode, PriceType, TradingMode
@@ -46,7 +46,7 @@ class Gate(Exchange):
},
}
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
_supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
# TradingMode.SPOT always supported and not required in this list
# (TradingMode.MARGIN, MarginMode.CROSS),
# (TradingMode.FUTURES, MarginMode.CROSS),
@@ -60,7 +60,7 @@ class Gate(Exchange):
leverage: float,
reduceOnly: bool,
time_in_force: str = "GTC",
) -> Dict:
) -> dict:
params = super()._get_params(
side=side,
ordertype=ordertype,
@@ -74,8 +74,8 @@ class Gate(Exchange):
return params
def get_trades_for_order(
self, order_id: str, pair: str, since: datetime, params: Optional[Dict] = None
) -> List:
self, order_id: str, pair: str, since: datetime, params: Optional[dict] = None
) -> list:
trades = super().get_trades_for_order(order_id, pair, since, params)
if self.trading_mode == TradingMode.FUTURES:
@@ -99,10 +99,10 @@ class Gate(Exchange):
}
return trades
def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
def get_order_id_conditional(self, order: dict[str, Any]) -> str:
return safe_value_fallback2(order, order, "id_stop", "id")
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> dict:
order = self.fetch_order(order_id=order_id, pair=pair, params={"stop": True})
if order.get("status", "open") == "closed":
# Places a real order - which we need to fetch explicitly.
@@ -120,6 +120,6 @@ class Gate(Exchange):
return order
def cancel_stoploss_order(
self, order_id: str, pair: str, params: Optional[Dict] = None
) -> Dict:
self, order_id: str, pair: str, params: Optional[dict] = None
) -> dict:
return self.cancel_order(order_id=order_id, pair=pair, params={"stop": True})

View File

@@ -1,7 +1,6 @@
"""HTX exchange subclass"""
import logging
from typing import Dict
from freqtrade.constants import BuySell
from freqtrade.exchange import Exchange
@@ -32,7 +31,7 @@ class Htx(Exchange):
"trades_has_history": False, # Endpoint doesn't have a "since" parameter
}
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> dict:
params = self._params.copy()
params.update(
{

View File

@@ -1,7 +1,6 @@
"""Hyperliquid exchange subclass"""
import logging
from typing import Dict
from freqtrade.enums import TradingMode
from freqtrade.exchange import Exchange
@@ -26,7 +25,7 @@ class Hyperliquid(Exchange):
}
@property
def _ccxt_config(self) -> Dict:
def _ccxt_config(self) -> dict:
# Parameters to add directly to ccxt sync/async initialization.
# ccxt defaults to swap mode.
config = {}

View File

@@ -2,7 +2,7 @@
import logging
from datetime import datetime
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Optional
import ccxt
from pandas import DataFrame
@@ -19,7 +19,7 @@ logger = logging.getLogger(__name__)
class Kraken(Exchange):
_params: Dict = {"trading_agreement": "agree"}
_params: dict = {"trading_agreement": "agree"}
_ft_has: FtHas = {
"stoploss_on_exchange": True,
"stop_price_param": "stopLossPrice",
@@ -35,13 +35,13 @@ class Kraken(Exchange):
"mark_ohlcv_timeframe": "4h",
}
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
_supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
# TradingMode.SPOT always supported and not required in this list
# (TradingMode.MARGIN, MarginMode.CROSS),
# (TradingMode.FUTURES, MarginMode.CROSS)
]
def market_is_tradable(self, market: Dict[str, Any]) -> bool:
def market_is_tradable(self, market: dict[str, Any]) -> bool:
"""
Check if the market symbol is tradable by Freqtrade.
Default checks + check if pair is darkpool pair.
@@ -50,7 +50,7 @@ class Kraken(Exchange):
return parent_check and market.get("darkpool", False) is False
def get_tickers(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Tickers:
def get_tickers(self, symbols: Optional[list[str]] = None, cached: bool = False) -> Tickers:
# Only fetch tickers for current stake currency
# Otherwise the request for kraken becomes too large.
symbols = list(self.get_markets(quote_currencies=[self._config["stake_currency"]]))
@@ -115,7 +115,7 @@ class Kraken(Exchange):
leverage: float,
reduceOnly: bool,
time_in_force: str = "GTC",
) -> Dict:
) -> dict:
params = super()._get_params(
side=side,
ordertype=ordertype,
@@ -165,7 +165,7 @@ class Kraken(Exchange):
return fees if is_short else -fees
def _get_trade_pagination_next_value(self, trades: List[Dict]):
def _get_trade_pagination_next_value(self, trades: list[dict]):
"""
Extract pagination id for the next "from_id" value
Applies only to fetch_trade_history by id.

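Kraken's `get_tickers` override narrows the request to markets quoted in the configured stake currency before delegating to the parent class, since fetching every Kraken ticker makes the request too large. A toy sketch of that symbol-narrowing step; the markets dict is hand-written here, whereas the real data comes from the exchange's loaded markets:

```python
# Hypothetical, minimal stand-in for filtering markets by quote currency.
markets: dict[str, dict] = {
    "BTC/USDT": {"quote": "USDT"},
    "ETH/USDT": {"quote": "USDT"},
    "ETH/BTC": {"quote": "BTC"},
}

stake_currency = "USDT"
symbols = [pair for pair, m in markets.items() if m.get("quote") == stake_currency]
print(symbols)  # ['BTC/USDT', 'ETH/USDT'] -- only these would be passed to fetch_tickers
```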
View File

@@ -1,7 +1,6 @@
"""Kucoin exchange subclass."""
import logging
from typing import Dict
from freqtrade.constants import BuySell
from freqtrade.exchange import Exchange
@@ -32,7 +31,7 @@ class Kucoin(Exchange):
"ohlcv_candle_limit": 1500,
}
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> dict:
params = self._params.copy()
params.update({"stopPrice": stop_price, "stop": "loss"})
return params
@@ -48,7 +47,7 @@ class Kucoin(Exchange):
leverage: float,
reduceOnly: bool = False,
time_in_force: str = "GTC",
) -> Dict:
) -> dict:
res = super().create_order(
pair=pair,
ordertype=ordertype,

View File

@@ -1,6 +1,6 @@
import logging
from datetime import timedelta
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Optional
import ccxt
@@ -48,7 +48,7 @@ class Okx(Exchange):
"ws_enabled": True,
}
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
_supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
# TradingMode.SPOT always supported and not required in this list
# (TradingMode.MARGIN, MarginMode.CROSS),
# (TradingMode.FUTURES, MarginMode.CROSS),
@@ -57,7 +57,7 @@ class Okx(Exchange):
net_only = True
_ccxt_params: Dict = {"options": {"brokerId": "ffb5405ad327SUDE"}}
_ccxt_params: dict = {"options": {"brokerId": "ffb5405ad327SUDE"}}
def ohlcv_candle_limit(
self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None
@@ -119,7 +119,7 @@ class Okx(Exchange):
leverage: float,
reduceOnly: bool,
time_in_force: str = "GTC",
) -> Dict:
) -> dict:
params = super()._get_params(
side=side,
ordertype=ordertype,
@@ -184,14 +184,14 @@ class Okx(Exchange):
pair_tiers = self._leverage_tiers[pair]
return pair_tiers[-1]["maxNotional"] / leverage
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> dict:
params = super()._get_stop_params(side, ordertype, stop_price)
if self.trading_mode == TradingMode.FUTURES and self.margin_mode:
params["tdMode"] = self.margin_mode.value
params["posSide"] = self._get_posSide(side, True)
return params
def _convert_stop_order(self, pair: str, order_id: str, order: Dict) -> Dict:
def _convert_stop_order(self, pair: str, order_id: str, order: dict) -> dict:
if (
order.get("status", "open") == "closed"
and (real_order_id := order.get("info", {}).get("ordId")) is not None
@@ -209,7 +209,7 @@ class Okx(Exchange):
return order
@retrier(retries=API_RETRY_COUNT)
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> dict:
if self._config["dry_run"]:
return self.fetch_dry_run_order(order_id)
@@ -231,7 +231,7 @@ class Okx(Exchange):
return self._fetch_stop_order_fallback(order_id, pair)
def _fetch_stop_order_fallback(self, order_id: str, pair: str) -> Dict:
def _fetch_stop_order_fallback(self, order_id: str, pair: str) -> dict:
params2 = {"stop": True, "ordType": "conditional"}
for method in (
self._api.fetch_open_orders,
@@ -256,14 +256,14 @@ class Okx(Exchange):
raise OperationalException(e) from e
raise RetryableOrderError(f"StoplossOrder not found (pair: {pair} id: {order_id}).")
def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
def get_order_id_conditional(self, order: dict[str, Any]) -> str:
if order.get("type", "") == "stop":
return safe_value_fallback2(order, order, "id_stop", "id")
return order["id"]
def cancel_stoploss_order(
self, order_id: str, pair: str, params: Optional[Dict] = None
) -> Dict:
self, order_id: str, pair: str, params: Optional[dict] = None
) -> dict:
params1 = {"stop": True}
# 'ordType': 'conditional'
#
@@ -273,7 +273,7 @@ class Okx(Exchange):
params=params1,
)
def _fetch_orders_emulate(self, pair: str, since_ms: int) -> List[Dict]:
def _fetch_orders_emulate(self, pair: str, since_ms: int) -> list[dict]:
orders = []
orders = self._api.fetch_closed_orders(pair, since=since_ms)

View File

@@ -2,7 +2,7 @@ import logging
import random
from abc import abstractmethod
from enum import Enum
from typing import List, Optional, Type, Union
from typing import Optional, Union
import gymnasium as gym
import numpy as np
@@ -89,7 +89,7 @@ class BaseEnvironment(gym.Env):
self.fee = fee
# set here to default 5Ac, but all children envs can override this
self.actions: Type[Enum] = BaseActions
self.actions: type[Enum] = BaseActions
self.tensorboard_metrics: dict = {}
self.can_short: bool = can_short
self.live: bool = live
@@ -163,7 +163,7 @@ class BaseEnvironment(gym.Env):
Unique to the environment action count. Must be inherited.
"""
def action_masks(self) -> List[bool]:
def action_masks(self) -> list[bool]:
return [self._is_valid(action.value) for action in self.actions]
def seed(self, seed: int = 1):
@@ -375,7 +375,7 @@ class BaseEnvironment(gym.Env):
def current_price(self) -> float:
return self.prices.iloc[self._current_tick].open
def get_actions(self) -> Type[Enum]:
def get_actions(self) -> type[Enum]:
"""
Used by SubprocVecEnv to get actions from
initialized env for tensorboard callback

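The RL environment stores its action set as an enum class (`self.actions: type[Enum]`), which is what the `Type[Enum]` to `type[Enum]` changes refer to, and `action_masks` simply iterates that enum. A compact illustration with a made-up action enum and validity rule:

```python
from enum import Enum


class ActionsSketch(Enum):
    Neutral = 0
    Long_enter = 1
    Long_exit = 2


class EnvSketch:
    # The action set is stored as the enum *class* itself, hence `type[Enum]`.
    actions: type[Enum] = ActionsSketch

    def __init__(self, in_position: bool) -> None:
        self.in_position = in_position

    def _is_valid(self, action: int) -> bool:
        # Made-up rule: exiting is only valid while in a position.
        if action == ActionsSketch.Long_exit.value:
            return self.in_position
        return True

    def action_masks(self) -> list[bool]:
        return [self._is_valid(action.value) for action in self.actions]


print(EnvSketch(in_position=False).action_masks())  # [True, True, False]
```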
View File

@@ -4,7 +4,7 @@ import logging
from abc import abstractmethod
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Callable, Dict, Optional, Tuple, Type, Union
from typing import Any, Callable, Optional, Union
import gymnasium as gym
import numpy as np
@@ -114,7 +114,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
training_filter=True,
)
dd: Dict[str, Any] = dk.make_train_test_datasets(features_filtered, labels_filtered)
dd: dict[str, Any] = dk.make_train_test_datasets(features_filtered, labels_filtered)
self.df_raw = copy.deepcopy(dd["train_features"])
dk.fit_labels() # FIXME useless for now, but just satiating append methods
@@ -151,7 +151,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
def set_train_and_eval_environments(
self,
data_dictionary: Dict[str, DataFrame],
data_dictionary: dict[str, DataFrame],
prices_train: DataFrame,
prices_test: DataFrame,
dk: FreqaiDataKitchen,
@@ -183,7 +183,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
actions = self.train_env.get_actions()
self.tensorboard_callback = TensorboardCallback(verbose=1, actions=actions)
def pack_env_dict(self, pair: str) -> Dict[str, Any]:
def pack_env_dict(self, pair: str) -> dict[str, Any]:
"""
Create dictionary of environment arguments
"""
@@ -204,7 +204,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
return env_info
@abstractmethod
def fit(self, data_dictionary: Dict[str, Any], dk: FreqaiDataKitchen, **kwargs):
def fit(self, data_dictionary: dict[str, Any], dk: FreqaiDataKitchen, **kwargs):
"""
Agent customizations and abstract Reinforcement Learning customizations
go in here. Abstract method, so this function must be overridden by
@@ -212,7 +212,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
"""
return
def get_state_info(self, pair: str) -> Tuple[float, float, int]:
def get_state_info(self, pair: str) -> tuple[float, float, int]:
"""
State info during dry/live (not backtesting) which is fed back
into the model.
@@ -250,7 +250,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
def predict(
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
) -> tuple[DataFrame, npt.NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param unfiltered_df: Full dataframe for the current backtest period.
@@ -303,7 +303,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
def build_ohlc_price_dataframes(
self, data_dictionary: dict, pair: str, dk: FreqaiDataKitchen
) -> Tuple[DataFrame, DataFrame]:
) -> tuple[DataFrame, DataFrame]:
"""
Builds the train prices and test prices for the environment.
"""
@@ -482,13 +482,13 @@ class BaseReinforcementLearningModel(IFreqaiModel):
def make_env(
MyRLEnv: Type[BaseEnvironment],
MyRLEnv: type[BaseEnvironment],
env_id: str,
rank: int,
seed: int,
train_df: DataFrame,
price: DataFrame,
env_info: Dict[str, Any] = {},
env_info: dict[str, Any] = {},
) -> Callable:
"""
Utility function for multiprocessed env.

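`make_env` is a closure factory: it returns a zero-argument callable that builds one environment per worker, with the seed offset by the worker's rank, which is the shape vectorized-env wrappers expect. A generic sketch of that pattern; the environment class and arguments are placeholders, not the freqai ones:

```python
from typing import Any, Callable


class ToyEnv:
    def __init__(self, env_id: str, seed: int, env_info: dict[str, Any]):
        self.env_id, self.seed, self.env_info = env_id, seed, env_info


def make_env(env_id: str, rank: int, seed: int,
             env_info: dict[str, Any]) -> Callable[[], ToyEnv]:
    """Return a thunk that builds one env; each worker gets seed + rank."""
    def _init() -> ToyEnv:
        return ToyEnv(env_id=env_id, seed=seed + rank, env_info=env_info)
    return _init


# One factory per parallel worker; a vectorized wrapper would call each thunk.
factories = [make_env("train_env", rank, seed=42, env_info={}) for rank in range(4)]
print([f().seed for f in factories])  # [42, 43, 44, 45]
```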
View File

@@ -1,6 +1,6 @@
import logging
from time import time
from typing import Any, Tuple
from typing import Any
import numpy as np
import numpy.typing as npt
@@ -86,7 +86,7 @@ class BaseClassifierModel(IFreqaiModel):
def predict(
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
) -> tuple[DataFrame, npt.NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param unfiltered_df: Full dataframe for the current backtest period.

View File

@@ -1,6 +1,6 @@
import logging
from time import time
from typing import Any, Dict, List, Tuple
from typing import Any
import numpy as np
import numpy.typing as npt
@@ -44,7 +44,7 @@ class BasePyTorchClassifier(BasePyTorchModel):
def predict(
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
) -> tuple[DataFrame, npt.NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param dk: The datakitchen object
@@ -100,9 +100,9 @@ class BasePyTorchClassifier(BasePyTorchModel):
def encode_class_names(
self,
data_dictionary: Dict[str, pd.DataFrame],
data_dictionary: dict[str, pd.DataFrame],
dk: FreqaiDataKitchen,
class_names: List[str],
class_names: list[str],
):
"""
encode class name, str -> int
@@ -119,7 +119,7 @@ class BasePyTorchClassifier(BasePyTorchModel):
)
@staticmethod
def assert_valid_class_names(target_column: pd.Series, class_names: List[str]):
def assert_valid_class_names(target_column: pd.Series, class_names: list[str]):
non_defined_labels = set(target_column) - set(class_names)
if len(non_defined_labels) != 0:
raise OperationalException(
@@ -127,7 +127,7 @@ class BasePyTorchClassifier(BasePyTorchModel):
f"expecting labels: {class_names}",
)
def decode_class_names(self, class_ints: torch.Tensor) -> List[str]:
def decode_class_names(self, class_ints: torch.Tensor) -> list[str]:
"""
decode class name, int -> str
"""
@@ -141,14 +141,14 @@ class BasePyTorchClassifier(BasePyTorchModel):
def convert_label_column_to_int(
self,
data_dictionary: Dict[str, pd.DataFrame],
data_dictionary: dict[str, pd.DataFrame],
dk: FreqaiDataKitchen,
class_names: List[str],
class_names: list[str],
):
self.init_class_names_to_index_mapping(class_names)
self.encode_class_names(data_dictionary, dk, class_names)
def get_class_names(self) -> List[str]:
def get_class_names(self) -> list[str]:
if not self.class_names:
raise ValueError(
"self.class_names is empty, "

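The PyTorch classifier maps class names to integer indices before training and back to names when reporting (`encode_class_names` / `decode_class_names`), and refuses labels that are not declared in `class_names`. A small pandas-only sketch of that mapping; the real code operates on the data dictionary and decodes torch tensors:

```python
import pandas as pd

class_names = ["down", "neutral", "up"]
name_to_index = {name: i for i, name in enumerate(class_names)}
index_to_name = {i: name for name, i in name_to_index.items()}

labels = pd.Series(["up", "down", "up", "neutral"])

# Validity check: every observed label must be a declared class name.
unknown = set(labels) - set(class_names)
if unknown:
    raise ValueError(f"found labels {unknown} not in {class_names}")

encoded = labels.map(name_to_index)   # str -> int, used for training
decoded = encoded.map(index_to_name)  # int -> str, used when reporting
print(encoded.tolist())  # [2, 0, 2, 1]
print(decoded.tolist())  # ['up', 'down', 'up', 'neutral']
```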
View File

@@ -1,6 +1,6 @@
import logging
from time import time
from typing import Any, Tuple
from typing import Any
import numpy as np
import numpy.typing as npt
@@ -24,7 +24,7 @@ class BasePyTorchRegressor(BasePyTorchModel):
def predict(
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
) -> tuple[DataFrame, npt.NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param unfiltered_df: Full dataframe for the current backtest period.

View File

@@ -1,6 +1,6 @@
import logging
from time import time
from typing import Any, Tuple
from typing import Any
import numpy as np
import numpy.typing as npt
@@ -88,7 +88,7 @@ class BaseRegressionModel(IFreqaiModel):
def predict(
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
) -> tuple[DataFrame, npt.NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param unfiltered_df: Full dataframe for the current backtest period.

View File

@@ -7,7 +7,7 @@ import threading
import warnings
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, Dict, Tuple, TypedDict
from typing import Any, TypedDict
import numpy as np
import pandas as pd
@@ -69,14 +69,14 @@ class FreqaiDataDrawer:
self.config = config
self.freqai_info = config.get("freqai", {})
# dictionary holding all pair metadata necessary to load in from disk
self.pair_dict: Dict[str, pair_info] = {}
self.pair_dict: dict[str, pair_info] = {}
# dictionary holding all actively inferenced models in memory given a model filename
self.model_dictionary: Dict[str, Any] = {}
self.model_dictionary: dict[str, Any] = {}
# all additional metadata that we want to keep in ram
self.meta_data_dictionary: Dict[str, Dict[str, Any]] = {}
self.model_return_values: Dict[str, DataFrame] = {}
self.historic_data: Dict[str, Dict[str, DataFrame]] = {}
self.historic_predictions: Dict[str, DataFrame] = {}
self.meta_data_dictionary: dict[str, dict[str, Any]] = {}
self.model_return_values: dict[str, DataFrame] = {}
self.historic_data: dict[str, dict[str, DataFrame]] = {}
self.historic_predictions: dict[str, DataFrame] = {}
self.full_path = full_path
self.historic_predictions_path = Path(self.full_path / "historic_predictions.pkl")
self.historic_predictions_bkp_path = Path(
@@ -87,14 +87,14 @@ class FreqaiDataDrawer:
self.metric_tracker_path = Path(self.full_path / "metric_tracker.json")
self.load_drawer_from_disk()
self.load_historic_predictions_from_disk()
self.metric_tracker: Dict[str, Dict[str, Dict[str, list]]] = {}
self.metric_tracker: dict[str, dict[str, dict[str, list]]] = {}
self.load_metric_tracker_from_disk()
self.training_queue: Dict[str, int] = {}
self.training_queue: dict[str, int] = {}
self.history_lock = threading.Lock()
self.save_lock = threading.Lock()
self.pair_dict_lock = threading.Lock()
self.metric_tracker_lock = threading.Lock()
self.old_DBSCAN_eps: Dict[str, float] = {}
self.old_DBSCAN_eps: dict[str, float] = {}
self.empty_pair_dict: pair_info = {
"model_filename": "",
"trained_timestamp": 0,
@@ -228,7 +228,7 @@ class FreqaiDataDrawer:
self.pair_dict, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE
)
def save_global_metadata_to_disk(self, metadata: Dict[str, Any]):
def save_global_metadata_to_disk(self, metadata: dict[str, Any]):
"""
Save global metadata json to disk
"""
@@ -242,7 +242,7 @@ class FreqaiDataDrawer:
if isinstance(obj, np.generic):
return obj.item()
def get_pair_dict_info(self, pair: str) -> Tuple[str, int]:
def get_pair_dict_info(self, pair: str) -> tuple[str, int]:
"""
Locate and load existing model metadata from persistent storage. If not located,
create a new one and append the current pair to it and prepare it for its first
@@ -446,7 +446,7 @@ class FreqaiDataDrawer:
pattern = re.compile(r"sub-train-(\w+)_(\d{10})")
delete_dict: Dict[str, Any] = {}
delete_dict: dict[str, Any] = {}
for directory in model_folders:
result = pattern.match(str(directory.name))
@@ -704,7 +704,7 @@ class FreqaiDataDrawer:
def get_base_and_corr_dataframes(
self, timerange: TimeRange, pair: str, dk: FreqaiDataKitchen
) -> Tuple[Dict[Any, Any], Dict[Any, Any]]:
) -> tuple[dict[Any, Any], dict[Any, Any]]:
"""
Searches through our historic_data in memory and returns the dataframes relevant
to the present pair.
@@ -713,8 +713,8 @@ class FreqaiDataDrawer:
:param metadata: dict = strategy furnished pair metadata
"""
with self.history_lock:
corr_dataframes: Dict[Any, Any] = {}
base_dataframes: Dict[Any, Any] = {}
corr_dataframes: dict[Any, Any] = {}
base_dataframes: dict[Any, Any] = {}
historic_data = self.historic_data
pairs = self.freqai_info["feature_parameters"].get("include_corr_pairlist", [])

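`FreqaiDataDrawer.np_encoder` converts NumPy scalars to native Python values so the pair dict can be serialized (the drawer passes it as the `default=` hook to rapidjson). The same hook works with the standard library's json module, as sketched below with made-up payload values; the explicit TypeError is added here for clarity:

```python
import json

import numpy as np


def np_encoder(obj):
    """Convert NumPy scalars to native Python values so the json module
    can serialize them; mirrors the `default=` hook used for the pair dict."""
    if isinstance(obj, np.generic):
        return obj.item()
    raise TypeError(f"{type(obj)} is not JSON serializable")


payload = {"trained_timestamp": np.int64(1700000000), "eps": np.float32(0.25)}
print(json.dumps(payload, default=np_encoder))
# {"trained_timestamp": 1700000000, "eps": 0.25}
```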
View File

@@ -5,7 +5,7 @@ import random
import shutil
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Optional
import numpy as np
import numpy.typing as npt
@@ -64,15 +64,15 @@ class FreqaiDataKitchen:
live: bool = False,
pair: str = "",
):
self.data: Dict[str, Any] = {}
self.data_dictionary: Dict[str, DataFrame] = {}
self.data: dict[str, Any] = {}
self.data_dictionary: dict[str, DataFrame] = {}
self.config = config
self.freqai_config: Dict[str, Any] = config["freqai"]
self.freqai_config: dict[str, Any] = config["freqai"]
self.full_df: DataFrame = DataFrame()
self.append_df: DataFrame = DataFrame()
self.data_path = Path()
self.label_list: List = []
self.training_features_list: List = []
self.label_list: list = []
self.training_features_list: list = []
self.model_filename: str = ""
self.backtesting_results_path = Path()
self.backtest_predictions_folder: str = "backtesting_predictions"
@@ -104,9 +104,9 @@ class FreqaiDataKitchen:
else:
self.thread_count = self.freqai_config["data_kitchen_thread_count"]
self.train_dates: DataFrame = pd.DataFrame()
self.unique_classes: Dict[str, list] = {}
self.unique_classes: dict[str, list] = {}
self.unique_class_list: list = []
self.backtest_live_models_data: Dict[str, Any] = {}
self.backtest_live_models_data: dict[str, Any] = {}
def set_paths(
self,
@@ -127,7 +127,7 @@ class FreqaiDataKitchen:
def make_train_test_datasets(
self, filtered_dataframe: DataFrame, labels: DataFrame
) -> Dict[Any, Any]:
) -> dict[Any, Any]:
"""
Given the dataframe for the full history for training, split the data into
training and test data according to user specified parameters in configuration
@@ -213,10 +213,10 @@ class FreqaiDataKitchen:
def filter_features(
self,
unfiltered_df: DataFrame,
training_feature_list: List,
label_list: List = list(),
training_feature_list: list,
label_list: list = list(),
training_filter: bool = True,
) -> Tuple[DataFrame, DataFrame]:
) -> tuple[DataFrame, DataFrame]:
"""
Filter the unfiltered dataframe to extract the user-requested features/labels and properly
remove all NaNs. Any row with a NaN is removed from the training dataset or replaced with
@@ -306,7 +306,7 @@ class FreqaiDataKitchen:
test_labels: DataFrame,
train_weights: Any,
test_weights: Any,
) -> Dict:
) -> dict:
self.data_dictionary = {
"train_features": train_df,
"test_features": test_df,
@@ -321,7 +321,7 @@ class FreqaiDataKitchen:
def split_timerange(
self, tr: str, train_split: int = 28, bt_split: float = 7
) -> Tuple[list, list]:
) -> tuple[list, list]:
"""
Function which takes a single time range (tr) and splits it
into sub timeranges to train and backtest on based on user input
@@ -535,7 +535,7 @@ class FreqaiDataKitchen:
def check_if_new_training_required(
self, trained_timestamp: int
) -> Tuple[bool, TimeRange, TimeRange]:
) -> tuple[bool, TimeRange, TimeRange]:
time = datetime.now(tz=timezone.utc).timestamp()
trained_timerange = TimeRange()
data_load_timerange = TimeRange()
@@ -603,7 +603,7 @@ class FreqaiDataKitchen:
def extract_corr_pair_columns_from_populated_indicators(
self, dataframe: DataFrame
) -> Dict[str, DataFrame]:
) -> dict[str, DataFrame]:
"""
Find the columns of the dataframe corresponding to the corr_pairlist, save them
in a dictionary to be reused and attached to other pairs.
@@ -612,7 +612,7 @@ class FreqaiDataKitchen:
:return: corr_dataframes, dictionary of dataframes to be attached
to other pairs in same candle.
"""
corr_dataframes: Dict[str, DataFrame] = {}
corr_dataframes: dict[str, DataFrame] = {}
pairs = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])
for pair in pairs:
@@ -628,7 +628,7 @@ class FreqaiDataKitchen:
return corr_dataframes
def attach_corr_pair_columns(
self, dataframe: DataFrame, corr_dataframes: Dict[str, DataFrame], current_pair: str
self, dataframe: DataFrame, corr_dataframes: dict[str, DataFrame], current_pair: str
) -> DataFrame:
"""
Attach the existing corr_pair dataframes to the current pair dataframe before training
@@ -731,7 +731,7 @@ class FreqaiDataKitchen:
:param is_corr_pairs: bool = whether the pair is a corr pair or not
:return: dataframe = populated dataframe
"""
tfs: List[str] = self.freqai_config["feature_parameters"].get("include_timeframes")
tfs: list[str] = self.freqai_config["feature_parameters"].get("include_timeframes")
for tf in tfs:
metadata = {"pair": pair, "tf": tf}
@@ -810,8 +810,8 @@ class FreqaiDataKitchen:
f"{DOCS_LINK}/freqai-feature-engineering/"
)
tfs: List[str] = self.freqai_config["feature_parameters"].get("include_timeframes")
pairs: List[str] = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])
tfs: list[str] = self.freqai_config["feature_parameters"].get("include_timeframes")
pairs: list[str] = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])
for tf in tfs:
if tf not in base_dataframes:
@@ -828,7 +828,7 @@ class FreqaiDataKitchen:
else:
dataframe = base_dataframes[self.config["timeframe"]].copy()
corr_pairs: List[str] = self.freqai_config["feature_parameters"].get(
corr_pairs: list[str] = self.freqai_config["feature_parameters"].get(
"include_corr_pairlist", []
)
dataframe = self.populate_features(
@@ -953,7 +953,7 @@ class FreqaiDataKitchen:
Returns default FreqAI model path
:param config: Configuration dictionary
"""
freqai_config: Dict[str, Any] = config["freqai"]
freqai_config: dict[str, Any] = config["freqai"]
return Path(config["user_data_dir"] / "models" / str(freqai_config.get("identifier")))
def remove_special_chars_from_feature_names(self, dataframe: pd.DataFrame) -> pd.DataFrame:
@@ -992,7 +992,7 @@ class FreqaiDataKitchen:
return timerange
# deprecated functions
def normalize_data(self, data_dictionary: Dict) -> Dict[Any, Any]:
def normalize_data(self, data_dictionary: dict) -> dict[Any, Any]:
"""
Deprecation warning, migration assistance
"""

View File

@@ -5,7 +5,7 @@ from abc import ABC, abstractmethod
from collections import deque
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List, Literal, Optional, Tuple
from typing import Any, Literal, Optional
import datasieve.transforms as ds
import numpy as np
@@ -59,11 +59,11 @@ class IFreqaiModel(ABC):
def __init__(self, config: Config) -> None:
self.config = config
self.assert_config(self.config)
self.freqai_info: Dict[str, Any] = config["freqai"]
self.data_split_parameters: Dict[str, Any] = config.get("freqai", {}).get(
self.freqai_info: dict[str, Any] = config["freqai"]
self.data_split_parameters: dict[str, Any] = config.get("freqai", {}).get(
"data_split_parameters", {}
)
self.model_training_parameters: Dict[str, Any] = config.get("freqai", {}).get(
self.model_training_parameters: dict[str, Any] = config.get("freqai", {}).get(
"model_training_parameters", {}
)
self.identifier: str = self.freqai_info.get("identifier", "no_id_provided")
@@ -80,14 +80,14 @@ class IFreqaiModel(ABC):
self.dd.current_candle = self.current_candle
self.scanning = False
self.ft_params = self.freqai_info["feature_parameters"]
self.corr_pairlist: List[str] = self.ft_params.get("include_corr_pairlist", [])
self.corr_pairlist: list[str] = self.ft_params.get("include_corr_pairlist", [])
self.keras: bool = self.freqai_info.get("keras", False)
if self.keras and self.ft_params.get("DI_threshold", 0):
self.ft_params["DI_threshold"] = 0
logger.warning("DI threshold is not configured for Keras models yet. Deactivating.")
self.CONV_WIDTH = self.freqai_info.get("conv_width", 1)
self.class_names: List[str] = [] # used in classification subclasses
self.class_names: list[str] = [] # used in classification subclasses
self.pair_it = 0
self.pair_it_train = 0
self.total_pairs = len(self.config.get("exchange", {}).get("pair_whitelist"))
@@ -99,13 +99,13 @@ class IFreqaiModel(ABC):
self.base_tf_seconds = timeframe_to_seconds(self.config["timeframe"])
self.continual_learning = self.freqai_info.get("continual_learning", False)
self.plot_features = self.ft_params.get("plot_feature_importances", 0)
self.corr_dataframes: Dict[str, DataFrame] = {}
self.corr_dataframes: dict[str, DataFrame] = {}
# get_corr_dataframes is controlling the caching of corr_dataframes
# for improved performance. Careful with this boolean.
self.get_corr_dataframes: bool = True
self._threads: List[threading.Thread] = []
self._threads: list[threading.Thread] = []
self._stop_event = threading.Event()
self.metadata: Dict[str, Any] = self.dd.load_global_metadata_from_disk()
self.metadata: dict[str, Any] = self.dd.load_global_metadata_from_disk()
self.data_provider: Optional[DataProvider] = None
self.max_system_threads = max(int(psutil.cpu_count() * 2 - 2), 1)
self.can_short = True # overridden in start() with strategy.can_short
@@ -901,7 +901,7 @@ class IFreqaiModel(ABC):
return
def update_metadata(self, metadata: Dict[str, Any]):
def update_metadata(self, metadata: dict[str, Any]):
"""
Update global metadata and save the updated json file
:param metadata: new global metadata dict
@@ -954,7 +954,7 @@ class IFreqaiModel(ABC):
"""
@abstractmethod
def fit(self, data_dictionary: Dict[str, Any], dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict[str, Any], dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
Most regressors use the same function names and arguments e.g. user
can drop in LGBMRegressor in place of CatBoostRegressor and all data
@@ -968,7 +968,7 @@ class IFreqaiModel(ABC):
@abstractmethod
def predict(
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[DataFrame, NDArray[np.int_]]:
) -> tuple[DataFrame, NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param unfiltered_df: Full dataframe for the current backtest period.

View File

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Any, Dict
from typing import Any
from catboost import CatBoostClassifier, Pool
@@ -21,7 +21,7 @@ class CatboostClassifier(BaseClassifierModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

View File

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Any, Dict
from typing import Any
from catboost import CatBoostClassifier, Pool
@@ -22,7 +22,7 @@ class CatboostClassifierMultiTarget(BaseClassifierModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

View File

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Any, Dict
from typing import Any
from catboost import CatBoostRegressor, Pool
@@ -21,7 +21,7 @@ class CatboostRegressor(BaseRegressionModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

View File

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Any, Dict
from typing import Any
from catboost import CatBoostRegressor, Pool
@@ -22,7 +22,7 @@ class CatboostRegressorMultiTarget(BaseRegressionModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any
from lightgbm import LGBMClassifier
@@ -20,7 +20,7 @@ class LightGBMClassifier(BaseClassifierModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any
from lightgbm import LGBMClassifier
@@ -21,7 +21,7 @@ class LightGBMClassifierMultiTarget(BaseClassifierModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any
from lightgbm import LGBMRegressor
@@ -20,7 +20,7 @@ class LightGBMRegressor(BaseRegressionModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any
from lightgbm import LGBMRegressor
@@ -21,7 +21,7 @@ class LightGBMRegressorMultiTarget(BaseRegressionModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

View File

@@ -1,4 +1,4 @@
from typing import Any, Dict
from typing import Any
import torch
@@ -52,10 +52,10 @@ class PyTorchMLPClassifier(BasePyTorchClassifier):
super().__init__(**kwargs)
config = self.freqai_info.get("model_training_parameters", {})
self.learning_rate: float = config.get("learning_rate", 3e-4)
self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})
self.model_kwargs: dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: dict[str, Any] = config.get("trainer_kwargs", {})
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

View File

@@ -1,4 +1,4 @@
from typing import Any, Dict
from typing import Any
import torch
@@ -51,10 +51,10 @@ class PyTorchMLPRegressor(BasePyTorchRegressor):
super().__init__(**kwargs)
config = self.freqai_info.get("model_training_parameters", {})
self.learning_rate: float = config.get("learning_rate", 3e-4)
self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})
self.model_kwargs: dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: dict[str, Any] = config.get("trainer_kwargs", {})
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

View File

@@ -1,4 +1,4 @@
from typing import Any, Dict, Tuple
from typing import Any
import numpy as np
import numpy.typing as npt
@@ -60,10 +60,10 @@ class PyTorchTransformerRegressor(BasePyTorchRegressor):
super().__init__(**kwargs)
config = self.freqai_info.get("model_training_parameters", {})
self.learning_rate: float = config.get("learning_rate", 3e-4)
self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})
self.model_kwargs: dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: dict[str, Any] = config.get("trainer_kwargs", {})
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,
@@ -100,7 +100,7 @@ class PyTorchTransformerRegressor(BasePyTorchRegressor):
def predict(
self, unfiltered_df: pd.DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[pd.DataFrame, npt.NDArray[np.int_]]:
) -> tuple[pd.DataFrame, npt.NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param unfiltered_df: Full dataframe for the current backtest period.

View File

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Any, Dict, List, Optional, Type
from typing import Any, Optional
import torch as th
from stable_baselines3.common.callbacks import ProgressBarCallback
@@ -44,7 +44,7 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
take fine-tuned control over the data handling pipeline.
"""
def fit(self, data_dictionary: Dict[str, Any], dk: FreqaiDataKitchen, **kwargs):
def fit(self, data_dictionary: dict[str, Any], dk: FreqaiDataKitchen, **kwargs):
"""
User customizable fit method
:param data_dictionary: dict = common data dictionary containing all train/test
@@ -77,7 +77,7 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
)
model = self.dd.model_dictionary[dk.pair]
model.set_env(self.train_env)
callbacks: List[Any] = [self.eval_callback, self.tensorboard_callback]
callbacks: list[Any] = [self.eval_callback, self.tensorboard_callback]
progressbar_callback: Optional[ProgressBarCallback] = None
if self.rl_config.get("progress_bar", False):
progressbar_callback = ProgressBarCallback()
@@ -101,7 +101,7 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
return model
MyRLEnv: Type[BaseEnvironment]
MyRLEnv: type[BaseEnvironment]
class MyRLEnv(Base5ActionRLEnv): # type: ignore[no-redef]
"""

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any
from pandas import DataFrame
from sb3_contrib.common.maskable.callbacks import MaskableEvalCallback
@@ -22,7 +22,7 @@ class ReinforcementLearner_multiproc(ReinforcementLearner):
def set_train_and_eval_environments(
self,
data_dictionary: Dict[str, Any],
data_dictionary: dict[str, Any],
prices_train: DataFrame,
prices_test: DataFrame,
dk: FreqaiDataKitchen,

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict, Tuple
from typing import Any
import numpy as np
import numpy.typing as npt
@@ -24,7 +24,7 @@ class SKLearnRandomForestClassifier(BaseClassifierModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,
@@ -61,7 +61,7 @@ class SKLearnRandomForestClassifier(BaseClassifierModel):
def predict(
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
) -> tuple[DataFrame, npt.NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param unfiltered_df: Full dataframe for the current backtest period.

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict, Tuple
from typing import Any
import numpy as np
import numpy.typing as npt
@@ -26,7 +26,7 @@ class XGBoostClassifier(BaseClassifierModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,
@@ -64,7 +64,7 @@ class XGBoostClassifier(BaseClassifierModel):
def predict(
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
) -> tuple[DataFrame, npt.NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param unfiltered_df: Full dataframe for the current backtest period.

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict, Tuple
from typing import Any
import numpy as np
import numpy.typing as npt
@@ -26,7 +26,7 @@ class XGBoostRFClassifier(BaseClassifierModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,
@@ -64,7 +64,7 @@ class XGBoostRFClassifier(BaseClassifierModel):
def predict(
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
) -> tuple[DataFrame, npt.NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param unfiltered_df: Full dataframe for the current backtest period.

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any
from xgboost import XGBRFRegressor
@@ -21,7 +21,7 @@ class XGBoostRFRegressor(BaseRegressionModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any
from xgboost import XGBRegressor
@@ -21,7 +21,7 @@ class XGBoostRegressor(BaseRegressionModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any
from xgboost import XGBRegressor
@@ -21,7 +21,7 @@ class XGBoostRegressorMultiTarget(BaseRegressionModel):
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

View File

@@ -1,5 +1,5 @@
from enum import Enum
from typing import Any, Dict, Type, Union
from typing import Any, Union
from stable_baselines3.common.callbacks import BaseCallback
from stable_baselines3.common.logger import HParam
@@ -13,10 +13,10 @@ class TensorboardCallback(BaseCallback):
episodic summary reports.
"""
def __init__(self, verbose=1, actions: Type[Enum] = BaseActions):
def __init__(self, verbose=1, actions: type[Enum] = BaseActions):
super().__init__(verbose)
self.model: Any = None
self.actions: Type[Enum] = actions
self.actions: type[Enum] = actions
def _on_training_start(self) -> None:
hparam_dict = {
@@ -27,7 +27,7 @@ class TensorboardCallback(BaseCallback):
# "batch_size": self.model.batch_size,
# "n_steps": self.model.n_steps,
}
metric_dict: Dict[str, Union[float, int]] = {
metric_dict: dict[str, Union[float, int]] = {
"eval/mean_reward": 0,
"rollout/ep_rew_mean": 0,
"rollout/ep_len_mean": 0,

View File

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Any, Dict, List, Optional
from typing import Any, Optional
import pandas as pd
import torch
@@ -25,7 +25,7 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
criterion: nn.Module,
device: str,
data_convertor: PyTorchDataConvertor,
model_meta_data: Dict[str, Any] = {},
model_meta_data: dict[str, Any] = {},
window_size: int = 1,
tb_logger: Any = None,
**kwargs,
@@ -61,7 +61,7 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
self.tb_logger = tb_logger
self.test_batch_counter = 0
def fit(self, data_dictionary: Dict[str, pd.DataFrame], splits: List[str]):
def fit(self, data_dictionary: dict[str, pd.DataFrame], splits: list[str]):
"""
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
@@ -102,7 +102,7 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
@torch.no_grad()
def estimate_loss(
self,
data_loader_dictionary: Dict[str, DataLoader],
data_loader_dictionary: dict[str, DataLoader],
split: str,
) -> None:
self.model.eval()
@@ -119,8 +119,8 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
self.model.train()
def create_data_loaders_dictionary(
self, data_dictionary: Dict[str, pd.DataFrame], splits: List[str]
) -> Dict[str, DataLoader]:
self, data_dictionary: dict[str, pd.DataFrame], splits: list[str]
) -> dict[str, DataLoader]:
"""
Converts the input data to PyTorch tensors using a data loader.
"""
@@ -181,7 +181,7 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
checkpoint = torch.load(path)
return self.load_from_checkpoint(checkpoint)
def load_from_checkpoint(self, checkpoint: Dict):
def load_from_checkpoint(self, checkpoint: dict):
"""
when using continual_learning, DataDrawer will load the dictionary
(containing state dicts and model_meta_data) by calling torch.load(path).
@@ -200,8 +200,8 @@ class PyTorchTransformerTrainer(PyTorchModelTrainer):
"""
def create_data_loaders_dictionary(
self, data_dictionary: Dict[str, pd.DataFrame], splits: List[str]
) -> Dict[str, DataLoader]:
self, data_dictionary: dict[str, pd.DataFrame], splits: list[str]
) -> dict[str, DataLoader]:
"""
Converts the input data to PyTorch tensors using a data loader.
"""

View File

@@ -1,6 +1,5 @@
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Dict, List
import pandas as pd
import torch
@@ -9,7 +8,7 @@ from torch import nn
class PyTorchTrainerInterface(ABC):
@abstractmethod
def fit(self, data_dictionary: Dict[str, pd.DataFrame], splits: List[str]) -> None:
def fit(self, data_dictionary: dict[str, pd.DataFrame], splits: list[str]) -> None:
"""
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
@@ -41,7 +40,7 @@ class PyTorchTrainerInterface(ABC):
return self.load_from_checkpoint(checkpoint)
@abstractmethod
def load_from_checkpoint(self, checkpoint: Dict) -> nn.Module:
def load_from_checkpoint(self, checkpoint: dict) -> nn.Module:
"""
when using continual_learning, DataDrawer will load the dictionary
(containing state dicts and model_meta_data) by calling torch.load(path).

View File

@@ -1,7 +1,7 @@
import logging
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict
from typing import Any
import numpy as np
import pandas as pd
@@ -155,7 +155,7 @@ def plot_feature_importance(
store_plot_file(fig, f"{dk.model_filename}-{label}.html", dk.data_path)
def record_params(config: Dict[str, Any], full_path: Path) -> None:
def record_params(config: dict[str, Any], full_path: Path) -> None:
"""
Records run params in the full path for reproducibility
"""

View File

@@ -9,7 +9,7 @@ from datetime import datetime, time, timedelta, timezone
from math import isclose
from threading import Lock
from time import sleep
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Optional
from schedule import Scheduler
@@ -83,7 +83,7 @@ class FreqtradeBot(LoggingMixin):
:param config: configuration dict, you can use Configuration.get_config()
to get the config dict.
"""
self.active_pair_whitelist: List[str] = []
self.active_pair_whitelist: list[str] = []
# Init bot state
self.state = State.STOPPED
@@ -258,7 +258,7 @@ class FreqtradeBot(LoggingMixin):
self.update_trades_without_assigned_fees()
# Query trades from persistence layer
trades: List[Trade] = Trade.get_open_trades()
trades: list[Trade] = Trade.get_open_trades()
self.active_pair_whitelist = self._refresh_active_whitelist(trades)
@@ -325,7 +325,7 @@ class FreqtradeBot(LoggingMixin):
}
self.rpc.send_msg(msg)
def _refresh_active_whitelist(self, trades: Optional[List[Trade]] = None) -> List[str]:
def _refresh_active_whitelist(self, trades: Optional[list[Trade]] = None) -> list[str]:
"""
Refresh active whitelist from pairlist or edge and extend it with
pairs that have open trades.
@@ -371,7 +371,7 @@ class FreqtradeBot(LoggingMixin):
def update_funding_fees(self) -> None:
if self.trading_mode == TradingMode.FUTURES:
trades: List[Trade] = Trade.get_open_trades()
trades: list[Trade] = Trade.get_open_trades()
for trade in trades:
trade.set_funding_fees(
self.exchange.get_funding_fees(
@@ -450,7 +450,7 @@ class FreqtradeBot(LoggingMixin):
# Updating open orders in dry-run does not make sense and will fail.
return
trades: List[Trade] = Trade.get_closed_trades_without_assigned_fees()
trades: list[Trade] = Trade.get_closed_trades_without_assigned_fees()
for trade in trades:
if not trade.is_open and not trade.fee_updated(trade.exit_side):
# Get sell fee
@@ -826,7 +826,7 @@ class FreqtradeBot(LoggingMixin):
exit_tag=order_tag,
)
def _check_depth_of_market(self, pair: str, conf: Dict, side: SignalDirection) -> bool:
def _check_depth_of_market(self, pair: str, conf: dict, side: SignalDirection) -> bool:
"""
Checks depth of market before executing an entry
"""
@@ -1085,7 +1085,7 @@ class FreqtradeBot(LoggingMixin):
trade: Optional[Trade],
mode: EntryExecuteMode,
leverage_: Optional[float],
) -> Tuple[float, float, float]:
) -> tuple[float, float, float]:
"""
Validate and eventually adjust (within limits) limit, amount and leverage
:return: Tuple with (price, amount, leverage)
@@ -1263,7 +1263,7 @@ class FreqtradeBot(LoggingMixin):
# SELL / exit positions / close trades logic and methods
#
def exit_positions(self, trades: List[Trade]) -> int:
def exit_positions(self, trades: list[Trade]) -> int:
"""
Tries to execute exit orders for open trades (positions)
"""
@@ -1349,7 +1349,7 @@ class FreqtradeBot(LoggingMixin):
"""
Check and execute trade exit
"""
exits: List[ExitCheckTuple] = self.strategy.should_exit(
exits: list[ExitCheckTuple] = self.strategy.should_exit(
trade,
exit_rate,
datetime.now(timezone.utc),
@@ -1466,7 +1466,7 @@ class FreqtradeBot(LoggingMixin):
return False
def handle_trailing_stoploss_on_exchange(self, trade: Trade, order: Dict) -> None:
def handle_trailing_stoploss_on_exchange(self, trade: Trade, order: dict) -> None:
"""
Check to see if stoploss on exchange should be updated
in case of trailing stoploss on exchange
@@ -1504,7 +1504,7 @@ class FreqtradeBot(LoggingMixin):
f"Could not create trailing stoploss order for pair {trade.pair}."
)
def manage_trade_stoploss_orders(self, trade: Trade, stoploss_orders: List[Dict]):
def manage_trade_stoploss_orders(self, trade: Trade, stoploss_orders: list[dict]):
"""
Perform required actions according to existing stoploss orders of trade
:param trade: Corresponding Trade
@@ -1580,7 +1580,7 @@ class FreqtradeBot(LoggingMixin):
else:
self.replace_order(order, open_order, trade)
def handle_cancel_order(self, order: Dict, order_obj: Order, trade: Trade, reason: str) -> None:
def handle_cancel_order(self, order: dict, order_obj: Order, trade: Trade, reason: str) -> None:
"""
Check if the currently analyzed order timed out and cancel it if necessary.
:param order: Order dict grabbed with exchange.fetch_order()
@@ -1632,7 +1632,7 @@ class FreqtradeBot(LoggingMixin):
)
trade.delete()
def replace_order(self, order: Dict, order_obj: Optional[Order], trade: Trade) -> None:
def replace_order(self, order: dict, order_obj: Optional[Order], trade: Trade) -> None:
"""
Check if the currently analyzed entry order should be replaced or simply cancelled.
To simply cancel the existing order (no replacement), adjust_entry_price() should return None
@@ -1736,7 +1736,7 @@ class FreqtradeBot(LoggingMixin):
def handle_cancel_enter(
self,
trade: Trade,
order: Dict,
order: dict,
order_obj: Order,
reason: str,
replacing: Optional[bool] = False,
@@ -1820,7 +1820,7 @@ class FreqtradeBot(LoggingMixin):
)
return was_trade_fully_canceled
def handle_cancel_exit(self, trade: Trade, order: Dict, order_obj: Order, reason: str) -> bool:
def handle_cancel_exit(self, trade: Trade, order: dict, order_obj: Order, reason: str) -> bool:
"""
Exit order cancel - cancel the order and update the trade
:return: True if exit order was cancelled, false otherwise
@@ -2173,7 +2173,7 @@ class FreqtradeBot(LoggingMixin):
self,
trade: Trade,
order_id: Optional[str],
action_order: Optional[Dict[str, Any]] = None,
action_order: Optional[dict[str, Any]] = None,
*,
stoploss_order: bool = False,
send_msg: bool = True,
@@ -2338,7 +2338,7 @@ class FreqtradeBot(LoggingMixin):
return fee_abs
return None
def handle_order_fee(self, trade: Trade, order_obj: Order, order: Dict[str, Any]) -> None:
def handle_order_fee(self, trade: Trade, order_obj: Order, order: dict[str, Any]) -> None:
# Try update amount (binance-fix)
try:
fee_abs = self.get_real_amount(trade, order, order_obj)
@@ -2347,7 +2347,7 @@ class FreqtradeBot(LoggingMixin):
except DependencyException as exception:
logger.warning("Could not update trade amount: %s", exception)
def get_real_amount(self, trade: Trade, order: Dict, order_obj: Order) -> Optional[float]:
def get_real_amount(self, trade: Trade, order: dict, order_obj: Order) -> Optional[float]:
"""
Detect and update trade fee.
Calls trade.update_fee() upon correct detection.
@@ -2394,7 +2394,7 @@ class FreqtradeBot(LoggingMixin):
trade, order, order_obj, order_amount, order.get("trades", [])
)
def _trades_valid_for_fee(self, trades: List[Dict[str, Any]]) -> bool:
def _trades_valid_for_fee(self, trades: list[dict[str, Any]]) -> bool:
"""
Check if trades are valid for fee detection.
:return: True if trades are valid for fee detection, False otherwise
@@ -2407,7 +2407,7 @@ class FreqtradeBot(LoggingMixin):
return True
def fee_detection_from_trades(
self, trade: Trade, order: Dict, order_obj: Order, order_amount: float, trades: List
self, trade: Trade, order: dict, order_obj: Order, order_amount: float, trades: list
) -> Optional[float]:
"""
fee-detection fallback to Trades.
@@ -2426,7 +2426,7 @@ class FreqtradeBot(LoggingMixin):
fee_abs = 0.0
fee_cost = 0.0
trade_base_currency = self.exchange.get_pair_base_currency(trade.pair)
fee_rate_array: List[float] = []
fee_rate_array: list[float] = []
for exectrade in trades:
amount += exectrade["amount"]
if self.exchange.order_has_fee(exectrade):

View File

@@ -1,4 +1,4 @@
from typing import Any, Dict, List, Optional
from typing import Any, Optional
from typing_extensions import TypedDict
@@ -9,9 +9,9 @@ class BacktestMetadataType(TypedDict):
class BacktestResultType(TypedDict):
metadata: Dict[str, Any] # BacktestMetadataType
strategy: Dict[str, Any]
strategy_comparison: List[Any]
metadata: dict[str, Any] # BacktestMetadataType
strategy: dict[str, Any]
strategy_comparison: list[Any]
def get_BacktestResultType_default() -> BacktestResultType:

View File

@@ -1,5 +1,5 @@
# Used for list-exchanges
from typing import List, Optional
from typing import Optional
from typing_extensions import TypedDict
@@ -18,4 +18,4 @@ class ValidExchangesType(TypedDict):
dex: bool
is_alias: bool
alias_for: Optional[str]
trade_modes: List[TradeModeType]
trade_modes: list[TradeModeType]
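
TypedDict definitions take the builtin generics directly on Python 3.9 as well, while Optional keeps coming from typing because the "X | None" spelling needs 3.10. A minimal sketch (TradeModeSketch and ExchangeSketch are hypothetical types, not the freqtrade ones):

from typing import Optional
from typing_extensions import TypedDict

class TradeModeSketch(TypedDict):
    trading_mode: str
    margin_mode: Optional[str]  # Optional stays: "str | None" requires Python 3.10

class ExchangeSketch(TypedDict):
    name: str
    trade_modes: list[TradeModeSketch]  # builtin list[...] works inside a TypedDict on 3.9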

View File

@@ -6,7 +6,7 @@ Read the documentation to know what cli arguments you need.
import logging
import sys
from typing import Any, List, Optional
from typing import Any, Optional
# check min. python version
@@ -24,7 +24,7 @@ from freqtrade.system import asyncio_setup, gc_set_threshold
logger = logging.getLogger("freqtrade")
def main(sysargv: Optional[List[str]] = None) -> None:
def main(sysargv: Optional[list[str]] = None) -> None:
"""
This function will initiate the bot and start the trading loop.
:return: None

View File

@@ -4,9 +4,10 @@ Various tool function for Freqtrade and scripts
import gzip
import logging
from collections.abc import Iterator, Mapping
from io import StringIO
from pathlib import Path
from typing import Any, Dict, Iterator, List, Mapping, Optional, TextIO, Union
from typing import Any, Optional, TextIO, Union
from urllib.parse import urlparse
import pandas as pd
@@ -128,7 +129,7 @@ def round_dict(d, n):
return {k: (round(v, n) if isinstance(v, float) else v) for k, v in d.items()}
DictMap = Union[Dict[str, Any], Mapping[str, Any]]
DictMap = Union[dict[str, Any], Mapping[str, Any]]
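
Note what does not change in this alias: Union, Optional, and the abstract Mapping survive the migration, because the "X | Y" union syntax (PEP 604) only arrives in Python 3.10, above this PR's 3.9 target. A minimal sketch of that boundary, assuming Python 3.9 (DictMapSketch and lookup are hypothetical names):

from collections.abc import Mapping
from typing import Any, Optional, Union

DictMapSketch = Union[dict[str, Any], Mapping[str, Any]]  # fine on 3.9

def lookup(obj: DictMapSketch, key: str, default: Optional[str] = None) -> Optional[Any]:
    # On 3.10+ this could be spelled dict[str, Any] | Mapping[str, Any] and str | None,
    # but not with a 3.9 target.
    return obj.get(key, default)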
def safe_value_fallback(obj: DictMap, key1: str, key2: Optional[str] = None, default_value=None):
@@ -164,7 +165,7 @@ def plural(num: float, singular: str, plural: Optional[str] = None) -> str:
return singular if (num == 1 or num == -1) else plural or singular + "s"
def chunks(lst: List[Any], n: int) -> Iterator[List[Any]]:
def chunks(lst: list[Any], n: int) -> Iterator[list[Any]]:
"""
Split lst into chunks of the size n.
:param lst: list to split into chunks
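
The Iterator in the new chunks signature now comes from collections.abc (imported at the top of this file) rather than typing; the abstract container types are subscriptable from Python 3.9 too. A small re-implementation sketch for illustration only (chunks_sketch is a hypothetical stand-in, not the freqtrade function):

from collections.abc import Iterator
from typing import Any

def chunks_sketch(lst: list[Any], n: int) -> Iterator[list[Any]]:
    """Yield successive n-sized chunks of lst."""
    for i in range(0, len(lst), n):
        yield lst[i : i + n]

# list(chunks_sketch([1, 2, 3, 4, 5], 2)) -> [[1, 2], [3, 4], [5]]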

View File

@@ -3,7 +3,7 @@ import shutil
from copy import deepcopy
from datetime import datetime, timedelta
from pathlib import Path
from typing import Any, Dict, List
from typing import Any
from pandas import DataFrame
@@ -25,16 +25,16 @@ class Analysis:
self.total_signals = 0
self.false_entry_signals = 0
self.false_exit_signals = 0
self.false_indicators: List[str] = []
self.false_indicators: list[str] = []
self.has_bias = False
class LookaheadAnalysis(BaseAnalysis):
def __init__(self, config: Dict[str, Any], strategy_obj: Dict):
def __init__(self, config: dict[str, Any], strategy_obj: dict):
super().__init__(config, strategy_obj)
self.entry_varHolders: List[VarHolder] = []
self.exit_varHolders: List[VarHolder] = []
self.entry_varHolders: list[VarHolder] = []
self.exit_varHolders: list[VarHolder] = []
self.current_analysis = Analysis()
self.minimum_trade_amount = config["minimum_trade_amount"]
@@ -99,7 +99,7 @@ class LookaheadAnalysis(BaseAnalysis):
f"{str(self_value)} != {str(other_value)}"
)
def prepare_data(self, varholder: VarHolder, pairs_to_load: List[DataFrame]):
def prepare_data(self, varholder: VarHolder, pairs_to_load: list[DataFrame]):
if "freqai" in self.local_config and "identifier" in self.local_config["freqai"]:
# purge previous data if the freqai model is defined
# (to be sure nothing is carried over from older backtests)

View File

@@ -1,7 +1,7 @@
import logging
import time
from pathlib import Path
from typing import Any, Dict, List, Union
from typing import Any, Union
import pandas as pd
from rich.text import Text
@@ -19,8 +19,8 @@ logger = logging.getLogger(__name__)
class LookaheadAnalysisSubFunctions:
@staticmethod
def text_table_lookahead_analysis_instances(
config: Dict[str, Any],
lookahead_instances: List[LookaheadAnalysis],
config: dict[str, Any],
lookahead_instances: list[LookaheadAnalysis],
caption: Union[str, None] = None,
):
headers = [
@@ -73,7 +73,7 @@ class LookaheadAnalysisSubFunctions:
return data
@staticmethod
def export_to_csv(config: Dict[str, Any], lookahead_analysis: List[LookaheadAnalysis]):
def export_to_csv(config: dict[str, Any], lookahead_analysis: list[LookaheadAnalysis]):
def add_or_update_row(df, row_data):
if (
(df["filename"] == row_data["filename"]) & (df["strategy"] == row_data["strategy"])
@@ -198,7 +198,7 @@ class LookaheadAnalysisSubFunctions:
return config
@staticmethod
def initialize_single_lookahead_analysis(config: Config, strategy_obj: Dict[str, Any]):
def initialize_single_lookahead_analysis(config: Config, strategy_obj: dict[str, Any]):
logger.info(f"Bias test of {Path(strategy_obj['location']).name} started.")
start = time.perf_counter()
current_instance = LookaheadAnalysis(config, strategy_obj)

View File

@@ -3,7 +3,7 @@ import shutil
from copy import deepcopy
from datetime import timedelta
from pathlib import Path
from typing import Any, Dict, List
from typing import Any
from pandas import DataFrame
@@ -21,7 +21,7 @@ logger = logging.getLogger(__name__)
class RecursiveAnalysis(BaseAnalysis):
def __init__(self, config: Dict[str, Any], strategy_obj: Dict):
def __init__(self, config: dict[str, Any], strategy_obj: dict):
self._startup_candle = list(
map(int, config.get("startup_candle", [199, 399, 499, 999, 1999]))
)
@@ -35,10 +35,10 @@ class RecursiveAnalysis(BaseAnalysis):
self._startup_candle.append(self._strat_scc)
self._startup_candle.sort()
self.partial_varHolder_array: List[VarHolder] = []
self.partial_varHolder_lookahead_array: List[VarHolder] = []
self.partial_varHolder_array: list[VarHolder] = []
self.partial_varHolder_lookahead_array: list[VarHolder] = []
self.dict_recursive: Dict[str, Any] = dict()
self.dict_recursive: dict[str, Any] = dict()
# For recursive bias check
# analyzes two data frames with processed indicators and shows differences between them.
@@ -114,7 +114,7 @@ class RecursiveAnalysis(BaseAnalysis):
else:
logger.info("No lookahead bias on indicators found.")
def prepare_data(self, varholder: VarHolder, pairs_to_load: List[DataFrame]):
def prepare_data(self, varholder: VarHolder, pairs_to_load: list[DataFrame]):
if "freqai" in self.local_config and "identifier" in self.local_config["freqai"]:
# purge previous data if the freqai model is defined
# (to be sure nothing is carried over from older backtests)

View File

@@ -1,7 +1,7 @@
import logging
import time
from pathlib import Path
from typing import Any, Dict, List
from typing import Any
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
@@ -15,7 +15,7 @@ logger = logging.getLogger(__name__)
class RecursiveAnalysisSubFunctions:
@staticmethod
def text_table_recursive_analysis_instances(recursive_instances: List[RecursiveAnalysis]):
def text_table_recursive_analysis_instances(recursive_instances: list[RecursiveAnalysis]):
startups = recursive_instances[0]._startup_candle
strat_scc = recursive_instances[0]._strat_scc
headers = ["Indicators"]
@@ -63,7 +63,7 @@ class RecursiveAnalysisSubFunctions:
return config
@staticmethod
def initialize_single_recursive_analysis(config: Config, strategy_obj: Dict[str, Any]):
def initialize_single_recursive_analysis(config: Config, strategy_obj: dict[str, Any]):
logger.info(f"Recursive test of {Path(strategy_obj['location']).name} started.")
start = time.perf_counter()
current_instance = RecursiveAnalysis(config, strategy_obj)

View File

@@ -8,7 +8,7 @@ import logging
from collections import defaultdict
from copy import deepcopy
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Optional
from numpy import nan
from pandas import DataFrame
@@ -118,13 +118,13 @@ class Backtesting:
self.order_id_counter: int = 0
config["dry_run"] = True
self.run_ids: Dict[str, str] = {}
self.strategylist: List[IStrategy] = []
self.all_results: Dict[str, Dict] = {}
self.processed_dfs: Dict[str, Dict] = {}
self.rejected_dict: Dict[str, List] = {}
self.rejected_df: Dict[str, Dict] = {}
self.exited_dfs: Dict[str, Dict] = {}
self.run_ids: dict[str, str] = {}
self.strategylist: list[IStrategy] = []
self.all_results: dict[str, dict] = {}
self.processed_dfs: dict[str, dict] = {}
self.rejected_dict: dict[str, list] = {}
self.rejected_df: dict[str, dict] = {}
self.exited_dfs: dict[str, dict] = {}
self._exchange_name = self.config["exchange"]["name"]
if not exchange:
@@ -246,8 +246,8 @@ class Backtesting:
else:
self.timeframe_detail_td = timedelta(seconds=0)
self.detail_data: Dict[str, DataFrame] = {}
self.futures_data: Dict[str, DataFrame] = {}
self.detail_data: dict[str, DataFrame] = {}
self.futures_data: dict[str, DataFrame] = {}
def init_backtest(self):
self.prepare_backtest(False)
@@ -278,7 +278,7 @@ class Backtesting:
if self.config.get("enable_protections", False):
self.protections = ProtectionManager(self.config, strategy.protections)
def load_bt_data(self) -> Tuple[Dict[str, DataFrame], TimeRange]:
def load_bt_data(self) -> tuple[dict[str, DataFrame], TimeRange]:
"""
Loads backtest data and returns the data combined with the timerange
as tuple.
@@ -408,7 +408,7 @@ class Backtesting:
self.abort = False
raise DependencyException("Stop requested")
def _get_ohlcv_as_lists(self, processed: Dict[str, DataFrame]) -> Dict[str, Tuple]:
def _get_ohlcv_as_lists(self, processed: dict[str, DataFrame]) -> dict[str, tuple]:
"""
Helper function to convert processed dataframes into lists for performance reasons.
@@ -418,7 +418,7 @@ class Backtesting:
optimize memory usage!
"""
data: Dict = {}
data: dict = {}
self.progress.init_step(BacktestState.CONVERT, len(processed))
# Create dict with data
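
The hunks above annotate _get_ohlcv_as_lists, which turns the per-pair DataFrames into plain row containers because iterating lists of tuples is far cheaper than repeated DataFrame indexing inside the backtest loop. A rough sketch of that general shape, stated as an assumption for illustration rather than the actual freqtrade implementation (frames_to_rows is a hypothetical name):

import pandas as pd

def frames_to_rows(processed: dict[str, pd.DataFrame]) -> dict[str, list[tuple]]:
    # One list of plain row tuples per pair; tuples index cheaply in the hot loop.
    return {
        pair: list(df.itertuples(index=False, name=None))
        for pair, df in processed.items()
    }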
@@ -466,7 +466,7 @@ class Backtesting:
return data
def _get_close_rate(
self, row: Tuple, trade: LocalTrade, exit_: ExitCheckTuple, trade_dur: int
self, row: tuple, trade: LocalTrade, exit_: ExitCheckTuple, trade_dur: int
) -> float:
"""
Get close rate for backtesting result
@@ -484,7 +484,7 @@ class Backtesting:
return row[OPEN_IDX]
def _get_close_rate_for_stoploss(
self, row: Tuple, trade: LocalTrade, exit_: ExitCheckTuple, trade_dur: int
self, row: tuple, trade: LocalTrade, exit_: ExitCheckTuple, trade_dur: int
) -> float:
# our stoploss was already lower than candle high,
# possibly due to a cancelled trade exit.
@@ -538,7 +538,7 @@ class Backtesting:
return stoploss_value
def _get_close_rate_for_roi(
self, row: Tuple, trade: LocalTrade, exit_: ExitCheckTuple, trade_dur: int
self, row: tuple, trade: LocalTrade, exit_: ExitCheckTuple, trade_dur: int
) -> float:
is_short = trade.is_short or False
leverage = trade.leverage or 1.0
@@ -601,7 +601,7 @@ class Backtesting:
return row[OPEN_IDX]
def _get_adjust_trade_entry_for_candle(
self, trade: LocalTrade, row: Tuple, current_time: datetime
self, trade: LocalTrade, row: tuple, current_time: datetime
) -> LocalTrade:
current_rate: float = row[OPEN_IDX]
current_profit = trade.calc_profit_ratio(current_rate)
@@ -669,7 +669,7 @@ class Backtesting:
return trade
def _get_order_filled(self, rate: float, row: Tuple) -> bool:
def _get_order_filled(self, rate: float, row: tuple) -> bool:
"""Rate is within candle, therefore filled"""
return row[LOW_IDX] <= rate <= row[HIGH_IDX]
@@ -685,7 +685,7 @@ class Backtesting:
)
def _try_close_open_order(
self, order: Optional[Order], trade: LocalTrade, current_date: datetime, row: Tuple
self, order: Optional[Order], trade: LocalTrade, current_date: datetime, row: tuple
) -> bool:
"""
Check if an order is open and if it should've filled.
@@ -719,7 +719,7 @@ class Backtesting:
return False
def _process_exit_order(
self, order: Order, trade: LocalTrade, current_time: datetime, row: Tuple, pair: str
self, order: Order, trade: LocalTrade, current_time: datetime, row: tuple, pair: str
):
"""
Takes an exit order and processes it, potentially closing the trade.
@@ -740,7 +740,7 @@ class Backtesting:
def _get_exit_for_signal(
self,
trade: LocalTrade,
row: Tuple,
row: tuple,
exit_: ExitCheckTuple,
current_time: datetime,
amount: Optional[float] = None,
@@ -820,7 +820,7 @@ class Backtesting:
def _exit_trade(
self,
trade: LocalTrade,
sell_row: Tuple,
sell_row: tuple,
close_rate: float,
amount: float,
exit_reason: Optional[str],
@@ -859,7 +859,7 @@ class Backtesting:
return trade
def _check_trade_exit(
self, trade: LocalTrade, row: Tuple, current_time: datetime
self, trade: LocalTrade, row: tuple, current_time: datetime
) -> Optional[LocalTrade]:
self._run_funding_fees(trade, current_time)
@@ -905,7 +905,7 @@ class Backtesting:
def get_valid_price_and_stake(
self,
pair: str,
row: Tuple,
row: tuple,
propose_rate: float,
stake_amount: float,
direction: LongShort,
@@ -914,7 +914,7 @@ class Backtesting:
trade: Optional[LocalTrade],
order_type: str,
price_precision: Optional[float],
) -> Tuple[float, float, float, float]:
) -> tuple[float, float, float, float]:
if order_type == "limit":
new_rate = strategy_safe_wrapper(
self.strategy.custom_entry_price, default_retval=propose_rate
@@ -1003,7 +1003,7 @@ class Backtesting:
def _enter_trade(
self,
pair: str,
row: Tuple,
row: tuple,
direction: LongShort,
stake_amount: Optional[float] = None,
trade: Optional[LocalTrade] = None,
@@ -1150,7 +1150,7 @@ class Backtesting:
return trade
def handle_left_open(
self, open_trades: Dict[str, List[LocalTrade]], data: Dict[str, List[Tuple]]
self, open_trades: dict[str, list[LocalTrade]], data: dict[str, list[tuple]]
) -> None:
"""
Handling of left open trades at the end of backtesting
@@ -1197,7 +1197,7 @@ class Backtesting:
self.protections.stop_per_pair(pair, current_time, side)
self.protections.global_stop(current_time, side)
def manage_open_orders(self, trade: LocalTrade, current_time: datetime, row: Tuple) -> bool:
def manage_open_orders(self, trade: LocalTrade, current_time: datetime, row: tuple) -> bool:
"""
Check if any open order needs to be cancelled or replaced.
Returns True if the trade should be deleted.
@@ -1246,7 +1246,7 @@ class Backtesting:
return None
def check_order_replace(
self, trade: LocalTrade, order: Order, current_time, row: Tuple
self, trade: LocalTrade, order: Order, current_time, row: tuple
) -> bool:
"""
Check if the currently analyzed entry order has to be replaced and do so.
@@ -1297,8 +1297,8 @@ class Backtesting:
return False
def validate_row(
self, data: Dict, pair: str, row_index: int, current_time: datetime
) -> Optional[Tuple]:
self, data: dict, pair: str, row_index: int, current_time: datetime
) -> Optional[tuple]:
try:
# Row is treated as "current incomplete candle".
# entry / exit signals are shifted by 1 to compensate for this.
@@ -1329,7 +1329,7 @@ class Backtesting:
def backtest_loop(
self,
row: Tuple,
row: tuple,
pair: str,
current_time: datetime,
end_date: datetime,
@@ -1383,7 +1383,7 @@ class Backtesting:
self._process_exit_order(order, trade, current_time, row, pair)
def time_pair_generator(
self, start_date: datetime, end_date: datetime, increment: timedelta, pairs: List[str]
self, start_date: datetime, end_date: datetime, increment: timedelta, pairs: list[str]
):
"""
Backtest time and pair generator
@@ -1404,7 +1404,7 @@ class Backtesting:
self.progress.increment()
current_time += increment
def backtest(self, processed: Dict, start_date: datetime, end_date: datetime) -> Dict[str, Any]:
def backtest(self, processed: dict, start_date: datetime, end_date: datetime) -> dict[str, Any]:
"""
Implement backtesting functionality
@@ -1423,10 +1423,10 @@ class Backtesting:
self.wallets.update()
# Use dict of lists with data for performance
# (looping lists is a lot faster than pandas DataFrames)
data: Dict = self._get_ohlcv_as_lists(processed)
data: dict = self._get_ohlcv_as_lists(processed)
# Indexes per pair, so some pairs are allowed to have a missing start.
indexes: Dict = defaultdict(int)
indexes: dict = defaultdict(int)
# Loop timerange and get candle for each pair at that point in time
for current_time, pair, is_first in self.time_pair_generator(
@@ -1510,7 +1510,7 @@ class Backtesting:
}
def backtest_one_strategy(
self, strat: IStrategy, data: Dict[str, DataFrame], timerange: TimeRange
self, strat: IStrategy, data: dict[str, DataFrame], timerange: TimeRange
):
self.progress.init_step(BacktestState.ANALYZE, 0)
strategy_name = strat.get_strategy_name()
@@ -1605,7 +1605,7 @@ class Backtesting:
"""
Run backtesting end-to-end
"""
data: Dict[str, DataFrame] = {}
data: dict[str, DataFrame] = {}
data, timerange = self.load_bt_data()
self.load_bt_data_detail()

View File

@@ -1,7 +1,7 @@
import logging
from copy import deepcopy
from datetime import datetime, timezone
from typing import Any, Dict, Optional
from typing import Any, Optional
from pandas import DataFrame
@@ -14,7 +14,7 @@ logger = logging.getLogger(__name__)
class VarHolder:
timerange: TimeRange
data: DataFrame
indicators: Dict[str, DataFrame]
indicators: dict[str, DataFrame]
result: DataFrame
compared: DataFrame
from_dt: datetime
@@ -25,7 +25,7 @@ class VarHolder:
class BaseAnalysis:
def __init__(self, config: Dict[str, Any], strategy_obj: Dict):
def __init__(self, config: dict[str, Any], strategy_obj: dict):
self.failed_bias_check = True
self.full_varHolder = VarHolder()
self.exchange: Optional[Any] = None

View File

@@ -11,7 +11,7 @@ import warnings
from datetime import datetime, timezone
from math import ceil
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Optional
import rapidjson
from joblib import Parallel, cpu_count, delayed, dump, load, wrap_non_picklable_objects
@@ -70,14 +70,14 @@ class Hyperopt:
"""
def __init__(self, config: Config) -> None:
self.buy_space: List[Dimension] = []
self.sell_space: List[Dimension] = []
self.protection_space: List[Dimension] = []
self.roi_space: List[Dimension] = []
self.stoploss_space: List[Dimension] = []
self.trailing_space: List[Dimension] = []
self.max_open_trades_space: List[Dimension] = []
self.dimensions: List[Dimension] = []
self.buy_space: list[Dimension] = []
self.sell_space: list[Dimension] = []
self.protection_space: list[Dimension] = []
self.roi_space: list[Dimension] = []
self.stoploss_space: list[Dimension] = []
self.trailing_space: list[Dimension] = []
self.max_open_trades_space: list[Dimension] = []
self.dimensions: list[Dimension] = []
self._hyper_out: HyperoptOutput = HyperoptOutput(streaming=True)
@@ -125,7 +125,7 @@ class Hyperopt:
self.market_change = 0.0
self.num_epochs_saved = 0
self.current_best_epoch: Optional[Dict[str, Any]] = None
self.current_best_epoch: Optional[dict[str, Any]] = None
# Use max_open_trades for hyperopt as well, except --disable-max-market-positions is set
if not self.config.get("use_max_market_positions", True):
@@ -168,8 +168,8 @@ class Hyperopt:
self.hyperopt_pickle_magic(modules.__bases__)
def _get_params_dict(
self, dimensions: List[Dimension], raw_params: List[Any]
) -> Dict[str, Any]:
self, dimensions: list[Dimension], raw_params: list[Any]
) -> dict[str, Any]:
# Ensure the number of dimensions match
# the number of parameters in the list.
if len(raw_params) != len(dimensions):
@@ -179,7 +179,7 @@ class Hyperopt:
# and the values are taken from the list of parameters.
return {d.name: v for d, v in zip(dimensions, raw_params)}
def _save_result(self, epoch: Dict) -> None:
def _save_result(self, epoch: dict) -> None:
"""
Save hyperopt results to file
Store one line per epoch.
@@ -205,11 +205,11 @@ class Hyperopt:
latest_filename = Path.joinpath(self.results_file.parent, LAST_BT_RESULT_FN)
file_dump_json(latest_filename, {"latest_hyperopt": str(self.results_file.name)}, log=False)
def _get_params_details(self, params: Dict) -> Dict:
def _get_params_details(self, params: dict) -> dict:
"""
Return the params for each space
"""
result: Dict = {}
result: dict = {}
if HyperoptTools.has_space(self.config, "buy"):
result["buy"] = {p.name: params.get(p.name) for p in self.buy_space}
@@ -236,11 +236,11 @@ class Hyperopt:
return result
def _get_no_optimize_details(self) -> Dict[str, Any]:
def _get_no_optimize_details(self) -> dict[str, Any]:
"""
Get non-optimized parameters
"""
result: Dict[str, Any] = {}
result: dict[str, Any] = {}
strategy = self.backtesting.strategy
if not HyperoptTools.has_space(self.config, "roi"):
result["roi"] = {str(k): v for k, v in strategy.minimal_roi.items()}
@@ -257,7 +257,7 @@ class Hyperopt:
result["max_open_trades"] = {"max_open_trades": strategy.max_open_trades}
return result
def print_results(self, results: Dict[str, Any]) -> None:
def print_results(self, results: dict[str, Any]) -> None:
"""
Log results if they are better than any previous evaluation
TODO: this should be moved to HyperoptTools too
@@ -318,7 +318,7 @@ class Hyperopt:
+ self.max_open_trades_space
)
def assign_params(self, params_dict: Dict[str, Any], category: str) -> None:
def assign_params(self, params_dict: dict[str, Any], category: str) -> None:
"""
Assign hyperoptable parameters
"""
@@ -327,7 +327,7 @@ class Hyperopt:
# noinspection PyProtectedMember
attr.value = params_dict[attr_name]
def generate_optimizer(self, raw_params: List[Any]) -> Dict[str, Any]:
def generate_optimizer(self, raw_params: list[Any]) -> dict[str, Any]:
"""
Used Optimize function.
Called once per epoch to optimize whatever is configured.
@@ -406,12 +406,12 @@ class Hyperopt:
def _get_results_dict(
self,
backtesting_results: Dict[str, Any],
backtesting_results: dict[str, Any],
min_date: datetime,
max_date: datetime,
params_dict: Dict[str, Any],
processed: Dict[str, DataFrame],
) -> Dict[str, Any]:
params_dict: dict[str, Any],
processed: dict[str, DataFrame],
) -> dict[str, Any]:
params_details = self._get_params_details(params_dict)
strat_stats = generate_strategy_stats(
@@ -458,7 +458,7 @@ class Hyperopt:
"total_profit": total_profit,
}
def get_optimizer(self, dimensions: List[Dimension], cpu_count) -> Optimizer:
def get_optimizer(self, dimensions: list[Dimension], cpu_count) -> Optimizer:
estimator = self.custom_hyperopt.generate_estimator(dimensions=dimensions)
acq_optimizer = "sampling"
@@ -479,7 +479,7 @@ class Hyperopt:
model_queue_size=SKOPT_MODEL_QUEUE_SIZE,
)
def run_optimizer_parallel(self, parallel: Parallel, asked: List[List]) -> List[Dict[str, Any]]:
def run_optimizer_parallel(self, parallel: Parallel, asked: list[list]) -> list[dict[str, Any]]:
"""Start optimizer in a parallel way"""
return parallel(
delayed(wrap_non_picklable_objects(self.generate_optimizer))(v) for v in asked
@@ -488,7 +488,7 @@ class Hyperopt:
def _set_random_state(self, random_state: Optional[int]) -> int:
return random_state or random.randint(1, 2**16 - 1) # noqa: S311
def advise_and_trim(self, data: Dict[str, DataFrame]) -> Dict[str, DataFrame]:
def advise_and_trim(self, data: dict[str, DataFrame]) -> dict[str, DataFrame]:
preprocessed = self.backtesting.strategy.advise_all_indicators(data)
# Trim startup period from analyzed dataframe to get correct dates for output.
@@ -524,7 +524,7 @@ class Hyperopt:
else:
dump(data, self.data_pickle_file)
def get_asked_points(self, n_points: int) -> Tuple[List[List[Any]], List[bool]]:
def get_asked_points(self, n_points: int) -> tuple[list[list[Any]], list[bool]]:
"""
Enforce points returned from `self.opt.ask` have not been already evaluated
@@ -545,8 +545,8 @@ class Hyperopt:
return new_list
i = 0
asked_non_tried: List[List[Any]] = []
is_random_non_tried: List[bool] = []
asked_non_tried: list[list[Any]] = []
is_random_non_tried: list[bool] = []
while i < 5 and len(asked_non_tried) < n_points:
if i < 3:
self.opt.cache_ = {}
@@ -573,7 +573,7 @@ class Hyperopt:
else:
return self.opt.ask(n_points=n_points), [False for _ in range(n_points)]
def evaluate_result(self, val: Dict[str, Any], current: int, is_random: bool):
def evaluate_result(self, val: dict[str, Any], current: int, is_random: bool):
"""
Evaluate results returned from generate_optimizer
"""

View File

@@ -6,7 +6,7 @@ This module implements a convenience auto-hyperopt class, which can be used toge
import logging
from contextlib import suppress
from typing import Callable, Dict, List
from typing import Callable
from freqtrade.exceptions import OperationalException
@@ -59,7 +59,7 @@ class HyperOptAuto(IHyperOpt):
if attr.optimize:
yield attr.get_space(attr_name)
def _get_indicator_space(self, category) -> List:
def _get_indicator_space(self, category) -> list:
# TODO: is this necessary, or can we call "generate_space" directly?
indicator_space = list(self._generate_indicator_space(category))
if len(indicator_space) > 0:
@@ -70,32 +70,32 @@ class HyperOptAuto(IHyperOpt):
)
return []
def buy_indicator_space(self) -> List["Dimension"]:
def buy_indicator_space(self) -> list["Dimension"]:
return self._get_indicator_space("buy")
def sell_indicator_space(self) -> List["Dimension"]:
def sell_indicator_space(self) -> list["Dimension"]:
return self._get_indicator_space("sell")
def protection_space(self) -> List["Dimension"]:
def protection_space(self) -> list["Dimension"]:
return self._get_indicator_space("protection")
def generate_roi_table(self, params: Dict) -> Dict[int, float]:
def generate_roi_table(self, params: dict) -> dict[int, float]:
return self._get_func("generate_roi_table")(params)
def roi_space(self) -> List["Dimension"]:
def roi_space(self) -> list["Dimension"]:
return self._get_func("roi_space")()
def stoploss_space(self) -> List["Dimension"]:
def stoploss_space(self) -> list["Dimension"]:
return self._get_func("stoploss_space")()
def generate_trailing_params(self, params: Dict) -> Dict:
def generate_trailing_params(self, params: dict) -> dict:
return self._get_func("generate_trailing_params")(params)
def trailing_space(self) -> List["Dimension"]:
def trailing_space(self) -> list["Dimension"]:
return self._get_func("trailing_space")()
def max_open_trades_space(self) -> List["Dimension"]:
def max_open_trades_space(self) -> list["Dimension"]:
return self._get_func("max_open_trades_space")()
def generate_estimator(self, dimensions: List["Dimension"], **kwargs) -> EstimatorType:
def generate_estimator(self, dimensions: list["Dimension"], **kwargs) -> EstimatorType:
return self._get_func("generate_estimator")(dimensions=dimensions, **kwargs)

View File

@@ -1,5 +1,4 @@
import logging
from typing import List
from freqtrade.exceptions import OperationalException
@@ -7,7 +6,7 @@ from freqtrade.exceptions import OperationalException
logger = logging.getLogger(__name__)
def hyperopt_filter_epochs(epochs: List, filteroptions: dict, log: bool = True) -> List:
def hyperopt_filter_epochs(epochs: list, filteroptions: dict, log: bool = True) -> list:
"""
Filter out items from the list of hyperopt results
"""
@@ -33,14 +32,14 @@ def hyperopt_filter_epochs(epochs: List, filteroptions: dict, log: bool = True)
return epochs
def _hyperopt_filter_epochs_trade(epochs: List, trade_count: int):
def _hyperopt_filter_epochs_trade(epochs: list, trade_count: int):
"""
Filter epochs with trade-counts > trades
"""
return [x for x in epochs if x["results_metrics"].get("total_trades", 0) > trade_count]
def _hyperopt_filter_epochs_trade_count(epochs: List, filteroptions: dict) -> List:
def _hyperopt_filter_epochs_trade_count(epochs: list, filteroptions: dict) -> list:
if filteroptions["filter_min_trades"] > 0:
epochs = _hyperopt_filter_epochs_trade(epochs, filteroptions["filter_min_trades"])
@@ -53,7 +52,7 @@ def _hyperopt_filter_epochs_trade_count(epochs: List, filteroptions: dict) -> Li
return epochs
def _hyperopt_filter_epochs_duration(epochs: List, filteroptions: dict) -> List:
def _hyperopt_filter_epochs_duration(epochs: list, filteroptions: dict) -> list:
def get_duration_value(x):
# Duration in minutes ...
if "holding_avg_s" in x["results_metrics"]:
@@ -74,7 +73,7 @@ def _hyperopt_filter_epochs_duration(epochs: List, filteroptions: dict) -> List:
return epochs
def _hyperopt_filter_epochs_profit(epochs: List, filteroptions: dict) -> List:
def _hyperopt_filter_epochs_profit(epochs: list, filteroptions: dict) -> list:
if filteroptions["filter_min_avg_profit"] is not None:
epochs = _hyperopt_filter_epochs_trade(epochs, 0)
epochs = [
@@ -110,7 +109,7 @@ def _hyperopt_filter_epochs_profit(epochs: List, filteroptions: dict) -> List:
return epochs
def _hyperopt_filter_epochs_objective(epochs: List, filteroptions: dict) -> List:
def _hyperopt_filter_epochs_objective(epochs: list, filteroptions: dict) -> list:
if filteroptions["filter_min_objective"] is not None:
epochs = _hyperopt_filter_epochs_trade(epochs, 0)
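The epoch filters work on plain dicts read back from the results file, so the lowercase `list`/`dict` annotations describe exactly what is passed around. A runnable sketch of the trade-count filter shown in `_hyperopt_filter_epochs_trade`, with a toy epoch list:

```python
from typing import Any


def filter_epochs_by_trades(epochs: list[dict[str, Any]], trade_count: int) -> list[dict[str, Any]]:
    # Keep only epochs whose backtest produced more than `trade_count` trades,
    # mirroring the comprehension in _hyperopt_filter_epochs_trade above.
    return [e for e in epochs if e["results_metrics"].get("total_trades", 0) > trade_count]


if __name__ == "__main__":
    epochs = [
        {"results_metrics": {"total_trades": 0}},
        {"results_metrics": {"total_trades": 12}},
        {"results_metrics": {"total_trades": 3}},
    ]
    print(filter_epochs_by_trades(epochs, 2))  # only the 12- and 3-trade epochs remain
```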

View File

@@ -6,7 +6,7 @@ This module defines the interface to apply for hyperopt
import logging
import math
from abc import ABC
from typing import Dict, List, Union
from typing import Union
from sklearn.base import RegressorMixin
from skopt.space import Categorical, Dimension, Integer
@@ -41,7 +41,7 @@ class IHyperOpt(ABC):
# Assign timeframe to be used in hyperopt
IHyperOpt.timeframe = str(config["timeframe"])
def generate_estimator(self, dimensions: List[Dimension], **kwargs) -> EstimatorType:
def generate_estimator(self, dimensions: list[Dimension], **kwargs) -> EstimatorType:
"""
Return base_estimator.
Can be any of "GP", "RF", "ET", "GBRT" or an instance of a class
@@ -49,7 +49,7 @@ class IHyperOpt(ABC):
"""
return "ET"
def generate_roi_table(self, params: Dict) -> Dict[int, float]:
def generate_roi_table(self, params: dict) -> dict[int, float]:
"""
Create a ROI table.
@@ -64,7 +64,7 @@ class IHyperOpt(ABC):
return roi_table
def roi_space(self) -> List[Dimension]:
def roi_space(self) -> list[Dimension]:
"""
Create a ROI space.
@@ -146,7 +146,7 @@ class IHyperOpt(ABC):
),
]
def stoploss_space(self) -> List[Dimension]:
def stoploss_space(self) -> list[Dimension]:
"""
Create a stoploss space.
@@ -157,7 +157,7 @@ class IHyperOpt(ABC):
SKDecimal(-0.35, -0.02, decimals=3, name="stoploss"),
]
def generate_trailing_params(self, params: Dict) -> Dict:
def generate_trailing_params(self, params: dict) -> dict:
"""
Create dict with trailing stop parameters.
"""
@@ -170,7 +170,7 @@ class IHyperOpt(ABC):
"trailing_only_offset_is_reached": params["trailing_only_offset_is_reached"],
}
def trailing_space(self) -> List[Dimension]:
def trailing_space(self) -> list[Dimension]:
"""
Create a trailing stoploss space.
@@ -194,7 +194,7 @@ class IHyperOpt(ABC):
Categorical([True, False], name="trailing_only_offset_is_reached"),
]
def max_open_trades_space(self) -> List[Dimension]:
def max_open_trades_space(self) -> list[Dimension]:
"""
Create a max open trades space.
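`generate_roi_table(params: dict) -> dict[int, float]` is the contract most strategies touch: flat hyperopt parameters in, a `{minutes: minimal_profit}` table out. A simplified sketch of such a mapping; the `roi_t*`/`roi_p*` names follow freqtrade's convention, but treat the exact arithmetic as illustrative rather than the shipped default:

```python
def generate_roi_table(params: dict) -> dict[int, float]:
    # Build a stepped ROI table: a higher minimal profit right after entry,
    # stepping down to 0 once the three time buckets have elapsed.
    roi_table: dict[int, float] = {
        0: params["roi_p1"] + params["roi_p2"] + params["roi_p3"],
        params["roi_t3"]: params["roi_p1"] + params["roi_p2"],
        params["roi_t3"] + params["roi_t2"]: params["roi_p1"],
        params["roi_t3"] + params["roi_t2"] + params["roi_t1"]: 0.0,
    }
    return roi_table


if __name__ == "__main__":
    # Prints a four-step table keyed by minutes (0, 10, 30, 70).
    print(generate_roi_table({"roi_t1": 40, "roi_t2": 20, "roi_t3": 10,
                              "roi_p1": 0.01, "roi_p2": 0.02, "roi_p3": 0.04}))
```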

View File

@@ -5,7 +5,7 @@ This module defines the interface for the loss-function for hyperopt
from abc import ABC, abstractmethod
from datetime import datetime
from typing import Any, Dict
from typing import Any
from pandas import DataFrame
@@ -29,8 +29,8 @@ class IHyperOptLoss(ABC):
min_date: datetime,
max_date: datetime,
config: Config,
processed: Dict[str, DataFrame],
backtest_stats: Dict[str, Any],
processed: dict[str, DataFrame],
backtest_stats: dict[str, Any],
**kwargs,
) -> float:
"""

View File

@@ -1,6 +1,6 @@
import sys
from os import get_terminal_size
from typing import Any, List, Optional
from typing import Any, Optional
from rich.align import Align
from rich.console import Console
@@ -14,7 +14,7 @@ from freqtrade.util import fmt_coin
class HyperoptOutput:
def __init__(self, streaming=False) -> None:
self._results: List[Any] = []
self._results: list[Any] = []
self._streaming = streaming
self.__init_table()

View File

@@ -1,8 +1,9 @@
import logging
from collections.abc import Iterator
from copy import deepcopy
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, Iterator, List, Optional, Tuple
from typing import Any, Optional
import numpy as np
import rapidjson
@@ -83,7 +84,7 @@ class HyperoptTools:
)
@staticmethod
def load_params(filename: Path) -> Dict:
def load_params(filename: Path) -> dict:
"""
Load parameters from file
"""
@@ -92,7 +93,7 @@ class HyperoptTools:
return params
@staticmethod
def try_export_params(config: Config, strategy_name: str, params: Dict):
def try_export_params(config: Config, strategy_name: str, params: dict):
if params.get(FTHYPT_FILEVERSION, 1) >= 2 and not config.get("disableparamexport", False):
# Export parameters ...
fn = HyperoptTools.get_strategy_filename(config, strategy_name)
@@ -113,7 +114,7 @@ class HyperoptTools:
return any(s in config["spaces"] for s in [space, "all", "default"])
@staticmethod
def _read_results(results_file: Path, batch_size: int = 10) -> Iterator[List[Any]]:
def _read_results(results_file: Path, batch_size: int = 10) -> Iterator[list[Any]]:
"""
Stream hyperopt results from file
"""
@@ -143,7 +144,7 @@ class HyperoptTools:
return False
@staticmethod
def load_filtered_results(results_file: Path, config: Config) -> Tuple[List, int]:
def load_filtered_results(results_file: Path, config: Config) -> tuple[list, int]:
filteroptions = {
"only_best": config.get("hyperopt_list_best", False),
"only_profitable": config.get("hyperopt_list_profitable", False),
@@ -204,7 +205,7 @@ class HyperoptTools:
print(f"\n{header_str}:\n\n{explanation_str}\n")
if print_json:
result_dict: Dict = {}
result_dict: dict = {}
for s in [
"buy",
"sell",
@@ -256,7 +257,7 @@ class HyperoptTools:
@staticmethod
def _params_pretty_print(
params, space: str, header: str, non_optimized: Optional[Dict] = None
params, space: str, header: str, non_optimized: Optional[dict] = None
) -> None:
if space in params or (non_optimized and space in non_optimized):
space_params = HyperoptTools._space_params(params, space, 5)
@@ -298,7 +299,7 @@ class HyperoptTools:
print(result)
@staticmethod
def _space_params(params, space: str, r: Optional[int] = None) -> Dict:
def _space_params(params, space: str, r: Optional[int] = None) -> dict:
d = params.get(space)
if d:
# Round floats to `r` digits after the decimal point if requested
@@ -328,7 +329,7 @@ class HyperoptTools:
return bool(results["loss"] < current_best_loss)
@staticmethod
def format_results_explanation_string(results_metrics: Dict, stake_currency: str) -> str:
def format_results_explanation_string(results_metrics: dict, stake_currency: str) -> str:
"""
Return the formatted results explanation in a string
"""

View File

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict, List, Literal, Union
from typing import Any, Literal, Union
from freqtrade.constants import UNLIMITED_STAKE_AMOUNT, Config
from freqtrade.ft_types import BacktestResultType
@@ -10,7 +10,7 @@ from freqtrade.util import decimals_per_coin, fmt_coin, print_rich_table
logger = logging.getLogger(__name__)
def _get_line_floatfmt(stake_currency: str) -> List[str]:
def _get_line_floatfmt(stake_currency: str) -> list[str]:
"""
Generate floatformat (goes in line with _generate_result_line())
"""
@@ -18,8 +18,8 @@ def _get_line_floatfmt(stake_currency: str) -> List[str]:
def _get_line_header(
first_column: Union[str, List[str]], stake_currency: str, direction: str = "Trades"
) -> List[str]:
first_column: Union[str, list[str]], stake_currency: str, direction: str = "Trades"
) -> list[str]:
"""
Generate header lines (goes in line with _generate_result_line())
"""
@@ -45,7 +45,7 @@ def generate_wins_draws_losses(wins, draws, losses):
def text_table_bt_results(
pair_results: List[Dict[str, Any]], stake_currency: str, title: str
pair_results: list[dict[str, Any]], stake_currency: str, title: str
) -> None:
"""
Generates and returns a text table for the given backtest data and the results dataframe
@@ -73,7 +73,7 @@ def text_table_bt_results(
def text_table_tags(
tag_type: Literal["enter_tag", "exit_tag", "mix_tag"],
tag_results: List[Dict[str, Any]],
tag_results: list[dict[str, Any]],
stake_currency: str,
) -> None:
"""
@@ -123,7 +123,7 @@ def text_table_tags(
def text_table_periodic_breakdown(
days_breakdown_stats: List[Dict[str, Any]], stake_currency: str, period: str
days_breakdown_stats: list[dict[str, Any]], stake_currency: str, period: str
) -> None:
"""
Generate small table with Backtest results by days
@@ -191,7 +191,7 @@ def text_table_strategy(strategy_results, stake_currency: str, title: str):
print_rich_table(output, headers, summary=title)
def text_table_add_metrics(strat_results: Dict) -> None:
def text_table_add_metrics(strat_results: dict) -> None:
if len(strat_results["trades"]) > 0:
best_trade = max(strat_results["trades"], key=lambda x: x["profit_ratio"])
worst_trade = min(strat_results["trades"], key=lambda x: x["profit_ratio"])
@@ -411,7 +411,7 @@ def text_table_add_metrics(strat_results: Dict) -> None:
print(message)
def _show_tag_subresults(results: Dict[str, Any], stake_currency: str):
def _show_tag_subresults(results: dict[str, Any], stake_currency: str):
"""
Print tag subresults (enter_tag, exit_reason_summary, mix_tag_stats)
"""
@@ -426,7 +426,7 @@ def _show_tag_subresults(results: Dict[str, Any], stake_currency: str):
def show_backtest_result(
strategy: str, results: Dict[str, Any], stake_currency: str, backtest_breakdown: List[str]
strategy: str, results: dict[str, Any], stake_currency: str, backtest_breakdown: list[str]
):
"""
Print results for one strategy
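All of these `text_table_*` helpers build plain header and row lists and hand them to `print_rich_table`. A minimal sketch of the same idea using `rich` directly; the column set and the `key`/`trades`/`profit_total_abs` field names are illustrative, and freqtrade's real tables carry considerably more columns:

```python
from rich.console import Console
from rich.table import Table


def print_pair_table(pair_results: list[dict], stake_currency: str, title: str) -> None:
    # Column layout loosely follows text_table_bt_results; the exact columns
    # freqtrade prints are richer than this sketch.
    table = Table(title=title)
    table.add_column("Pair")
    table.add_column("Trades", justify="right")
    table.add_column(f"Tot Profit {stake_currency}", justify="right")
    for row in pair_results:
        table.add_row(row["key"], str(row["trades"]), f"{row['profit_total_abs']:.2f}")
    Console().print(table)


if __name__ == "__main__":
    print_pair_table(
        [{"key": "BTC/USDT", "trades": 12, "profit_total_abs": 34.5}],
        "USDT",
        "Backtest results (sketch)",
    )
```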

View File

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Dict, Optional
from typing import Optional
from pandas import DataFrame
@@ -70,7 +70,7 @@ def store_backtest_stats(
def _store_backtest_analysis_data(
recordfilename: Path, data: Dict[str, Dict], dtappendix: str, name: str
recordfilename: Path, data: dict[str, dict], dtappendix: str, name: str
) -> Path:
"""
Stores backtest trade candles for analysis
@@ -91,9 +91,9 @@ def _store_backtest_analysis_data(
def store_backtest_analysis_results(
recordfilename: Path,
candles: Dict[str, Dict],
trades: Dict[str, Dict],
exited: Dict[str, Dict],
candles: dict[str, dict],
trades: dict[str, dict],
exited: dict[str, dict],
dtappendix: str,
) -> None:
_store_backtest_analysis_data(recordfilename, candles, dtappendix, "signals")

View File

@@ -1,7 +1,7 @@
import logging
from copy import deepcopy
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Literal, Tuple, Union
from typing import Any, Literal, Union
import numpy as np
from pandas import DataFrame, Series, concat, to_datetime
@@ -25,8 +25,8 @@ logger = logging.getLogger(__name__)
def generate_trade_signal_candles(
preprocessed_df: Dict[str, DataFrame], bt_results: Dict[str, Any], date_col: str
) -> Dict[str, DataFrame]:
preprocessed_df: dict[str, DataFrame], bt_results: dict[str, Any], date_col: str
) -> dict[str, DataFrame]:
signal_candles_only = {}
for pair in preprocessed_df.keys():
signal_candles_only_df = DataFrame()
@@ -48,8 +48,8 @@ def generate_trade_signal_candles(
def generate_rejected_signals(
preprocessed_df: Dict[str, DataFrame], rejected_dict: Dict[str, DataFrame]
) -> Dict[str, DataFrame]:
preprocessed_df: dict[str, DataFrame], rejected_dict: dict[str, DataFrame]
) -> dict[str, DataFrame]:
rejected_candles_only = {}
for pair, signals in rejected_dict.items():
rejected_signals_only_df = DataFrame()
@@ -69,8 +69,8 @@ def generate_rejected_signals(
def _generate_result_line(
result: DataFrame, starting_balance: int, first_column: Union[str, List[str]]
) -> Dict:
result: DataFrame, starting_balance: int, first_column: Union[str, list[str]]
) -> dict:
"""
Generate one result dict, with "first_column" as key.
"""
@@ -109,12 +109,12 @@ def _generate_result_line(
def generate_pair_metrics(
pairlist: List[str],
pairlist: list[str],
stake_currency: str,
starting_balance: int,
results: DataFrame,
skip_nan: bool = False,
) -> List[Dict]:
) -> list[dict]:
"""
Generates and returns a list for the given backtest data and the results dataframe
:param pairlist: Pairlist used
@@ -143,11 +143,11 @@ def generate_pair_metrics(
def generate_tag_metrics(
tag_type: Union[Literal["enter_tag", "exit_reason"], List[Literal["enter_tag", "exit_reason"]]],
tag_type: Union[Literal["enter_tag", "exit_reason"], list[Literal["enter_tag", "exit_reason"]]],
starting_balance: int,
results: DataFrame,
skip_nan: bool = False,
) -> List[Dict]:
) -> list[dict]:
"""
Generates and returns a list of metrics for the given tag trades and the results dataframe
:param starting_balance: Starting balance
@@ -177,7 +177,7 @@ def generate_tag_metrics(
return []
def generate_strategy_comparison(bt_stats: Dict) -> List[Dict]:
def generate_strategy_comparison(bt_stats: dict) -> list[dict]:
"""
Generate summary per strategy
:param bt_stats: Dict of <Strategyname: DataFrame> containing results for all strategies
@@ -208,8 +208,8 @@ def _get_resample_from_period(period: str) -> str:
def generate_periodic_breakdown_stats(
trade_list: Union[List, DataFrame], period: str
) -> List[Dict[str, Any]]:
trade_list: Union[list, DataFrame], period: str
) -> list[dict[str, Any]]:
results = trade_list if not isinstance(trade_list, list) else DataFrame.from_records(trade_list)
if len(results) == 0:
return []
@@ -237,14 +237,14 @@ def generate_periodic_breakdown_stats(
return stats
def generate_all_periodic_breakdown_stats(trade_list: List) -> Dict[str, List]:
def generate_all_periodic_breakdown_stats(trade_list: list) -> dict[str, list]:
result = {}
for period in BACKTEST_BREAKDOWNS:
result[period] = generate_periodic_breakdown_stats(trade_list, period)
return result
def calc_streak(dataframe: DataFrame) -> Tuple[int, int]:
def calc_streak(dataframe: DataFrame) -> tuple[int, int]:
"""
Calculate consecutive win and loss streaks
:param dataframe: Dataframe containing the trades dataframe, with profit_ratio column
@@ -261,7 +261,7 @@ def calc_streak(dataframe: DataFrame) -> Tuple[int, int]:
return cons_wins, cons_losses
def generate_trading_stats(results: DataFrame) -> Dict[str, Any]:
def generate_trading_stats(results: DataFrame) -> dict[str, Any]:
"""Generate overall trade statistics"""
if len(results) == 0:
return {
@@ -313,7 +313,7 @@ def generate_trading_stats(results: DataFrame) -> Dict[str, Any]:
}
def generate_daily_stats(results: DataFrame) -> Dict[str, Any]:
def generate_daily_stats(results: DataFrame) -> dict[str, Any]:
"""Generate daily statistics"""
if len(results) == 0:
return {
@@ -350,14 +350,14 @@ def generate_daily_stats(results: DataFrame) -> Dict[str, Any]:
def generate_strategy_stats(
pairlist: List[str],
pairlist: list[str],
strategy: str,
content: Dict[str, Any],
content: dict[str, Any],
min_date: datetime,
max_date: datetime,
market_change: float,
is_hyperopt: bool = False,
) -> Dict[str, Any]:
) -> dict[str, Any]:
"""
:param pairlist: List of pairs to backtest
:param strategy: Strategy name
@@ -368,7 +368,7 @@ def generate_strategy_stats(
:param market_change: float indicating the market change
:return: Dictionary containing results per strategy and a strategy summary.
"""
results: Dict[str, DataFrame] = content["results"]
results: dict[str, DataFrame] = content["results"]
if not isinstance(results, DataFrame):
return {}
config = content["config"]
@@ -558,8 +558,8 @@ def generate_strategy_stats(
def generate_backtest_stats(
btdata: Dict[str, DataFrame],
all_results: Dict[str, Dict[str, Union[DataFrame, Dict]]],
btdata: dict[str, DataFrame],
all_results: dict[str, dict[str, Union[DataFrame, dict]]],
min_date: datetime,
max_date: datetime,
) -> BacktestResultType:
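`calc_streak` is one of the more algorithmic pieces in this hunk: it reduces the trades DataFrame to the longest winning and the longest losing run. A straightforward sketch of that reduction (freqtrade's own implementation may differ in detail but is documented to return the same pair of numbers):

```python
from pandas import DataFrame


def calc_streak(trades: DataFrame) -> tuple[int, int]:
    # Longest run of consecutive winners and of consecutive losers,
    # judged here by the sign of profit_ratio.
    cons_wins = cons_losses = cur_wins = cur_losses = 0
    for profit in trades["profit_ratio"]:
        if profit > 0:
            cur_wins, cur_losses = cur_wins + 1, 0
        else:
            cur_wins, cur_losses = 0, cur_losses + 1
        cons_wins = max(cons_wins, cur_wins)
        cons_losses = max(cons_losses, cur_losses)
    return cons_wins, cons_losses


if __name__ == "__main__":
    df = DataFrame({"profit_ratio": [0.02, 0.01, -0.01, -0.03, -0.02, 0.04]})
    print(calc_streak(df))  # (2, 3)
```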

Some files were not shown because too many files have changed in this diff.