Mirror of https://github.com/freqtrade/freqtrade.git (synced 2026-02-02 20:30:25 +00:00)
@@ -18,15 +18,22 @@
"editor.insertSpaces": true,
"files.trimTrailingWhitespace": true,
"[markdown]": {
"files.trimTrailingWhitespace": false,
"files.trimTrailingWhitespace": false
},
"python.pythonPath": "/usr/local/bin/python",
"[python]": {
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit"
},
"editor.formatOnSave": true,
"editor.defaultFormatter": "charliermarsh.ruff"
}
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance",
"ms-python.isort",
"charliermarsh.ruff",
"davidanson.vscode-markdownlint",
"ms-azuretools.vscode-docker",
"vscode-icons-team.vscode-icons",
.github/workflows/ci.yml (vendored, 18 changes)

@@ -111,7 +111,11 @@ jobs:

- name: Run Ruff
run: |
ruff check --output-format=github .
ruff check --output-format=github

- name: Run Ruff format check
run: |
ruff format --check

- name: Mypy
run: |
@@ -230,7 +234,11 @@ jobs:

- name: Run Ruff
run: |
ruff check --output-format=github .
ruff check --output-format=github

- name: Run Ruff format check
run: |
ruff format --check

- name: Mypy
run: |
@@ -300,7 +308,11 @@ jobs:

- name: Run Ruff
run: |
ruff check --output-format=github .
ruff check --output-format=github

- name: Run Ruff format check
run: |
ruff format --check

- name: Mypy
run: |
.vscode/extensions.json (vendored, new file, 11 lines)

@@ -0,0 +1,11 @@
{
"recommendations": [
"ms-python.python",
"ms-python.vscode-pylance",
"charliermarsh.ruff",
"davidanson.vscode-markdownlint",
"ms-azuretools.vscode-docker",
"vscode-icons-team.vscode-icons",
"github.vscode-github-actions",
]
}
@@ -72,12 +72,12 @@ you can manually run pre-commit with `pre-commit run -a`.
mypy freqtrade
```

### 4. Ensure all imports are correct
### 4. Ensure formatting is correct

#### Run isort
#### Run ruff

``` bash
isort .
ruff format .
```

## (Core)-Committer Guide
@@ -6,21 +6,18 @@ from pathlib import Path
import ccxt


key = os.environ.get('FREQTRADE__EXCHANGE__KEY')
secret = os.environ.get('FREQTRADE__EXCHANGE__SECRET')
key = os.environ.get("FREQTRADE__EXCHANGE__KEY")
secret = os.environ.get("FREQTRADE__EXCHANGE__SECRET")

proxy = os.environ.get('CI_WEB_PROXY')
proxy = os.environ.get("CI_WEB_PROXY")

exchange = ccxt.binance({
'apiKey': key,
'secret': secret,
'httpsProxy': proxy,
'options': {'defaultType': 'swap'}
})
exchange = ccxt.binance(
{"apiKey": key, "secret": secret, "httpsProxy": proxy, "options": {"defaultType": "swap"}}
)
_ = exchange.load_markets()

lev_tiers = exchange.fetch_leverage_tiers()

# Assumes this is running in the root of the repository.
file = Path('freqtrade/exchange/binance_leverage_tiers.json')
json.dump(dict(sorted(lev_tiers.items())), file.open('w'), indent=2)
file = Path("freqtrade/exchange/binance_leverage_tiers.json")
json.dump(dict(sorted(lev_tiers.items())), file.open("w"), indent=2)
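As a quick sanity check after regenerating the tiers file, something like the following could be run from the repository root (a minimal sketch, not part of the diff; the file path is taken from the script above):

```python
import json
from pathlib import Path

# Assumes the leverage-tiers script above has just been run from the repository root.
tiers = json.loads(Path("freqtrade/exchange/binance_leverage_tiers.json").read_text())
assert list(tiers) == sorted(tiers), "tiers should be stored sorted by pair"
print(f"{len(tiers)} pairs with leverage tiers")
```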
@@ -1,18 +1,15 @@
#!/usr/bin/env python3
from freqtrade_client import __version__ as client_version

from freqtrade import __version__ as ft_version
from freqtrade_client import __version__ as client_version


def main():
if ft_version != client_version:
print(f"Versions do not match: \n"
f"ft: {ft_version} \n"
f"client: {client_version}")
print(f"Versions do not match: \nft: {ft_version} \nclient: {client_version}")
exit(1)
print(f"Versions match: ft: {ft_version}, client: {client_version}")
exit(0)


if __name__ == '__main__':
if __name__ == "__main__":
main()
@@ -6,28 +6,30 @@ from pathlib import Path
import yaml


pre_commit_file = Path('.pre-commit-config.yaml')
require_dev = Path('requirements-dev.txt')
require = Path('requirements.txt')
pre_commit_file = Path(".pre-commit-config.yaml")
require_dev = Path("requirements-dev.txt")
require = Path("requirements.txt")

with require_dev.open('r') as rfile:
with require_dev.open("r") as rfile:
requirements = rfile.readlines()

with require.open('r') as rfile:
with require.open("r") as rfile:
requirements.extend(rfile.readlines())

# Extract types only
type_reqs = [r.strip('\n') for r in requirements if r.startswith(
'types-') or r.startswith('SQLAlchemy')]
type_reqs = [
r.strip("\n") for r in requirements if r.startswith("types-") or r.startswith("SQLAlchemy")
]

with pre_commit_file.open('r') as file:
with pre_commit_file.open("r") as file:
f = yaml.load(file, Loader=yaml.SafeLoader)


mypy_repo = [repo for repo in f['repos'] if repo['repo']
== 'https://github.com/pre-commit/mirrors-mypy']
mypy_repo = [
repo for repo in f["repos"] if repo["repo"] == "https://github.com/pre-commit/mirrors-mypy"
]

hooks = mypy_repo[0]['hooks'][0]['additional_dependencies']
hooks = mypy_repo[0]["hooks"][0]["additional_dependencies"]

errors = []
for hook in hooks:
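For context, a minimal sketch of the `.pre-commit-config.yaml` shape the script above walks: the repo URL and the `hooks`/`additional_dependencies` keys mirror the code, while the concrete dependency entries and versions are invented placeholders.

```python
import yaml

# Hypothetical config fragment; only the structure matters here.
sample = yaml.safe_load(
    """
repos:
  - repo: https://github.com/pre-commit/mirrors-mypy
    hooks:
      - id: mypy
        additional_dependencies:
          - types-requests==2.0.0    # placeholder version
          - SQLAlchemy==2.0.0        # placeholder version
"""
)
mypy_repo = [
    repo for repo in sample["repos"]
    if repo["repo"] == "https://github.com/pre-commit/mirrors-mypy"
]
print(mypy_repo[0]["hooks"][0]["additional_dependencies"])
```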
@@ -1,21 +1,33 @@
""" Freqtrade bot """
__version__ = '2024.5-dev'
"""Freqtrade bot"""

if 'dev' in __version__:
__version__ = "2024.5-dev"

if "dev" in __version__:
from pathlib import Path

try:
import subprocess

freqtrade_basedir = Path(__file__).parent

__version__ = __version__ + '-' + subprocess.check_output(
['git', 'log', '--format="%h"', '-n 1'],
stderr=subprocess.DEVNULL, cwd=freqtrade_basedir).decode("utf-8").rstrip().strip('"')
__version__ = (
__version__
+ "-"
+ subprocess.check_output(
["git", "log", '--format="%h"', "-n 1"],
stderr=subprocess.DEVNULL,
cwd=freqtrade_basedir,
)
.decode("utf-8")
.rstrip()
.strip('"')
)

except Exception:  # pragma: no cover
# git not available, ignore
try:
# Try Fallback to freqtrade_commit file (created by CI while building docker image)
versionfile = Path('./freqtrade_commit')
versionfile = Path("./freqtrade_commit")
if versionfile.is_file():
__version__ = f"docker-{__version__}-{versionfile.read_text()[:8]}"
except Exception:
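To make the effect of the version block above concrete, the resulting strings look roughly like the following sketch (the commit hashes are invented for illustration):

```python
base = "2024.5-dev"

git_hash = "1a2b3c4"  # what `git log --format="%h" -n 1` might return, quotes stripped
print(base + "-" + git_hash)  # 2024.5-dev-1a2b3c4

docker_commit = "1a2b3c4d5e6f"  # hypothetical contents of the freqtrade_commit file
print(f"docker-{base}-{docker_commit[:8]}")  # docker-2024.5-dev-1a2b3c4d
```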
@@ -9,5 +9,5 @@ To launch Freqtrade as a module
from freqtrade import main


if __name__ == '__main__':
if __name__ == "__main__":
main.main()
@@ -6,22 +6,39 @@ Contains all start-commands, subcommands and CLI Interface creation.
Note: Be careful with file-scoped imports in these subfiles.
as they are parsed on startup, nothing containing optional modules should be loaded.
"""

from freqtrade.commands.analyze_commands import start_analysis_entries_exits
from freqtrade.commands.arguments import Arguments
from freqtrade.commands.build_config_commands import start_new_config, start_show_config
from freqtrade.commands.data_commands import (start_convert_data, start_convert_trades,
start_download_data, start_list_data)
from freqtrade.commands.data_commands import (
start_convert_data,
start_convert_trades,
start_download_data,
start_list_data,
)
from freqtrade.commands.db_commands import start_convert_db
from freqtrade.commands.deploy_commands import (start_create_userdir, start_install_ui,
start_new_strategy)
from freqtrade.commands.deploy_commands import (
start_create_userdir,
start_install_ui,
start_new_strategy,
)
from freqtrade.commands.hyperopt_commands import start_hyperopt_list, start_hyperopt_show
from freqtrade.commands.list_commands import (start_list_exchanges, start_list_freqAI_models,
start_list_markets, start_list_strategies,
start_list_timeframes, start_show_trades)
from freqtrade.commands.optimize_commands import (start_backtesting, start_backtesting_show,
start_edge, start_hyperopt,
start_lookahead_analysis,
start_recursive_analysis)
from freqtrade.commands.list_commands import (
start_list_exchanges,
start_list_freqAI_models,
start_list_markets,
start_list_strategies,
start_list_timeframes,
start_show_trades,
)
from freqtrade.commands.optimize_commands import (
start_backtesting,
start_backtesting_show,
start_edge,
start_hyperopt,
start_lookahead_analysis,
start_recursive_analysis,
)
from freqtrade.commands.pairlist_commands import start_test_pairlist
from freqtrade.commands.plot_commands import start_plot_dataframe, start_plot_profit
from freqtrade.commands.strategy_utils_commands import start_strategy_update
@@ -20,25 +20,25 @@ def setup_analyze_configuration(args: Dict[str, Any], method: RunMode) -> Dict[s
config = setup_utils_configuration(args, method)

no_unlimited_runmodes = {
RunMode.BACKTEST: 'backtesting',
RunMode.BACKTEST: "backtesting",
}
if method in no_unlimited_runmodes.keys():
from freqtrade.data.btanalysis import get_latest_backtest_filename

if 'exportfilename' in config:
if config['exportfilename'].is_dir():
btfile = Path(get_latest_backtest_filename(config['exportfilename']))
if "exportfilename" in config:
if config["exportfilename"].is_dir():
btfile = Path(get_latest_backtest_filename(config["exportfilename"]))
signals_file = f"{config['exportfilename']}/{btfile.stem}_signals.pkl"
else:
if config['exportfilename'].exists():
btfile = Path(config['exportfilename'])
if config["exportfilename"].exists():
btfile = Path(config["exportfilename"])
signals_file = f"{btfile.parent}/{btfile.stem}_signals.pkl"
else:
raise ConfigurationError(f"{config['exportfilename']} does not exist.")
else:
raise ConfigurationError('exportfilename not in config.')
raise ConfigurationError("exportfilename not in config.")

if (not Path(signals_file).exists()):
if not Path(signals_file).exists():
raise OperationalException(
f"Cannot find latest backtest signals file: {signals_file}."
"Run backtesting with `--export signals`."
@@ -58,6 +58,6 @@ def start_analysis_entries_exits(args: Dict[str, Any]) -> None:
# Initialize configuration
config = setup_analyze_configuration(args, RunMode.BACKTEST)

logger.info('Starting freqtrade in analysis mode')
logger.info("Starting freqtrade in analysis mode")

process_entry_exit_reasons(config)
@@ -1,6 +1,7 @@
|
||||
"""
|
||||
This module contains the argument manager class
|
||||
"""
|
||||
|
||||
import argparse
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
@@ -12,35 +13,72 @@ from freqtrade.constants import DEFAULT_CONFIG
|
||||
|
||||
ARGS_COMMON = ["verbosity", "logfile", "version", "config", "datadir", "user_data_dir"]
|
||||
|
||||
ARGS_STRATEGY = ["strategy", "strategy_path", "recursive_strategy_search", "freqaimodel",
|
||||
"freqaimodel_path"]
|
||||
ARGS_STRATEGY = [
|
||||
"strategy",
|
||||
"strategy_path",
|
||||
"recursive_strategy_search",
|
||||
"freqaimodel",
|
||||
"freqaimodel_path",
|
||||
]
|
||||
|
||||
ARGS_TRADE = ["db_url", "sd_notify", "dry_run", "dry_run_wallet", "fee"]
|
||||
|
||||
ARGS_WEBSERVER: List[str] = []
|
||||
|
||||
ARGS_COMMON_OPTIMIZE = ["timeframe", "timerange", "dataformat_ohlcv",
|
||||
"max_open_trades", "stake_amount", "fee", "pairs"]
|
||||
ARGS_COMMON_OPTIMIZE = [
|
||||
"timeframe",
|
||||
"timerange",
|
||||
"dataformat_ohlcv",
|
||||
"max_open_trades",
|
||||
"stake_amount",
|
||||
"fee",
|
||||
"pairs",
|
||||
]
|
||||
|
||||
ARGS_BACKTEST = ARGS_COMMON_OPTIMIZE + ["position_stacking", "use_max_market_positions",
|
||||
"enable_protections", "dry_run_wallet", "timeframe_detail",
|
||||
"strategy_list", "export", "exportfilename",
|
||||
"backtest_breakdown", "backtest_cache",
|
||||
"freqai_backtest_live_models"]
|
||||
ARGS_BACKTEST = ARGS_COMMON_OPTIMIZE + [
|
||||
"position_stacking",
|
||||
"use_max_market_positions",
|
||||
"enable_protections",
|
||||
"dry_run_wallet",
|
||||
"timeframe_detail",
|
||||
"strategy_list",
|
||||
"export",
|
||||
"exportfilename",
|
||||
"backtest_breakdown",
|
||||
"backtest_cache",
|
||||
"freqai_backtest_live_models",
|
||||
]
|
||||
|
||||
ARGS_HYPEROPT = ARGS_COMMON_OPTIMIZE + ["hyperopt", "hyperopt_path",
|
||||
"position_stacking", "use_max_market_positions",
|
||||
"enable_protections", "dry_run_wallet", "timeframe_detail",
|
||||
"epochs", "spaces", "print_all",
|
||||
"print_colorized", "print_json", "hyperopt_jobs",
|
||||
"hyperopt_random_state", "hyperopt_min_trades",
|
||||
"hyperopt_loss", "disableparamexport",
|
||||
"hyperopt_ignore_missing_space", "analyze_per_epoch"]
|
||||
ARGS_HYPEROPT = ARGS_COMMON_OPTIMIZE + [
|
||||
"hyperopt",
|
||||
"hyperopt_path",
|
||||
"position_stacking",
|
||||
"use_max_market_positions",
|
||||
"enable_protections",
|
||||
"dry_run_wallet",
|
||||
"timeframe_detail",
|
||||
"epochs",
|
||||
"spaces",
|
||||
"print_all",
|
||||
"print_colorized",
|
||||
"print_json",
|
||||
"hyperopt_jobs",
|
||||
"hyperopt_random_state",
|
||||
"hyperopt_min_trades",
|
||||
"hyperopt_loss",
|
||||
"disableparamexport",
|
||||
"hyperopt_ignore_missing_space",
|
||||
"analyze_per_epoch",
|
||||
]
|
||||
|
||||
ARGS_EDGE = ARGS_COMMON_OPTIMIZE + ["stoploss_range"]
|
||||
|
||||
ARGS_LIST_STRATEGIES = ["strategy_path", "print_one_column", "print_colorized",
|
||||
"recursive_strategy_search"]
|
||||
ARGS_LIST_STRATEGIES = [
|
||||
"strategy_path",
|
||||
"print_one_column",
|
||||
"print_colorized",
|
||||
"recursive_strategy_search",
|
||||
]
|
||||
|
||||
ARGS_LIST_FREQAIMODELS = ["freqaimodel_path", "print_one_column", "print_colorized"]
|
||||
|
||||
@@ -52,12 +90,27 @@ ARGS_LIST_EXCHANGES = ["print_one_column", "list_exchanges_all"]
|
||||
|
||||
ARGS_LIST_TIMEFRAMES = ["exchange", "print_one_column"]
|
||||
|
||||
ARGS_LIST_PAIRS = ["exchange", "print_list", "list_pairs_print_json", "print_one_column",
|
||||
"print_csv", "base_currencies", "quote_currencies", "list_pairs_all",
|
||||
"trading_mode"]
|
||||
ARGS_LIST_PAIRS = [
|
||||
"exchange",
|
||||
"print_list",
|
||||
"list_pairs_print_json",
|
||||
"print_one_column",
|
||||
"print_csv",
|
||||
"base_currencies",
|
||||
"quote_currencies",
|
||||
"list_pairs_all",
|
||||
"trading_mode",
|
||||
]
|
||||
|
||||
ARGS_TEST_PAIRLIST = ["user_data_dir", "verbosity", "config", "quote_currencies",
|
||||
"print_one_column", "list_pairs_print_json", "exchange"]
|
||||
ARGS_TEST_PAIRLIST = [
|
||||
"user_data_dir",
|
||||
"verbosity",
|
||||
"config",
|
||||
"quote_currencies",
|
||||
"print_one_column",
|
||||
"list_pairs_print_json",
|
||||
"exchange",
|
||||
]
|
||||
|
||||
ARGS_CREATE_USERDIR = ["user_data_dir", "reset"]
|
||||
|
||||
@@ -70,22 +123,58 @@ ARGS_CONVERT_DATA_TRADES = ["pairs", "format_from_trades", "format_to", "erase",
|
||||
ARGS_CONVERT_DATA = ["pairs", "format_from", "format_to", "erase", "exchange"]
|
||||
ARGS_CONVERT_DATA_OHLCV = ARGS_CONVERT_DATA + ["timeframes", "trading_mode", "candle_types"]
|
||||
|
||||
ARGS_CONVERT_TRADES = ["pairs", "timeframes", "exchange", "dataformat_ohlcv", "dataformat_trades",
|
||||
"trading_mode"]
|
||||
ARGS_CONVERT_TRADES = [
|
||||
"pairs",
|
||||
"timeframes",
|
||||
"exchange",
|
||||
"dataformat_ohlcv",
|
||||
"dataformat_trades",
|
||||
"trading_mode",
|
||||
]
|
||||
|
||||
ARGS_LIST_DATA = ["exchange", "dataformat_ohlcv", "pairs", "trading_mode", "show_timerange"]
|
||||
|
||||
ARGS_DOWNLOAD_DATA = ["pairs", "pairs_file", "days", "new_pairs_days", "include_inactive",
|
||||
"timerange", "download_trades", "exchange", "timeframes",
|
||||
"erase", "dataformat_ohlcv", "dataformat_trades", "trading_mode",
|
||||
"prepend_data"]
|
||||
ARGS_DOWNLOAD_DATA = [
|
||||
"pairs",
|
||||
"pairs_file",
|
||||
"days",
|
||||
"new_pairs_days",
|
||||
"include_inactive",
|
||||
"timerange",
|
||||
"download_trades",
|
||||
"exchange",
|
||||
"timeframes",
|
||||
"erase",
|
||||
"dataformat_ohlcv",
|
||||
"dataformat_trades",
|
||||
"trading_mode",
|
||||
"prepend_data",
|
||||
]
|
||||
|
||||
ARGS_PLOT_DATAFRAME = ["pairs", "indicators1", "indicators2", "plot_limit",
|
||||
"db_url", "trade_source", "export", "exportfilename",
|
||||
"timerange", "timeframe", "no_trades"]
|
||||
ARGS_PLOT_DATAFRAME = [
|
||||
"pairs",
|
||||
"indicators1",
|
||||
"indicators2",
|
||||
"plot_limit",
|
||||
"db_url",
|
||||
"trade_source",
|
||||
"export",
|
||||
"exportfilename",
|
||||
"timerange",
|
||||
"timeframe",
|
||||
"no_trades",
|
||||
]
|
||||
|
||||
ARGS_PLOT_PROFIT = ["pairs", "timerange", "export", "exportfilename", "db_url",
|
||||
"trade_source", "timeframe", "plot_auto_open", ]
|
||||
ARGS_PLOT_PROFIT = [
|
||||
"pairs",
|
||||
"timerange",
|
||||
"export",
|
||||
"exportfilename",
|
||||
"db_url",
|
||||
"trade_source",
|
||||
"timeframe",
|
||||
"plot_auto_open",
|
||||
]
|
||||
|
||||
ARGS_CONVERT_DB = ["db_url", "db_url_from"]
|
||||
|
||||
@@ -93,36 +182,76 @@ ARGS_INSTALL_UI = ["erase_ui_only", "ui_version"]
|
||||
|
||||
ARGS_SHOW_TRADES = ["db_url", "trade_ids", "print_json"]
|
||||
|
||||
ARGS_HYPEROPT_LIST = ["hyperopt_list_best", "hyperopt_list_profitable",
|
||||
"hyperopt_list_min_trades", "hyperopt_list_max_trades",
|
||||
"hyperopt_list_min_avg_time", "hyperopt_list_max_avg_time",
|
||||
"hyperopt_list_min_avg_profit", "hyperopt_list_max_avg_profit",
|
||||
"hyperopt_list_min_total_profit", "hyperopt_list_max_total_profit",
|
||||
"hyperopt_list_min_objective", "hyperopt_list_max_objective",
|
||||
"print_colorized", "print_json", "hyperopt_list_no_details",
|
||||
"hyperoptexportfilename", "export_csv"]
|
||||
ARGS_HYPEROPT_LIST = [
|
||||
"hyperopt_list_best",
|
||||
"hyperopt_list_profitable",
|
||||
"hyperopt_list_min_trades",
|
||||
"hyperopt_list_max_trades",
|
||||
"hyperopt_list_min_avg_time",
|
||||
"hyperopt_list_max_avg_time",
|
||||
"hyperopt_list_min_avg_profit",
|
||||
"hyperopt_list_max_avg_profit",
|
||||
"hyperopt_list_min_total_profit",
|
||||
"hyperopt_list_max_total_profit",
|
||||
"hyperopt_list_min_objective",
|
||||
"hyperopt_list_max_objective",
|
||||
"print_colorized",
|
||||
"print_json",
|
||||
"hyperopt_list_no_details",
|
||||
"hyperoptexportfilename",
|
||||
"export_csv",
|
||||
]
|
||||
|
||||
ARGS_HYPEROPT_SHOW = ["hyperopt_list_best", "hyperopt_list_profitable", "hyperopt_show_index",
|
||||
"print_json", "hyperoptexportfilename", "hyperopt_show_no_header",
|
||||
"disableparamexport", "backtest_breakdown"]
|
||||
ARGS_HYPEROPT_SHOW = [
|
||||
"hyperopt_list_best",
|
||||
"hyperopt_list_profitable",
|
||||
"hyperopt_show_index",
|
||||
"print_json",
|
||||
"hyperoptexportfilename",
|
||||
"hyperopt_show_no_header",
|
||||
"disableparamexport",
|
||||
"backtest_breakdown",
|
||||
]
|
||||
|
||||
ARGS_ANALYZE_ENTRIES_EXITS = ["exportfilename", "analysis_groups", "enter_reason_list",
|
||||
"exit_reason_list", "indicator_list", "timerange",
|
||||
"analysis_rejected", "analysis_to_csv", "analysis_csv_path"]
|
||||
ARGS_ANALYZE_ENTRIES_EXITS = [
|
||||
"exportfilename",
|
||||
"analysis_groups",
|
||||
"enter_reason_list",
|
||||
"exit_reason_list",
|
||||
"indicator_list",
|
||||
"timerange",
|
||||
"analysis_rejected",
|
||||
"analysis_to_csv",
|
||||
"analysis_csv_path",
|
||||
]
|
||||
|
||||
NO_CONF_REQURIED = ["convert-data", "convert-trade-data", "download-data", "list-timeframes",
|
||||
"list-markets", "list-pairs", "list-strategies", "list-freqaimodels",
|
||||
"list-data", "hyperopt-list", "hyperopt-show", "backtest-filter",
|
||||
"plot-dataframe", "plot-profit", "show-trades", "trades-to-ohlcv",
|
||||
"strategy-updater"]
|
||||
NO_CONF_REQURIED = [
|
||||
"convert-data",
|
||||
"convert-trade-data",
|
||||
"download-data",
|
||||
"list-timeframes",
|
||||
"list-markets",
|
||||
"list-pairs",
|
||||
"list-strategies",
|
||||
"list-freqaimodels",
|
||||
"list-data",
|
||||
"hyperopt-list",
|
||||
"hyperopt-show",
|
||||
"backtest-filter",
|
||||
"plot-dataframe",
|
||||
"plot-profit",
|
||||
"show-trades",
|
||||
"trades-to-ohlcv",
|
||||
"strategy-updater",
|
||||
]
|
||||
|
||||
NO_CONF_ALLOWED = ["create-userdir", "list-exchanges", "new-strategy"]
|
||||
|
||||
ARGS_STRATEGY_UPDATER = ["strategy_list", "strategy_path", "recursive_strategy_search"]
|
||||
|
||||
ARGS_LOOKAHEAD_ANALYSIS = [
|
||||
a for a in ARGS_BACKTEST if a not in ("position_stacking", "use_max_market_positions", 'cache')
|
||||
] + ["minimum_trade_amount", "targeted_trade_amount", "lookahead_analysis_exportfilename"]
|
||||
a for a in ARGS_BACKTEST if a not in ("position_stacking", "use_max_market_positions", "cache")
|
||||
] + ["minimum_trade_amount", "targeted_trade_amount", "lookahead_analysis_exportfilename"]
|
||||
|
||||
ARGS_RECURSIVE_ANALYSIS = ["timeframe", "timerange", "dataformat_ohlcv", "pairs", "startup_candle"]
|
||||
|
||||
@@ -156,14 +285,14 @@ class Arguments:
|
||||
# Workaround issue in argparse with action='append' and default value
|
||||
# (see https://bugs.python.org/issue16399)
|
||||
# Allow no-config for certain commands (like downloading / plotting)
|
||||
if ('config' in parsed_arg and parsed_arg.config is None):
|
||||
conf_required = ('command' in parsed_arg and parsed_arg.command in NO_CONF_REQURIED)
|
||||
if "config" in parsed_arg and parsed_arg.config is None:
|
||||
conf_required = "command" in parsed_arg and parsed_arg.command in NO_CONF_REQURIED
|
||||
|
||||
if 'user_data_dir' in parsed_arg and parsed_arg.user_data_dir is not None:
|
||||
if "user_data_dir" in parsed_arg and parsed_arg.user_data_dir is not None:
|
||||
user_dir = parsed_arg.user_data_dir
|
||||
else:
|
||||
# Default case
|
||||
user_dir = 'user_data'
|
||||
user_dir = "user_data"
|
||||
# Try loading from "user_data/config.json"
|
||||
cfgfile = Path(user_dir) / DEFAULT_CONFIG
|
||||
if cfgfile.is_file():
|
||||
@@ -177,7 +306,6 @@ class Arguments:
|
||||
return parsed_arg
|
||||
|
||||
def _build_args(self, optionlist, parser):
|
||||
|
||||
for val in optionlist:
|
||||
opt = AVAILABLE_CLI_OPTIONS[val]
|
||||
parser.add_argument(*opt.cli, dest=val, **opt.kwargs)
|
||||
@@ -198,43 +326,61 @@ class Arguments:
|
||||
|
||||
# Build main command
|
||||
self.parser = argparse.ArgumentParser(
|
||||
prog="freqtrade",
|
||||
description='Free, open source crypto trading bot'
|
||||
prog="freqtrade", description="Free, open source crypto trading bot"
|
||||
)
|
||||
self._build_args(optionlist=['version'], parser=self.parser)
|
||||
self._build_args(optionlist=["version"], parser=self.parser)
|
||||
|
||||
from freqtrade.commands import (start_analysis_entries_exits, start_backtesting,
|
||||
start_backtesting_show, start_convert_data,
|
||||
start_convert_db, start_convert_trades,
|
||||
start_create_userdir, start_download_data, start_edge,
|
||||
start_hyperopt, start_hyperopt_list, start_hyperopt_show,
|
||||
start_install_ui, start_list_data, start_list_exchanges,
|
||||
start_list_freqAI_models, start_list_markets,
|
||||
start_list_strategies, start_list_timeframes,
|
||||
start_lookahead_analysis, start_new_config,
|
||||
start_new_strategy, start_plot_dataframe, start_plot_profit,
|
||||
start_recursive_analysis, start_show_config,
|
||||
start_show_trades, start_strategy_update,
|
||||
start_test_pairlist, start_trading, start_webserver)
|
||||
from freqtrade.commands import (
|
||||
start_analysis_entries_exits,
|
||||
start_backtesting,
|
||||
start_backtesting_show,
|
||||
start_convert_data,
|
||||
start_convert_db,
|
||||
start_convert_trades,
|
||||
start_create_userdir,
|
||||
start_download_data,
|
||||
start_edge,
|
||||
start_hyperopt,
|
||||
start_hyperopt_list,
|
||||
start_hyperopt_show,
|
||||
start_install_ui,
|
||||
start_list_data,
|
||||
start_list_exchanges,
|
||||
start_list_freqAI_models,
|
||||
start_list_markets,
|
||||
start_list_strategies,
|
||||
start_list_timeframes,
|
||||
start_lookahead_analysis,
|
||||
start_new_config,
|
||||
start_new_strategy,
|
||||
start_plot_dataframe,
|
||||
start_plot_profit,
|
||||
start_recursive_analysis,
|
||||
start_show_config,
|
||||
start_show_trades,
|
||||
start_strategy_update,
|
||||
start_test_pairlist,
|
||||
start_trading,
|
||||
start_webserver,
|
||||
)
|
||||
|
||||
subparsers = self.parser.add_subparsers(dest='command',
|
||||
# Use custom message when no subhandler is added
|
||||
# shown from `main.py`
|
||||
# required=True
|
||||
)
|
||||
subparsers = self.parser.add_subparsers(
|
||||
dest="command",
|
||||
# Use custom message when no subhandler is added
|
||||
# shown from `main.py`
|
||||
# required=True
|
||||
)
|
||||
|
||||
# Add trade subcommand
|
||||
trade_cmd = subparsers.add_parser(
|
||||
'trade',
|
||||
help='Trade module.',
|
||||
parents=[_common_parser, _strategy_parser]
|
||||
"trade", help="Trade module.", parents=[_common_parser, _strategy_parser]
|
||||
)
|
||||
trade_cmd.set_defaults(func=start_trading)
|
||||
self._build_args(optionlist=ARGS_TRADE, parser=trade_cmd)
|
||||
|
||||
# add create-userdir subcommand
|
||||
create_userdir_cmd = subparsers.add_parser(
|
||||
'create-userdir',
|
||||
"create-userdir",
|
||||
help="Create user-data directory.",
|
||||
)
|
||||
create_userdir_cmd.set_defaults(func=start_create_userdir)
|
||||
@@ -242,7 +388,7 @@ class Arguments:
|
||||
|
||||
# add new-config subcommand
|
||||
build_config_cmd = subparsers.add_parser(
|
||||
'new-config',
|
||||
"new-config",
|
||||
help="Create new config",
|
||||
)
|
||||
build_config_cmd.set_defaults(func=start_new_config)
|
||||
@@ -250,7 +396,7 @@ class Arguments:
|
||||
|
||||
# add show-config subcommand
|
||||
show_config_cmd = subparsers.add_parser(
|
||||
'show-config',
|
||||
"show-config",
|
||||
help="Show resolved config",
|
||||
)
|
||||
show_config_cmd.set_defaults(func=start_show_config)
|
||||
@@ -258,7 +404,7 @@ class Arguments:
|
||||
|
||||
# add new-strategy subcommand
|
||||
build_strategy_cmd = subparsers.add_parser(
|
||||
'new-strategy',
|
||||
"new-strategy",
|
||||
help="Create new strategy",
|
||||
)
|
||||
build_strategy_cmd.set_defaults(func=start_new_strategy)
|
||||
@@ -266,8 +412,8 @@ class Arguments:
|
||||
|
||||
# Add download-data subcommand
|
||||
download_data_cmd = subparsers.add_parser(
|
||||
'download-data',
|
||||
help='Download backtesting data.',
|
||||
"download-data",
|
||||
help="Download backtesting data.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
download_data_cmd.set_defaults(func=start_download_data)
|
||||
@@ -275,8 +421,8 @@ class Arguments:
|
||||
|
||||
# Add convert-data subcommand
|
||||
convert_data_cmd = subparsers.add_parser(
|
||||
'convert-data',
|
||||
help='Convert candle (OHLCV) data from one format to another.',
|
||||
"convert-data",
|
||||
help="Convert candle (OHLCV) data from one format to another.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
convert_data_cmd.set_defaults(func=partial(start_convert_data, ohlcv=True))
|
||||
@@ -284,8 +430,8 @@ class Arguments:
|
||||
|
||||
# Add convert-trade-data subcommand
|
||||
convert_trade_data_cmd = subparsers.add_parser(
|
||||
'convert-trade-data',
|
||||
help='Convert trade data from one format to another.',
|
||||
"convert-trade-data",
|
||||
help="Convert trade data from one format to another.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
convert_trade_data_cmd.set_defaults(func=partial(start_convert_data, ohlcv=False))
|
||||
@@ -293,8 +439,8 @@ class Arguments:
|
||||
|
||||
# Add trades-to-ohlcv subcommand
|
||||
convert_trade_data_cmd = subparsers.add_parser(
|
||||
'trades-to-ohlcv',
|
||||
help='Convert trade data to OHLCV data.',
|
||||
"trades-to-ohlcv",
|
||||
help="Convert trade data to OHLCV data.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
convert_trade_data_cmd.set_defaults(func=start_convert_trades)
|
||||
@@ -302,8 +448,8 @@ class Arguments:
|
||||
|
||||
# Add list-data subcommand
|
||||
list_data_cmd = subparsers.add_parser(
|
||||
'list-data',
|
||||
help='List downloaded data.',
|
||||
"list-data",
|
||||
help="List downloaded data.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_data_cmd.set_defaults(func=start_list_data)
|
||||
@@ -311,17 +457,15 @@ class Arguments:
|
||||
|
||||
# Add backtesting subcommand
|
||||
backtesting_cmd = subparsers.add_parser(
|
||||
'backtesting',
|
||||
help='Backtesting module.',
|
||||
parents=[_common_parser, _strategy_parser]
|
||||
"backtesting", help="Backtesting module.", parents=[_common_parser, _strategy_parser]
|
||||
)
|
||||
backtesting_cmd.set_defaults(func=start_backtesting)
|
||||
self._build_args(optionlist=ARGS_BACKTEST, parser=backtesting_cmd)
|
||||
|
||||
# Add backtesting-show subcommand
|
||||
backtesting_show_cmd = subparsers.add_parser(
|
||||
'backtesting-show',
|
||||
help='Show past Backtest results',
|
||||
"backtesting-show",
|
||||
help="Show past Backtest results",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
backtesting_show_cmd.set_defaults(func=start_backtesting_show)
|
||||
@@ -329,26 +473,22 @@ class Arguments:
|
||||
|
||||
# Add backtesting analysis subcommand
|
||||
analysis_cmd = subparsers.add_parser(
|
||||
'backtesting-analysis',
|
||||
help='Backtest Analysis module.',
|
||||
parents=[_common_parser]
|
||||
"backtesting-analysis", help="Backtest Analysis module.", parents=[_common_parser]
|
||||
)
|
||||
analysis_cmd.set_defaults(func=start_analysis_entries_exits)
|
||||
self._build_args(optionlist=ARGS_ANALYZE_ENTRIES_EXITS, parser=analysis_cmd)
|
||||
|
||||
# Add edge subcommand
|
||||
edge_cmd = subparsers.add_parser(
|
||||
'edge',
|
||||
help='Edge module.',
|
||||
parents=[_common_parser, _strategy_parser]
|
||||
"edge", help="Edge module.", parents=[_common_parser, _strategy_parser]
|
||||
)
|
||||
edge_cmd.set_defaults(func=start_edge)
|
||||
self._build_args(optionlist=ARGS_EDGE, parser=edge_cmd)
|
||||
|
||||
# Add hyperopt subcommand
|
||||
hyperopt_cmd = subparsers.add_parser(
|
||||
'hyperopt',
|
||||
help='Hyperopt module.',
|
||||
"hyperopt",
|
||||
help="Hyperopt module.",
|
||||
parents=[_common_parser, _strategy_parser],
|
||||
)
|
||||
hyperopt_cmd.set_defaults(func=start_hyperopt)
|
||||
@@ -356,8 +496,8 @@ class Arguments:
|
||||
|
||||
# Add hyperopt-list subcommand
|
||||
hyperopt_list_cmd = subparsers.add_parser(
|
||||
'hyperopt-list',
|
||||
help='List Hyperopt results',
|
||||
"hyperopt-list",
|
||||
help="List Hyperopt results",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
hyperopt_list_cmd.set_defaults(func=start_hyperopt_list)
|
||||
@@ -365,8 +505,8 @@ class Arguments:
|
||||
|
||||
# Add hyperopt-show subcommand
|
||||
hyperopt_show_cmd = subparsers.add_parser(
|
||||
'hyperopt-show',
|
||||
help='Show details of Hyperopt results',
|
||||
"hyperopt-show",
|
||||
help="Show details of Hyperopt results",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
hyperopt_show_cmd.set_defaults(func=start_hyperopt_show)
|
||||
@@ -374,8 +514,8 @@ class Arguments:
|
||||
|
||||
# Add list-exchanges subcommand
|
||||
list_exchanges_cmd = subparsers.add_parser(
|
||||
'list-exchanges',
|
||||
help='Print available exchanges.',
|
||||
"list-exchanges",
|
||||
help="Print available exchanges.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_exchanges_cmd.set_defaults(func=start_list_exchanges)
|
||||
@@ -383,8 +523,8 @@ class Arguments:
|
||||
|
||||
# Add list-markets subcommand
|
||||
list_markets_cmd = subparsers.add_parser(
|
||||
'list-markets',
|
||||
help='Print markets on exchange.',
|
||||
"list-markets",
|
||||
help="Print markets on exchange.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_markets_cmd.set_defaults(func=partial(start_list_markets, pairs_only=False))
|
||||
@@ -392,8 +532,8 @@ class Arguments:
|
||||
|
||||
# Add list-pairs subcommand
|
||||
list_pairs_cmd = subparsers.add_parser(
|
||||
'list-pairs',
|
||||
help='Print pairs on exchange.',
|
||||
"list-pairs",
|
||||
help="Print pairs on exchange.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_pairs_cmd.set_defaults(func=partial(start_list_markets, pairs_only=True))
|
||||
@@ -401,8 +541,8 @@ class Arguments:
|
||||
|
||||
# Add list-strategies subcommand
|
||||
list_strategies_cmd = subparsers.add_parser(
|
||||
'list-strategies',
|
||||
help='Print available strategies.',
|
||||
"list-strategies",
|
||||
help="Print available strategies.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_strategies_cmd.set_defaults(func=start_list_strategies)
|
||||
@@ -410,8 +550,8 @@ class Arguments:
|
||||
|
||||
# Add list-freqAI Models subcommand
|
||||
list_freqaimodels_cmd = subparsers.add_parser(
|
||||
'list-freqaimodels',
|
||||
help='Print available freqAI models.',
|
||||
"list-freqaimodels",
|
||||
help="Print available freqAI models.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_freqaimodels_cmd.set_defaults(func=start_list_freqAI_models)
|
||||
@@ -419,8 +559,8 @@ class Arguments:
|
||||
|
||||
# Add list-timeframes subcommand
|
||||
list_timeframes_cmd = subparsers.add_parser(
|
||||
'list-timeframes',
|
||||
help='Print available timeframes for the exchange.',
|
||||
"list-timeframes",
|
||||
help="Print available timeframes for the exchange.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_timeframes_cmd.set_defaults(func=start_list_timeframes)
|
||||
@@ -428,8 +568,8 @@ class Arguments:
|
||||
|
||||
# Add show-trades subcommand
|
||||
show_trades = subparsers.add_parser(
|
||||
'show-trades',
|
||||
help='Show trades.',
|
||||
"show-trades",
|
||||
help="Show trades.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
show_trades.set_defaults(func=start_show_trades)
|
||||
@@ -437,8 +577,8 @@ class Arguments:
|
||||
|
||||
# Add test-pairlist subcommand
|
||||
test_pairlist_cmd = subparsers.add_parser(
|
||||
'test-pairlist',
|
||||
help='Test your pairlist configuration.',
|
||||
"test-pairlist",
|
||||
help="Test your pairlist configuration.",
|
||||
)
|
||||
test_pairlist_cmd.set_defaults(func=start_test_pairlist)
|
||||
self._build_args(optionlist=ARGS_TEST_PAIRLIST, parser=test_pairlist_cmd)
|
||||
@@ -453,16 +593,16 @@ class Arguments:
|
||||
|
||||
# Add install-ui subcommand
|
||||
install_ui_cmd = subparsers.add_parser(
|
||||
'install-ui',
|
||||
help='Install FreqUI',
|
||||
"install-ui",
|
||||
help="Install FreqUI",
|
||||
)
|
||||
install_ui_cmd.set_defaults(func=start_install_ui)
|
||||
self._build_args(optionlist=ARGS_INSTALL_UI, parser=install_ui_cmd)
|
||||
|
||||
# Add Plotting subcommand
|
||||
plot_dataframe_cmd = subparsers.add_parser(
|
||||
'plot-dataframe',
|
||||
help='Plot candles with indicators.',
|
||||
"plot-dataframe",
|
||||
help="Plot candles with indicators.",
|
||||
parents=[_common_parser, _strategy_parser],
|
||||
)
|
||||
plot_dataframe_cmd.set_defaults(func=start_plot_dataframe)
|
||||
@@ -470,8 +610,8 @@ class Arguments:
|
||||
|
||||
# Plot profit
|
||||
plot_profit_cmd = subparsers.add_parser(
|
||||
'plot-profit',
|
||||
help='Generate plot showing profits.',
|
||||
"plot-profit",
|
||||
help="Generate plot showing profits.",
|
||||
parents=[_common_parser, _strategy_parser],
|
||||
)
|
||||
plot_profit_cmd.set_defaults(func=start_plot_profit)
|
||||
@@ -479,40 +619,36 @@ class Arguments:
|
||||
|
||||
# Add webserver subcommand
|
||||
webserver_cmd = subparsers.add_parser(
|
||||
'webserver',
|
||||
help='Webserver module.',
|
||||
parents=[_common_parser]
|
||||
"webserver", help="Webserver module.", parents=[_common_parser]
|
||||
)
|
||||
webserver_cmd.set_defaults(func=start_webserver)
|
||||
self._build_args(optionlist=ARGS_WEBSERVER, parser=webserver_cmd)
|
||||
|
||||
# Add strategy_updater subcommand
|
||||
strategy_updater_cmd = subparsers.add_parser(
|
||||
'strategy-updater',
|
||||
help='updates outdated strategy files to the current version',
|
||||
parents=[_common_parser]
|
||||
"strategy-updater",
|
||||
help="updates outdated strategy files to the current version",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
strategy_updater_cmd.set_defaults(func=start_strategy_update)
|
||||
self._build_args(optionlist=ARGS_STRATEGY_UPDATER, parser=strategy_updater_cmd)
|
||||
|
||||
# Add lookahead_analysis subcommand
|
||||
lookahead_analayis_cmd = subparsers.add_parser(
|
||||
'lookahead-analysis',
|
||||
"lookahead-analysis",
|
||||
help="Check for potential look ahead bias.",
|
||||
parents=[_common_parser, _strategy_parser]
|
||||
parents=[_common_parser, _strategy_parser],
|
||||
)
|
||||
lookahead_analayis_cmd.set_defaults(func=start_lookahead_analysis)
|
||||
|
||||
self._build_args(optionlist=ARGS_LOOKAHEAD_ANALYSIS,
|
||||
parser=lookahead_analayis_cmd)
|
||||
self._build_args(optionlist=ARGS_LOOKAHEAD_ANALYSIS, parser=lookahead_analayis_cmd)
|
||||
|
||||
# Add recursive_analysis subcommand
|
||||
recursive_analayis_cmd = subparsers.add_parser(
|
||||
'recursive-analysis',
|
||||
"recursive-analysis",
|
||||
help="Check for potential recursive formula issue.",
|
||||
parents=[_common_parser, _strategy_parser]
|
||||
parents=[_common_parser, _strategy_parser],
|
||||
)
|
||||
recursive_analayis_cmd.set_defaults(func=start_recursive_analysis)
|
||||
|
||||
self._build_args(optionlist=ARGS_RECURSIVE_ANALYSIS,
|
||||
parser=recursive_analayis_cmd)
|
||||
self._build_args(optionlist=ARGS_RECURSIVE_ANALYSIS, parser=recursive_analayis_cmd)
|
||||
|
||||
@@ -45,7 +45,7 @@ def ask_user_overwrite(config_path: Path) -> bool:
|
||||
},
|
||||
]
|
||||
answers = prompt(questions)
|
||||
return answers['overwrite']
|
||||
return answers["overwrite"]
|
||||
|
||||
|
||||
def ask_user_config() -> Dict[str, Any]:
|
||||
@@ -65,7 +65,7 @@ def ask_user_config() -> Dict[str, Any]:
|
||||
"type": "text",
|
||||
"name": "stake_currency",
|
||||
"message": "Please insert your stake currency:",
|
||||
"default": 'USDT',
|
||||
"default": "USDT",
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
@@ -73,36 +73,35 @@ def ask_user_config() -> Dict[str, Any]:
|
||||
"message": f"Please insert your stake amount (Number or '{UNLIMITED_STAKE_AMOUNT}'):",
|
||||
"default": "unlimited",
|
||||
"validate": lambda val: val == UNLIMITED_STAKE_AMOUNT or validate_is_float(val),
|
||||
"filter": lambda val: '"' + UNLIMITED_STAKE_AMOUNT + '"'
|
||||
if val == UNLIMITED_STAKE_AMOUNT
|
||||
else val
|
||||
"filter": lambda val: (
|
||||
'"' + UNLIMITED_STAKE_AMOUNT + '"' if val == UNLIMITED_STAKE_AMOUNT else val
|
||||
),
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "max_open_trades",
|
||||
"message": "Please insert max_open_trades (Integer or -1 for unlimited open trades):",
|
||||
"default": "3",
|
||||
"validate": lambda val: validate_is_int(val)
|
||||
"validate": lambda val: validate_is_int(val),
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
"name": "timeframe_in_config",
|
||||
"message": "Time",
|
||||
"choices": ["Have the strategy define timeframe.", "Override in configuration."]
|
||||
"choices": ["Have the strategy define timeframe.", "Override in configuration."],
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "timeframe",
|
||||
"message": "Please insert your desired timeframe (e.g. 5m):",
|
||||
"default": "5m",
|
||||
"when": lambda x: x["timeframe_in_config"] == 'Override in configuration.'
|
||||
|
||||
"when": lambda x: x["timeframe_in_config"] == "Override in configuration.",
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "fiat_display_currency",
|
||||
"message": "Please insert your display Currency (for reporting):",
|
||||
"default": 'USD',
|
||||
"default": "USD",
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
@@ -125,33 +124,33 @@ def ask_user_config() -> Dict[str, Any]:
|
||||
"name": "trading_mode",
|
||||
"message": "Do you want to trade Perpetual Swaps (perpetual futures)?",
|
||||
"default": False,
|
||||
"filter": lambda val: 'futures' if val else 'spot',
|
||||
"when": lambda x: x["exchange_name"] in ['binance', 'gate', 'okx'],
|
||||
"filter": lambda val: "futures" if val else "spot",
|
||||
"when": lambda x: x["exchange_name"] in ["binance", "gate", "okx"],
|
||||
},
|
||||
{
|
||||
"type": "autocomplete",
|
||||
"name": "exchange_name",
|
||||
"message": "Type your exchange name (Must be supported by ccxt)",
|
||||
"choices": available_exchanges(),
|
||||
"when": lambda x: x["exchange_name"] == 'other'
|
||||
"when": lambda x: x["exchange_name"] == "other",
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "exchange_key",
|
||||
"message": "Insert Exchange Key",
|
||||
"when": lambda x: not x['dry_run']
|
||||
"when": lambda x: not x["dry_run"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "exchange_secret",
|
||||
"message": "Insert Exchange Secret",
|
||||
"when": lambda x: not x['dry_run']
|
||||
"when": lambda x: not x["dry_run"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "exchange_key_password",
|
||||
"message": "Insert Exchange API Key password",
|
||||
"when": lambda x: not x['dry_run'] and x['exchange_name'] in ('kucoin', 'okx')
|
||||
"when": lambda x: not x["dry_run"] and x["exchange_name"] in ("kucoin", "okx"),
|
||||
},
|
||||
{
|
||||
"type": "confirm",
|
||||
@@ -163,13 +162,13 @@ def ask_user_config() -> Dict[str, Any]:
|
||||
"type": "password",
|
||||
"name": "telegram_token",
|
||||
"message": "Insert Telegram token",
|
||||
"when": lambda x: x['telegram']
|
||||
"when": lambda x: x["telegram"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "telegram_chat_id",
|
||||
"message": "Insert Telegram chat id",
|
||||
"when": lambda x: x['telegram']
|
||||
"when": lambda x: x["telegram"],
|
||||
},
|
||||
{
|
||||
"type": "confirm",
|
||||
@@ -180,23 +179,25 @@ def ask_user_config() -> Dict[str, Any]:
|
||||
{
|
||||
"type": "text",
|
||||
"name": "api_server_listen_addr",
|
||||
"message": ("Insert Api server Listen Address (0.0.0.0 for docker, "
|
||||
"otherwise best left untouched)"),
|
||||
"message": (
|
||||
"Insert Api server Listen Address (0.0.0.0 for docker, "
|
||||
"otherwise best left untouched)"
|
||||
),
|
||||
"default": "127.0.0.1" if not running_in_docker() else "0.0.0.0",
|
||||
"when": lambda x: x['api_server']
|
||||
"when": lambda x: x["api_server"],
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "api_server_username",
|
||||
"message": "Insert api-server username",
|
||||
"default": "freqtrader",
|
||||
"when": lambda x: x['api_server']
|
||||
"when": lambda x: x["api_server"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "api_server_password",
|
||||
"message": "Insert api-server password",
|
||||
"when": lambda x: x['api_server']
|
||||
"when": lambda x: x["api_server"],
|
||||
},
|
||||
]
|
||||
answers = prompt(questions)
|
||||
@@ -205,15 +206,11 @@ def ask_user_config() -> Dict[str, Any]:
|
||||
# Interrupted questionary sessions return an empty dict.
|
||||
raise OperationalException("User interrupted interactive questions.")
|
||||
# Ensure default is set for non-futures exchanges
|
||||
answers['trading_mode'] = answers.get('trading_mode', "spot")
|
||||
answers['margin_mode'] = (
|
||||
'isolated'
|
||||
if answers.get('trading_mode') == 'futures'
|
||||
else ''
|
||||
)
|
||||
answers["trading_mode"] = answers.get("trading_mode", "spot")
|
||||
answers["margin_mode"] = "isolated" if answers.get("trading_mode") == "futures" else ""
|
||||
# Force JWT token to be a random string
|
||||
answers['api_server_jwt_key'] = secrets.token_hex()
|
||||
answers['api_server_ws_token'] = secrets.token_urlsafe(25)
|
||||
answers["api_server_jwt_key"] = secrets.token_hex()
|
||||
answers["api_server_ws_token"] = secrets.token_urlsafe(25)
|
||||
|
||||
return answers
|
||||
|
||||
@@ -225,26 +222,26 @@ def deploy_new_config(config_path: Path, selections: Dict[str, Any]) -> None:
|
||||
:param selections: Dict containing selections taken by the user.
|
||||
"""
|
||||
from jinja2.exceptions import TemplateNotFound
|
||||
|
||||
try:
|
||||
exchange_template = MAP_EXCHANGE_CHILDCLASS.get(
|
||||
selections['exchange_name'], selections['exchange_name'])
|
||||
selections["exchange_name"], selections["exchange_name"]
|
||||
)
|
||||
|
||||
selections['exchange'] = render_template(
|
||||
templatefile=f"subtemplates/exchange_{exchange_template}.j2",
|
||||
arguments=selections
|
||||
selections["exchange"] = render_template(
|
||||
templatefile=f"subtemplates/exchange_{exchange_template}.j2", arguments=selections
|
||||
)
|
||||
except TemplateNotFound:
|
||||
selections['exchange'] = render_template(
|
||||
templatefile="subtemplates/exchange_generic.j2",
|
||||
arguments=selections
|
||||
selections["exchange"] = render_template(
|
||||
templatefile="subtemplates/exchange_generic.j2", arguments=selections
|
||||
)
|
||||
|
||||
config_text = render_template(templatefile='base_config.json.j2',
|
||||
arguments=selections)
|
||||
config_text = render_template(templatefile="base_config.json.j2", arguments=selections)
|
||||
|
||||
logger.info(f"Writing config to `{config_path}`.")
|
||||
logger.info(
|
||||
"Please make sure to check the configuration contents and adjust settings to your needs.")
|
||||
"Please make sure to check the configuration contents and adjust settings to your needs."
|
||||
)
|
||||
|
||||
config_path.write_text(config_text)
|
||||
|
||||
@@ -255,7 +252,7 @@ def start_new_config(args: Dict[str, Any]) -> None:
|
||||
Asking the user questions to fill out the template accordingly.
|
||||
"""
|
||||
|
||||
config_path = Path(args['config'][0])
|
||||
config_path = Path(args["config"][0])
|
||||
chown_user_directory(config_path.parent)
|
||||
if config_path.exists():
|
||||
overwrite = ask_user_overwrite(config_path)
|
||||
@@ -264,22 +261,22 @@ def start_new_config(args: Dict[str, Any]) -> None:
|
||||
else:
|
||||
raise OperationalException(
|
||||
f"Configuration file `{config_path}` already exists. "
|
||||
"Please delete it or use a different configuration file name.")
|
||||
"Please delete it or use a different configuration file name."
|
||||
)
|
||||
selections = ask_user_config()
|
||||
deploy_new_config(config_path, selections)
|
||||
|
||||
|
||||
def start_show_config(args: Dict[str, Any]) -> None:
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE, set_dry=False)
|
||||
|
||||
# TODO: Sanitize from sensitive info before printing
|
||||
|
||||
print("Your combined configuration is:")
|
||||
config_sanitized = sanitize_config(
|
||||
config['original_config'],
|
||||
show_sensitive=args.get('show_sensitive', False)
|
||||
config["original_config"], show_sensitive=args.get("show_sensitive", False)
|
||||
)
|
||||
|
||||
from rich import print_json
|
||||
|
||||
print_json(data=config_sanitized)
|
||||
|
||||
File diff suppressed because it is too large
@@ -5,8 +5,11 @@ from typing import Any, Dict
|
||||
|
||||
from freqtrade.configuration import TimeRange, setup_utils_configuration
|
||||
from freqtrade.constants import DATETIME_PRINT_FORMAT, DL_DATA_TIMEFRAMES, Config
|
||||
from freqtrade.data.converter import (convert_ohlcv_format, convert_trades_format,
|
||||
convert_trades_to_ohlcv)
|
||||
from freqtrade.data.converter import (
|
||||
convert_ohlcv_format,
|
||||
convert_trades_format,
|
||||
convert_trades_to_ohlcv,
|
||||
)
|
||||
from freqtrade.data.history import download_data_main
|
||||
from freqtrade.enums import CandleType, RunMode, TradingMode
|
||||
from freqtrade.exceptions import ConfigurationError
|
||||
@@ -20,14 +23,17 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _check_data_config_download_sanity(config: Config) -> None:
|
||||
if 'days' in config and 'timerange' in config:
|
||||
raise ConfigurationError("--days and --timerange are mutually exclusive. "
|
||||
"You can only specify one or the other.")
|
||||
if "days" in config and "timerange" in config:
|
||||
raise ConfigurationError(
|
||||
"--days and --timerange are mutually exclusive. "
|
||||
"You can only specify one or the other."
|
||||
)
|
||||
|
||||
if 'pairs' not in config:
|
||||
if "pairs" not in config:
|
||||
raise ConfigurationError(
|
||||
"Downloading data requires a list of pairs. "
|
||||
"Please check the documentation on how to configure this.")
|
||||
"Please check the documentation on how to configure this."
|
||||
)
|
||||
|
||||
|
||||
def start_download_data(args: Dict[str, Any]) -> None:
|
||||
@@ -46,38 +52,41 @@ def start_download_data(args: Dict[str, Any]) -> None:
|
||||
|
||||
|
||||
def start_convert_trades(args: Dict[str, Any]) -> None:
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)
|
||||
|
||||
timerange = TimeRange()
|
||||
|
||||
# Remove stake-currency to skip checks which are not relevant for datadownload
|
||||
config['stake_currency'] = ''
|
||||
config["stake_currency"] = ""
|
||||
|
||||
if 'timeframes' not in config:
|
||||
config['timeframes'] = DL_DATA_TIMEFRAMES
|
||||
if "timeframes" not in config:
|
||||
config["timeframes"] = DL_DATA_TIMEFRAMES
|
||||
|
||||
# Init exchange
|
||||
exchange = ExchangeResolver.load_exchange(config, validate=False)
|
||||
# Manual validations of relevant settings
|
||||
|
||||
for timeframe in config['timeframes']:
|
||||
for timeframe in config["timeframes"]:
|
||||
exchange.validate_timeframes(timeframe)
|
||||
available_pairs = [
|
||||
p for p in exchange.get_markets(
|
||||
tradable_only=True, active_only=not config.get('include_inactive')
|
||||
).keys()
|
||||
p
|
||||
for p in exchange.get_markets(
|
||||
tradable_only=True, active_only=not config.get("include_inactive")
|
||||
).keys()
|
||||
]
|
||||
|
||||
expanded_pairs = dynamic_expand_pairlist(config, available_pairs)
|
||||
|
||||
# Convert downloaded trade data to different timeframes
|
||||
convert_trades_to_ohlcv(
|
||||
pairs=expanded_pairs, timeframes=config['timeframes'],
|
||||
datadir=config['datadir'], timerange=timerange, erase=bool(config.get('erase')),
|
||||
data_format_ohlcv=config['dataformat_ohlcv'],
|
||||
data_format_trades=config['dataformat_trades'],
|
||||
candle_type=config.get('candle_type_def', CandleType.SPOT)
|
||||
pairs=expanded_pairs,
|
||||
timeframes=config["timeframes"],
|
||||
datadir=config["datadir"],
|
||||
timerange=timerange,
|
||||
erase=bool(config.get("erase")),
|
||||
data_format_ohlcv=config["dataformat_ohlcv"],
|
||||
data_format_trades=config["dataformat_trades"],
|
||||
candle_type=config.get("candle_type_def", CandleType.SPOT),
|
||||
)
|
||||
|
||||
|
||||
@@ -88,14 +97,19 @@ def start_convert_data(args: Dict[str, Any], ohlcv: bool = True) -> None:
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
if ohlcv:
|
||||
migrate_data(config)
|
||||
convert_ohlcv_format(config,
|
||||
convert_from=args['format_from'],
|
||||
convert_to=args['format_to'],
|
||||
erase=args['erase'])
|
||||
convert_ohlcv_format(
|
||||
config,
|
||||
convert_from=args["format_from"],
|
||||
convert_to=args["format_to"],
|
||||
erase=args["erase"],
|
||||
)
|
||||
else:
|
||||
convert_trades_format(config,
|
||||
convert_from=args['format_from_trades'], convert_to=args['format_to'],
|
||||
erase=args['erase'])
|
||||
convert_trades_format(
|
||||
config,
|
||||
convert_from=args["format_from_trades"],
|
||||
convert_to=args["format_to"],
|
||||
erase=args["erase"],
|
||||
)
|
||||
|
||||
|
||||
def start_list_data(args: Dict[str, Any]) -> None:
|
||||
@@ -108,45 +122,59 @@ def start_list_data(args: Dict[str, Any]) -> None:
|
||||
from tabulate import tabulate
|
||||
|
||||
from freqtrade.data.history import get_datahandler
|
||||
dhc = get_datahandler(config['datadir'], config['dataformat_ohlcv'])
|
||||
|
||||
dhc = get_datahandler(config["datadir"], config["dataformat_ohlcv"])
|
||||
|
||||
paircombs = dhc.ohlcv_get_available_data(
|
||||
config['datadir'],
|
||||
config.get('trading_mode', TradingMode.SPOT)
|
||||
)
|
||||
config["datadir"], config.get("trading_mode", TradingMode.SPOT)
|
||||
)
|
||||
|
||||
if args['pairs']:
|
||||
paircombs = [comb for comb in paircombs if comb[0] in args['pairs']]
|
||||
if args["pairs"]:
|
||||
paircombs = [comb for comb in paircombs if comb[0] in args["pairs"]]
|
||||
|
||||
print(f"Found {len(paircombs)} pair / timeframe combinations.")
|
||||
if not config.get('show_timerange'):
|
||||
if not config.get("show_timerange"):
|
||||
groupedpair = defaultdict(list)
|
||||
for pair, timeframe, candle_type in sorted(
|
||||
paircombs,
|
||||
key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2])
|
||||
paircombs, key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2])
|
||||
):
|
||||
groupedpair[(pair, candle_type)].append(timeframe)
|
||||
|
||||
if groupedpair:
|
||||
print(tabulate([
|
||||
(pair, ', '.join(timeframes), candle_type)
|
||||
for (pair, candle_type), timeframes in groupedpair.items()
|
||||
],
|
||||
headers=("Pair", "Timeframe", "Type"),
|
||||
tablefmt='psql', stralign='right'))
|
||||
print(
|
||||
tabulate(
|
||||
[
|
||||
(pair, ", ".join(timeframes), candle_type)
|
||||
for (pair, candle_type), timeframes in groupedpair.items()
|
||||
],
|
||||
headers=("Pair", "Timeframe", "Type"),
|
||||
tablefmt="psql",
|
||||
stralign="right",
|
||||
)
|
||||
)
|
||||
else:
|
||||
paircombs1 = [(
|
||||
pair, timeframe, candle_type,
|
||||
*dhc.ohlcv_data_min_max(pair, timeframe, candle_type)
|
||||
) for pair, timeframe, candle_type in paircombs]
|
||||
paircombs1 = [
|
||||
(pair, timeframe, candle_type, *dhc.ohlcv_data_min_max(pair, timeframe, candle_type))
|
||||
for pair, timeframe, candle_type in paircombs
|
||||
]
|
||||
|
||||
print(tabulate([
|
||||
(pair, timeframe, candle_type,
|
||||
start.strftime(DATETIME_PRINT_FORMAT),
|
||||
end.strftime(DATETIME_PRINT_FORMAT), length)
|
||||
for pair, timeframe, candle_type, start, end, length in sorted(
|
||||
paircombs1,
|
||||
key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2]))
|
||||
],
|
||||
headers=("Pair", "Timeframe", "Type", 'From', 'To', 'Candles'),
|
||||
tablefmt='psql', stralign='right'))
|
||||
print(
|
||||
tabulate(
|
||||
[
|
||||
(
|
||||
pair,
|
||||
timeframe,
|
||||
candle_type,
|
||||
start.strftime(DATETIME_PRINT_FORMAT),
|
||||
end.strftime(DATETIME_PRINT_FORMAT),
|
||||
length,
|
||||
)
|
||||
for pair, timeframe, candle_type, start, end, length in sorted(
|
||||
paircombs1, key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2])
|
||||
)
|
||||
],
|
||||
headers=("Pair", "Timeframe", "Type", "From", "To", "Candles"),
|
||||
tablefmt="psql",
|
||||
stralign="right",
|
||||
)
|
||||
)
|
||||
|
||||
@@ -19,9 +19,9 @@ def start_convert_db(args: Dict[str, Any]) -> None:

config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

init_db(config['db_url'])
init_db(config["db_url"])
session_target = Trade.session
init_db(config['db_url_from'])
init_db(config["db_url_from"])
logger.info("Starting db migration.")

trade_count = 0
@@ -47,9 +47,11 @@ def start_convert_db(args: Dict[str, Any]) -> None:
max_order_id = session_target.scalar(select(func.max(Order.id)))
max_pairlock_id = session_target.scalar(select(func.max(PairLock.id)))

set_sequence_ids(session_target.get_bind(),
trade_id=max_trade_id,
order_id=max_order_id,
pairlock_id=max_pairlock_id)
set_sequence_ids(
session_target.get_bind(),
trade_id=max_trade_id,
order_id=max_order_id,
pairlock_id=max_pairlock_id,
)

logger.info(f"Migrated {trade_count} Trades, and {pairlock_count} Pairlocks.")
@@ -38,7 +38,7 @@ def deploy_new_strategy(strategy_name: str, strategy_path: Path, subtemplate: st
"""
Deploy new strategy from template to strategy_path
"""
fallback = 'full'
fallback = "full"
attributes = render_template_with_fallback(
templatefile=f"strategy_subtemplates/strategy_attributes_{subtemplate}.j2",
templatefallbackfile=f"strategy_subtemplates/strategy_attributes_{fallback}.j2",
@@ -64,33 +64,35 @@ def deploy_new_strategy(strategy_name: str, strategy_path: Path, subtemplate: st
templatefallbackfile="strategy_subtemplates/strategy_methods_empty.j2",
)

strategy_text = render_template(templatefile='base_strategy.py.j2',
arguments={"strategy": strategy_name,
"attributes": attributes,
"indicators": indicators,
"buy_trend": buy_trend,
"sell_trend": sell_trend,
"plot_config": plot_config,
"additional_methods": additional_methods,
})
strategy_text = render_template(
templatefile="base_strategy.py.j2",
arguments={
"strategy": strategy_name,
"attributes": attributes,
"indicators": indicators,
"buy_trend": buy_trend,
"sell_trend": sell_trend,
"plot_config": plot_config,
"additional_methods": additional_methods,
},
)

logger.info(f"Writing strategy to `{strategy_path}`.")
strategy_path.write_text(strategy_text)


def start_new_strategy(args: Dict[str, Any]) -> None:

config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

if "strategy" in args and args["strategy"]:

new_path = config['user_data_dir'] / USERPATH_STRATEGIES / (args['strategy'] + '.py')
new_path = config["user_data_dir"] / USERPATH_STRATEGIES / (args["strategy"] + ".py")

if new_path.exists():
raise OperationalException(f"`{new_path}` already exists. "
"Please choose another Strategy Name.")
raise OperationalException(
f"`{new_path}` already exists. Please choose another Strategy Name."
)

deploy_new_strategy(args['strategy'], new_path, args['template'])
deploy_new_strategy(args["strategy"], new_path, args["template"])

else:
raise ConfigurationError("`new-strategy` requires --strategy to be set.")
@@ -100,8 +102,8 @@ def clean_ui_subdir(directory: Path):
if directory.is_dir():
logger.info("Removing UI directory content.")

for p in reversed(list(directory.glob('**/*'))):  # iterate contents from leaves to root
if p.name in ('.gitkeep', 'fallback_file.html'):
for p in reversed(list(directory.glob("**/*"))):  # iterate contents from leaves to root
if p.name in (".gitkeep", "fallback_file.html"):
continue
if p.is_file():
p.unlink()
@@ -110,11 +112,11 @@ def clean_ui_subdir(directory: Path):


def read_ui_version(dest_folder: Path) -> Optional[str]:
file = dest_folder / '.uiversion'
file = dest_folder / ".uiversion"
if not file.is_file():
return None

with file.open('r') as f:
with file.open("r") as f:
return f.read()


@@ -133,12 +135,12 @@ def download_and_install_ui(dest_folder: Path, dl_url: str, version: str):
destfile.mkdir(exist_ok=True)
else:
destfile.write_bytes(x.read())
with (dest_folder / '.uiversion').open('w') as f:
with (dest_folder / ".uiversion").open("w") as f:
f.write(version)


def get_ui_download_url(version: Optional[str] = None) -> Tuple[str, str]:
base_url = 'https://api.github.com/repos/freqtrade/frequi/'
base_url = "https://api.github.com/repos/freqtrade/frequi/"
# Get base UI Repo path

resp = requests.get(f"{base_url}releases", timeout=req_timeout)
@@ -146,42 +148,41 @@ def get_ui_download_url(version: Optional[str] = None) -> Tuple[str, str]:
r = resp.json()

if version:
tmp = [x for x in r if x['name'] == version]
tmp = [x for x in r if x["name"] == version]
if tmp:
latest_version = tmp[0]['name']
assets = tmp[0].get('assets', [])
latest_version = tmp[0]["name"]
assets = tmp[0].get("assets", [])
else:
raise ValueError("UI-Version not found.")
else:
latest_version = r[0]['name']
assets = r[0].get('assets', [])
dl_url = ''
latest_version = r[0]["name"]
assets = r[0].get("assets", [])
dl_url = ""
if assets and len(assets) > 0:
dl_url = assets[0]['browser_download_url']
dl_url = assets[0]["browser_download_url"]

# URL not found - try assets url
if not dl_url:
assets = r[0]['assets_url']
assets = r[0]["assets_url"]
resp = requests.get(assets, timeout=req_timeout)
r = resp.json()
dl_url = r[0]['browser_download_url']
dl_url = r[0]["browser_download_url"]

return dl_url, latest_version


def start_install_ui(args: Dict[str, Any]) -> None:

dest_folder = Path(__file__).parents[1] / 'rpc/api_server/ui/installed/'
dest_folder = Path(__file__).parents[1] / "rpc/api_server/ui/installed/"
# First make sure the assets are removed.
dl_url, latest_version = get_ui_download_url(args.get('ui_version'))
dl_url, latest_version = get_ui_download_url(args.get("ui_version"))

curr_version = read_ui_version(dest_folder)
if curr_version == latest_version and not args.get('erase_ui_only'):
if curr_version == latest_version and not args.get("erase_ui_only"):
logger.info(f"UI already up-to-date, FreqUI Version {curr_version}.")
return

clean_ui_subdir(dest_folder)
if args.get('erase_ui_only'):
if args.get("erase_ui_only"):
logger.info("Erased UI directory content. Not downloading new version.")
else:
# Download a new version
@@ -22,15 +22,15 @@ def start_hyperopt_list(args: Dict[str, Any]) -> None:

config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

print_colorized = config.get('print_colorized', False)
print_json = config.get('print_json', False)
export_csv = config.get('export_csv')
no_details = config.get('hyperopt_list_no_details', False)
print_colorized = config.get("print_colorized", False)
print_json = config.get("print_json", False)
export_csv = config.get("export_csv")
no_details = config.get("hyperopt_list_no_details", False)
no_header = False

results_file = get_latest_hyperopt_file(
config['user_data_dir'] / 'hyperopt_results',
config.get('hyperoptexportfilename'))
config["user_data_dir"] / "hyperopt_results", config.get("hyperoptexportfilename")
)

# Previous evaluations
epochs, total_epochs = HyperoptTools.load_filtered_results(results_file, config)
@@ -40,21 +40,26 @@ def start_hyperopt_list(args: Dict[str, Any]) -> None:

if not export_csv:
try:
print(HyperoptTools.get_result_table(config, epochs, total_epochs,
not config.get('hyperopt_list_best', False),
print_colorized, 0))
print(
HyperoptTools.get_result_table(
config,
epochs,
total_epochs,
not config.get("hyperopt_list_best", False),
print_colorized,
0,
)
)
except KeyboardInterrupt:
print('User interrupted..')
print("User interrupted..")

if epochs and not no_details:
sorted_epochs = sorted(epochs, key=itemgetter('loss'))
sorted_epochs = sorted(epochs, key=itemgetter("loss"))
results = sorted_epochs[0]
HyperoptTools.show_epoch_details(results, total_epochs, print_json, no_header)

if epochs and export_csv:
HyperoptTools.export_csv_file(
config, epochs, export_csv
)
HyperoptTools.export_csv_file(config, epochs, export_csv)


def start_hyperopt_show(args: Dict[str, Any]) -> None:
@@ -65,13 +70,13 @@ def start_hyperopt_show(args: Dict[str, Any]) -> None:

config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

print_json = config.get('print_json', False)
no_header = config.get('hyperopt_show_no_header', False)
print_json = config.get("print_json", False)
no_header = config.get("hyperopt_show_no_header", False)
results_file = get_latest_hyperopt_file(
config['user_data_dir'] / 'hyperopt_results',
config.get('hyperoptexportfilename'))
config["user_data_dir"] / "hyperopt_results", config.get("hyperoptexportfilename")
)

n = config.get('hyperopt_show_index', -1)
n = config.get("hyperopt_show_index", -1)

# Previous evaluations
epochs, total_epochs = HyperoptTools.load_filtered_results(results_file, config)
@@ -80,10 +85,12 @@ def start_hyperopt_show(args: Dict[str, Any]) -> None:

if n > filtered_epochs:
raise OperationalException(
f"The index of the epoch to show should be less than {filtered_epochs + 1}.")
f"The index of the epoch to show should be less than {filtered_epochs + 1}."
)
if n < -filtered_epochs:
raise OperationalException(
f"The index of the epoch to show should be greater than {-filtered_epochs - 1}.")
f"The index of the epoch to show should be greater than {-filtered_epochs - 1}."
)

# Translate epoch index from human-readable format to pythonic
if n > 0:
@@ -92,13 +99,18 @@ def start_hyperopt_show(args: Dict[str, Any]) -> None:
if epochs:
val = epochs[n]

metrics = val['results_metrics']
if 'strategy_name' in metrics:
strategy_name = metrics['strategy_name']
show_backtest_result(strategy_name, metrics,
metrics['stake_currency'], config.get('backtest_breakdown', []))
metrics = val["results_metrics"]
if "strategy_name" in metrics:
strategy_name = metrics["strategy_name"]
show_backtest_result(
strategy_name,
metrics,
metrics["stake_currency"],
config.get("backtest_breakdown", []),
)

HyperoptTools.try_export_params(config, strategy_name, val)

HyperoptTools.show_epoch_details(val, total_epochs, print_json, no_header,
header_str="Epoch details")
HyperoptTools.show_epoch_details(
val, total_epochs, print_json, no_header, header_str="Epoch details"
)
@@ -26,42 +26,47 @@ def start_list_exchanges(args: Dict[str, Any]) -> None:
:param args: Cli args from Arguments()
:return: None
"""
exchanges = list_available_exchanges(args['list_exchanges_all'])
exchanges = list_available_exchanges(args["list_exchanges_all"])

if args['print_one_column']:
print('\n'.join([e['name'] for e in exchanges]))
if args["print_one_column"]:
print("\n".join([e["name"] for e in exchanges]))
else:
headers = {
'name': 'Exchange name',
'supported': 'Supported',
'trade_modes': 'Markets',
'comment': 'Reason',
}
headers.update({'valid': 'Valid'} if args['list_exchanges_all'] else {})
"name": "Exchange name",
"supported": "Supported",
"trade_modes": "Markets",
"comment": "Reason",
}
headers.update({"valid": "Valid"} if args["list_exchanges_all"] else {})

def build_entry(exchange: ValidExchangesType, valid: bool):
valid_entry = {'valid': exchange['valid']} if valid else {}
valid_entry = {"valid": exchange["valid"]} if valid else {}
result: Dict[str, Union[str, bool]] = {
'name': exchange['name'],
"name": exchange["name"],
**valid_entry,
'supported': 'Official' if exchange['supported'] else '',
'trade_modes': ', '.join(
(f"{a['margin_mode']} " if a['margin_mode'] else '') + a['trading_mode']
for a in exchange['trade_modes']
"supported": "Official" if exchange["supported"] else "",
"trade_modes": ", ".join(
(f"{a['margin_mode']} " if a["margin_mode"] else "") + a["trading_mode"]
for a in exchange["trade_modes"]
),
'comment': exchange['comment'],
"comment": exchange["comment"],
}

return result

if args['list_exchanges_all']:
if args["list_exchanges_all"]:
print("All exchanges supported by the ccxt library:")
exchanges = [build_entry(e, True) for e in exchanges]
else:
print("Exchanges available for Freqtrade:")
exchanges = [build_entry(e, False) for e in exchanges if e['valid'] is not False]
exchanges = [build_entry(e, False) for e in exchanges if e["valid"] is not False]

print(tabulate(exchanges, headers=headers, ))
print(
tabulate(
exchanges,
headers=headers,
)
)


def _print_objs_tabular(objs: List, print_colorized: bool) -> None:
@@ -71,26 +76,35 @@ def _print_objs_tabular(objs: List, print_colorized: bool) -> None:
yellow = Fore.YELLOW
reset = Style.RESET_ALL
else:
red = ''
yellow = ''
reset = ''
red = ""
yellow = ""
reset = ""

names = [s['name'] for s in objs]
objs_to_print = [{
'name': s['name'] if s['name'] else "--",
'location': s['location_rel'],
'status': (red + "LOAD FAILED" + reset if s['class'] is None
else "OK" if names.count(s['name']) == 1
else yellow + "DUPLICATE NAME" + reset)
} for s in objs]
names = [s["name"] for s in objs]
objs_to_print = [
{
"name": s["name"] if s["name"] else "--",
"location": s["location_rel"],
"status": (
red + "LOAD FAILED" + reset
if s["class"] is None
else "OK"
if names.count(s["name"]) == 1
else yellow + "DUPLICATE NAME" + reset
),
}
for s in objs
]
for idx, s in enumerate(objs):
if 'hyperoptable' in s:
objs_to_print[idx].update({
'hyperoptable': "Yes" if s['hyperoptable']['count'] > 0 else "No",
'buy-Params': len(s['hyperoptable'].get('buy', [])),
'sell-Params': len(s['hyperoptable'].get('sell', [])),
})
print(tabulate(objs_to_print, headers='keys', tablefmt='psql', stralign='right'))
if "hyperoptable" in s:
objs_to_print[idx].update(
{
"hyperoptable": "Yes" if s["hyperoptable"]["count"] > 0 else "No",
"buy-Params": len(s["hyperoptable"].get("buy", [])),
"sell-Params": len(s["hyperoptable"].get("sell", [])),
}
)
print(tabulate(objs_to_print, headers="keys", tablefmt="psql", stralign="right"))


def start_list_strategies(args: Dict[str, Any]) -> None:
@@ -100,19 +114,20 @@ def start_list_strategies(args: Dict[str, Any]) -> None:
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

strategy_objs = StrategyResolver.search_all_objects(
config, not args['print_one_column'], config.get('recursive_strategy_search', False))
config, not args["print_one_column"], config.get("recursive_strategy_search", False)
)
# Sort alphabetically
strategy_objs = sorted(strategy_objs, key=lambda x: x['name'])
strategy_objs = sorted(strategy_objs, key=lambda x: x["name"])
for obj in strategy_objs:
if obj['class']:
obj['hyperoptable'] = obj['class'].detect_all_parameters()
if obj["class"]:
obj["hyperoptable"] = obj["class"].detect_all_parameters()
else:
obj['hyperoptable'] = {'count': 0}
obj["hyperoptable"] = {"count": 0}

if args['print_one_column']:
print('\n'.join([s['name'] for s in strategy_objs]))
if args["print_one_column"]:
print("\n".join([s["name"] for s in strategy_objs]))
else:
_print_objs_tabular(strategy_objs, config.get('print_colorized', False))
_print_objs_tabular(strategy_objs, config.get("print_colorized", False))


def start_list_freqAI_models(args: Dict[str, Any]) -> None:
@@ -121,13 +136,14 @@ def start_list_freqAI_models(args: Dict[str, Any]) -> None:
"""
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
from freqtrade.resolvers.freqaimodel_resolver import FreqaiModelResolver
model_objs = FreqaiModelResolver.search_all_objects(config, not args['print_one_column'])

model_objs = FreqaiModelResolver.search_all_objects(config, not args["print_one_column"])
# Sort alphabetically
model_objs = sorted(model_objs, key=lambda x: x['name'])
if args['print_one_column']:
print('\n'.join([s['name'] for s in model_objs]))
model_objs = sorted(model_objs, key=lambda x: x["name"])
if args["print_one_column"]:
print("\n".join([s["name"] for s in model_objs]))
else:
_print_objs_tabular(model_objs, config.get('print_colorized', False))
_print_objs_tabular(model_objs, config.get("print_colorized", False))


def start_list_timeframes(args: Dict[str, Any]) -> None:
@@ -136,16 +152,18 @@ def start_list_timeframes(args: Dict[str, Any]) -> None:
"""
config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)
# Do not use timeframe set in the config
config['timeframe'] = None
config["timeframe"] = None

# Init exchange
exchange = ExchangeResolver.load_exchange(config, validate=False)

if args['print_one_column']:
print('\n'.join(exchange.timeframes))
if args["print_one_column"]:
print("\n".join(exchange.timeframes))
else:
print(f"Timeframes available for the exchange `{exchange.name}`: "
f"{', '.join(exchange.timeframes)}")
print(
f"Timeframes available for the exchange `{exchange.name}`: "
f"{', '.join(exchange.timeframes)}"
)


def start_list_markets(args: Dict[str, Any], pairs_only: bool = False) -> None:
@@ -161,51 +179,75 @@ def start_list_markets(args: Dict[str, Any], pairs_only: bool = False) -> None:
exchange = ExchangeResolver.load_exchange(config, validate=False)

# By default only active pairs/markets are to be shown
active_only = not args.get('list_pairs_all', False)
active_only = not args.get("list_pairs_all", False)

base_currencies = args.get('base_currencies', [])
quote_currencies = args.get('quote_currencies', [])
base_currencies = args.get("base_currencies", [])
quote_currencies = args.get("quote_currencies", [])

try:
pairs = exchange.get_markets(base_currencies=base_currencies,
quote_currencies=quote_currencies,
tradable_only=pairs_only,
active_only=active_only)
pairs = exchange.get_markets(
base_currencies=base_currencies,
quote_currencies=quote_currencies,
tradable_only=pairs_only,
active_only=active_only,
)
# Sort the pairs/markets by symbol
pairs = dict(sorted(pairs.items()))
except Exception as e:
raise OperationalException(f"Cannot get markets. Reason: {e}") from e

else:
summary_str = ((f"Exchange {exchange.name} has {len(pairs)} ") +
("active " if active_only else "") +
(plural(len(pairs), "pair" if pairs_only else "market")) +
(f" with {', '.join(base_currencies)} as base "
f"{plural(len(base_currencies), 'currency', 'currencies')}"
if base_currencies else "") +
(" and" if base_currencies and quote_currencies else "") +
(f" with {', '.join(quote_currencies)} as quote "
f"{plural(len(quote_currencies), 'currency', 'currencies')}"
if quote_currencies else ""))
summary_str = (
(f"Exchange {exchange.name} has {len(pairs)} ")
+ ("active " if active_only else "")
+ (plural(len(pairs), "pair" if pairs_only else "market"))
+ (
f" with {', '.join(base_currencies)} as base "
f"{plural(len(base_currencies), 'currency', 'currencies')}"
if base_currencies
else ""
)
+ (" and" if base_currencies and quote_currencies else "")
+ (
f" with {', '.join(quote_currencies)} as quote "
f"{plural(len(quote_currencies), 'currency', 'currencies')}"
if quote_currencies
else ""
)
)

headers = ["Id", "Symbol", "Base", "Quote", "Active",
"Spot", "Margin", "Future", "Leverage"]
headers = [
"Id",
"Symbol",
"Base",
"Quote",
"Active",
"Spot",
"Margin",
"Future",
"Leverage",
]

tabular_data = [{
'Id': v['id'],
'Symbol': v['symbol'],
'Base': v['base'],
'Quote': v['quote'],
'Active': market_is_active(v),
'Spot': 'Spot' if exchange.market_is_spot(v) else '',
'Margin': 'Margin' if exchange.market_is_margin(v) else '',
'Future': 'Future' if exchange.market_is_future(v) else '',
'Leverage': exchange.get_max_leverage(v['symbol'], 20)
} for _, v in pairs.items()]
tabular_data = [
{
"Id": v["id"],
"Symbol": v["symbol"],
"Base": v["base"],
"Quote": v["quote"],
"Active": market_is_active(v),
"Spot": "Spot" if exchange.market_is_spot(v) else "",
"Margin": "Margin" if exchange.market_is_margin(v) else "",
"Future": "Future" if exchange.market_is_future(v) else "",
"Leverage": exchange.get_max_leverage(v["symbol"], 20),
}
for _, v in pairs.items()
]

if (args.get('print_one_column', False) or
args.get('list_pairs_print_json', False) or
args.get('print_csv', False)):
if (
args.get("print_one_column", False)
or args.get("list_pairs_print_json", False)
or args.get("print_csv", False)
):
# Print summary string in the log in case of machine-readable
# regular formats.
logger.info(f"{summary_str}.")
@@ -215,24 +257,26 @@ def start_list_markets(args: Dict[str, Any], pairs_only: bool = False) -> None:
print()

if pairs:
if args.get('print_list', False):
if args.get("print_list", False):
# print data as a list, with human-readable summary
print(f"{summary_str}: {', '.join(pairs.keys())}.")
elif args.get('print_one_column', False):
print('\n'.join(pairs.keys()))
elif args.get('list_pairs_print_json', False):
elif args.get("print_one_column", False):
print("\n".join(pairs.keys()))
elif args.get("list_pairs_print_json", False):
print(rapidjson.dumps(list(pairs.keys()), default=str))
elif args.get('print_csv', False):
elif args.get("print_csv", False):
writer = csv.DictWriter(sys.stdout, fieldnames=headers)
writer.writeheader()
writer.writerows(tabular_data)
else:
# print data as a table, with the human-readable summary
print(f"{summary_str}:")
print(tabulate(tabular_data, headers='keys', tablefmt='psql', stralign='right'))
elif not (args.get('print_one_column', False) or
args.get('list_pairs_print_json', False) or
args.get('print_csv', False)):
print(tabulate(tabular_data, headers="keys", tablefmt="psql", stralign="right"))
elif not (
args.get("print_one_column", False)
or args.get("list_pairs_print_json", False)
or args.get("print_csv", False)
):
print(f"{summary_str}.")


@@ -243,21 +287,22 @@ def start_show_trades(args: Dict[str, Any]) -> None:
import json

from freqtrade.persistence import Trade, init_db

config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

if 'db_url' not in config:
if "db_url" not in config:
raise ConfigurationError("--db-url is required for this command.")

logger.info(f'Using DB: "{parse_db_uri_for_logging(config["db_url"])}"')
init_db(config['db_url'])
init_db(config["db_url"])
tfilter = []

if config.get('trade_ids'):
tfilter.append(Trade.id.in_(config['trade_ids']))
if config.get("trade_ids"):
tfilter.append(Trade.id.in_(config["trade_ids"]))

trades = Trade.get_trades(tfilter).all()
logger.info(f"Printing {len(trades)} Trades: ")
if config.get('print_json', False):
if config.get("print_json", False):
print(json.dumps([trade.to_json() for trade in trades], indent=4))
else:
for trade in trades:
@@ -21,20 +21,22 @@ def setup_optimize_configuration(args: Dict[str, Any], method: RunMode) -> Dict[
config = setup_utils_configuration(args, method)

no_unlimited_runmodes = {
RunMode.BACKTEST: 'backtesting',
RunMode.HYPEROPT: 'hyperoptimization',
RunMode.BACKTEST: "backtesting",
RunMode.HYPEROPT: "hyperoptimization",
}
if method in no_unlimited_runmodes.keys():
wallet_size = config['dry_run_wallet'] * config['tradable_balance_ratio']
wallet_size = config["dry_run_wallet"] * config["tradable_balance_ratio"]
# tradable_balance_ratio
if (config['stake_amount'] != constants.UNLIMITED_STAKE_AMOUNT
and config['stake_amount'] > wallet_size):
wallet = fmt_coin(wallet_size, config['stake_currency'])
stake = fmt_coin(config['stake_amount'], config['stake_currency'])
if (
config["stake_amount"] != constants.UNLIMITED_STAKE_AMOUNT
and config["stake_amount"] > wallet_size
):
wallet = fmt_coin(wallet_size, config["stake_currency"])
stake = fmt_coin(config["stake_amount"], config["stake_currency"])
raise ConfigurationError(
f"Starting balance ({wallet}) is smaller than stake_amount {stake}. "
f"Wallet is calculated as `dry_run_wallet * tradable_balance_ratio`."
)
)

return config

@@ -51,7 +53,7 @@ def start_backtesting(args: Dict[str, Any]) -> None:
# Initialize configuration
config = setup_optimize_configuration(args, RunMode.BACKTEST)

logger.info('Starting freqtrade in Backtesting mode')
logger.info("Starting freqtrade in Backtesting mode")

# Initialize backtesting object
backtesting = Backtesting(config)
@@ -68,7 +70,7 @@ def start_backtesting_show(args: Dict[str, Any]) -> None:
from freqtrade.data.btanalysis import load_backtest_stats
from freqtrade.optimize.optimize_reports import show_backtest_results, show_sorted_pairlist

results = load_backtest_stats(config['exportfilename'])
results = load_backtest_stats(config["exportfilename"])

show_backtest_results(config, results)
show_sorted_pairlist(config, results)
@@ -87,20 +89,20 @@ def start_hyperopt(args: Dict[str, Any]) -> None:
from freqtrade.optimize.hyperopt import Hyperopt
except ImportError as e:
raise OperationalException(
f"{e}. Please ensure that the hyperopt dependencies are installed.") from e
f"{e}. Please ensure that the hyperopt dependencies are installed."
) from e
# Initialize configuration
config = setup_optimize_configuration(args, RunMode.HYPEROPT)

logger.info('Starting freqtrade in Hyperopt mode')
logger.info("Starting freqtrade in Hyperopt mode")

lock = FileLock(Hyperopt.get_lock_filename(config))

try:
with lock.acquire(timeout=1):

# Remove noisy log messages
logging.getLogger('hyperopt.tpe').setLevel(logging.WARNING)
logging.getLogger('filelock').setLevel(logging.WARNING)
logging.getLogger("hyperopt.tpe").setLevel(logging.WARNING)
logging.getLogger("filelock").setLevel(logging.WARNING)

# Initialize backtesting object
hyperopt = Hyperopt(config)
@@ -108,9 +110,11 @@ def start_hyperopt(args: Dict[str, Any]) -> None:

except Timeout:
logger.info("Another running instance of freqtrade Hyperopt detected.")
logger.info("Simultaneous execution of multiple Hyperopt commands is not supported. "
"Hyperopt module is resource hungry. Please run your Hyperopt sequentially "
"or on separate machines.")
logger.info(
"Simultaneous execution of multiple Hyperopt commands is not supported. "
"Hyperopt module is resource hungry. Please run your Hyperopt sequentially "
"or on separate machines."
)
logger.info("Quitting now.")
# TODO: return False here in order to help freqtrade to exit
# with non-zero exit code...
@@ -127,7 +131,7 @@ def start_edge(args: Dict[str, Any]) -> None:

# Initialize configuration
config = setup_optimize_configuration(args, RunMode.EDGE)
logger.info('Starting freqtrade in Edge mode')
logger.info("Starting freqtrade in Edge mode")

# Initialize Edge object
edge_cli = EdgeCli(config)

@@ -17,28 +17,29 @@ def start_test_pairlist(args: Dict[str, Any]) -> None:
"""
from freqtrade.persistence import FtNoDBContext
from freqtrade.plugins.pairlistmanager import PairListManager

config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)

exchange = ExchangeResolver.load_exchange(config, validate=False)

quote_currencies = args.get('quote_currencies')
quote_currencies = args.get("quote_currencies")
if not quote_currencies:
quote_currencies = [config.get('stake_currency')]
quote_currencies = [config.get("stake_currency")]
results = {}
with FtNoDBContext():
for curr in quote_currencies:
config['stake_currency'] = curr
config["stake_currency"] = curr
pairlists = PairListManager(exchange, config)
pairlists.refresh_pairlist()
results[curr] = pairlists.whitelist

for curr, pairlist in results.items():
if not args.get('print_one_column', False) and not args.get('list_pairs_print_json', False):
if not args.get("print_one_column", False) and not args.get("list_pairs_print_json", False):
print(f"Pairs for {curr}: ")

if args.get('print_one_column', False):
print('\n'.join(pairlist))
elif args.get('list_pairs_print_json', False):
if args.get("print_one_column", False):
print("\n".join(pairlist))
elif args.get("list_pairs_print_json", False):
print(rapidjson.dumps(list(pairlist), default=str))
else:
print(pairlist)
@@ -6,10 +6,11 @@ from freqtrade.exceptions import ConfigurationError


def validate_plot_args(args: Dict[str, Any]) -> None:
if not args.get('datadir') and not args.get('config'):
if not args.get("datadir") and not args.get("config"):
raise ConfigurationError(
"You need to specify either `--datadir` or `--config` "
"for plot-profit and plot-dataframe.")
"for plot-profit and plot-dataframe."
)


def start_plot_dataframe(args: Dict[str, Any]) -> None:
@@ -18,6 +19,7 @@ def start_plot_dataframe(args: Dict[str, Any]) -> None:
"""
# Import here to avoid errors if plot-dependencies are not installed.
from freqtrade.plot.plotting import load_and_plot_trades

validate_plot_args(args)
config = setup_utils_configuration(args, RunMode.PLOT)

@@ -30,6 +32,7 @@ def start_plot_profit(args: Dict[str, Any]) -> None:
"""
# Import here to avoid errors if plot-dependencies are not installed.
from freqtrade.plot.plotting import plot_profit

validate_plot_args(args)
config = setup_utils_configuration(args, RunMode.PLOT)


@@ -26,13 +26,15 @@ def start_strategy_update(args: Dict[str, Any]) -> None:
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

strategy_objs = StrategyResolver.search_all_objects(
config, enum_failed=False, recursive=config.get('recursive_strategy_search', False))
config, enum_failed=False, recursive=config.get("recursive_strategy_search", False)
)

filtered_strategy_objs = []
if args['strategy_list']:
if args["strategy_list"]:
filtered_strategy_objs = [
strategy_obj for strategy_obj in strategy_objs
if strategy_obj['name'] in args['strategy_list']
strategy_obj
for strategy_obj in strategy_objs
if strategy_obj["name"] in args["strategy_list"]
]

else:
@@ -41,8 +43,8 @@ def start_strategy_update(args: Dict[str, Any]) -> None:

processed_locations = set()
for strategy_obj in filtered_strategy_objs:
if strategy_obj['location'] not in processed_locations:
processed_locations.add(strategy_obj['location'])
if strategy_obj["location"] not in processed_locations:
processed_locations.add(strategy_obj["location"])
start_conversion(strategy_obj, config)



@@ -24,13 +24,13 @@ def sanitize_config(config: Config, *, show_sensitive: bool = False) -> Config:
]
config = deepcopy(config)
for key in keys_to_remove:
if '.' in key:
nested_keys = key.split('.')
if "." in key:
nested_keys = key.split(".")
nested_config = config
for nested_key in nested_keys[:-1]:
nested_config = nested_config.get(nested_key, {})
nested_config[nested_keys[-1]] = 'REDACTED'
nested_config[nested_keys[-1]] = "REDACTED"
else:
config[key] = 'REDACTED'
config[key] = "REDACTED"

return config
@@ -11,7 +11,8 @@ logger = logging.getLogger(__name__)


def setup_utils_configuration(
args: Dict[str, Any], method: RunMode, *, set_dry: bool = True) -> Dict[str, Any]:
args: Dict[str, Any], method: RunMode, *, set_dry: bool = True
) -> Dict[str, Any]:
"""
Prepare the configuration for utils subcommands
:param args: Cli args from Arguments()
@@ -23,7 +24,7 @@ def setup_utils_configuration(

# Ensure these modes are using Dry-run
if set_dry:
config['dry_run'] = True
config["dry_run"] = True
validate_config_consistency(config, preliminary=True)

return config

@@ -20,18 +20,16 @@ def _extend_validator(validator_class):
Extended validator for the Freqtrade configuration JSON Schema.
Currently it only handles defaults for subschemas.
"""
validate_properties = validator_class.VALIDATORS['properties']
validate_properties = validator_class.VALIDATORS["properties"]

def set_defaults(validator, properties, instance, schema):
for prop, subschema in properties.items():
if 'default' in subschema:
instance.setdefault(prop, subschema['default'])
if "default" in subschema:
instance.setdefault(prop, subschema["default"])

yield from validate_properties(validator, properties, instance, schema)

return validators.extend(
validator_class, {'properties': set_defaults}
)
return validators.extend(validator_class, {"properties": set_defaults})


FreqtradeValidator = _extend_validator(Draft4Validator)
@@ -44,27 +42,23 @@ def validate_config_schema(conf: Dict[str, Any], preliminary: bool = False) -> D
:return: Returns the config if valid, otherwise throw an exception
"""
conf_schema = deepcopy(constants.CONF_SCHEMA)
if conf.get('runmode', RunMode.OTHER) in (RunMode.DRY_RUN, RunMode.LIVE):
conf_schema['required'] = constants.SCHEMA_TRADE_REQUIRED
elif conf.get('runmode', RunMode.OTHER) in (RunMode.BACKTEST, RunMode.HYPEROPT):
if conf.get("runmode", RunMode.OTHER) in (RunMode.DRY_RUN, RunMode.LIVE):
conf_schema["required"] = constants.SCHEMA_TRADE_REQUIRED
elif conf.get("runmode", RunMode.OTHER) in (RunMode.BACKTEST, RunMode.HYPEROPT):
if preliminary:
conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED
conf_schema["required"] = constants.SCHEMA_BACKTEST_REQUIRED
else:
conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED_FINAL
elif conf.get('runmode', RunMode.OTHER) == RunMode.WEBSERVER:
conf_schema['required'] = constants.SCHEMA_MINIMAL_WEBSERVER
conf_schema["required"] = constants.SCHEMA_BACKTEST_REQUIRED_FINAL
elif conf.get("runmode", RunMode.OTHER) == RunMode.WEBSERVER:
conf_schema["required"] = constants.SCHEMA_MINIMAL_WEBSERVER
else:
conf_schema['required'] = constants.SCHEMA_MINIMAL_REQUIRED
conf_schema["required"] = constants.SCHEMA_MINIMAL_REQUIRED
try:
FreqtradeValidator(conf_schema).validate(conf)
return conf
except ValidationError as e:
logger.critical(
f"Invalid configuration. Reason: {e}"
)
raise ValidationError(
best_match(Draft4Validator(conf_schema).iter_errors(conf)).message
)
logger.critical(f"Invalid configuration. Reason: {e}")
raise ValidationError(best_match(Draft4Validator(conf_schema).iter_errors(conf)).message)


def validate_config_consistency(conf: Dict[str, Any], *, preliminary: bool = False) -> None:
@@ -91,7 +85,7 @@ def validate_config_consistency(conf: Dict[str, Any], *, preliminary: bool = Fal
validate_migrated_strategy_settings(conf)

# validate configuration before returning
logger.info('Validating configuration ...')
logger.info("Validating configuration ...")
validate_config_schema(conf, preliminary=preliminary)


@@ -100,9 +94,11 @@ def _validate_unlimited_amount(conf: Dict[str, Any]) -> None:
If edge is disabled, either max_open_trades or stake_amount need to be set.
:raise: ConfigurationError if config validation failed
"""
if (not conf.get('edge', {}).get('enabled')
and conf.get('max_open_trades') == float('inf')
and conf.get('stake_amount') == constants.UNLIMITED_STAKE_AMOUNT):
if (
not conf.get("edge", {}).get("enabled")
and conf.get("max_open_trades") == float("inf")
and conf.get("stake_amount") == constants.UNLIMITED_STAKE_AMOUNT
):
raise ConfigurationError("`max_open_trades` and `stake_amount` cannot both be unlimited.")


@@ -111,45 +107,47 @@ def _validate_price_config(conf: Dict[str, Any]) -> None:
When using market orders, price sides must be using the "other" side of the price
"""
# TODO: The below could be an enforced setting when using market orders
if (conf.get('order_types', {}).get('entry') == 'market'
and conf.get('entry_pricing', {}).get('price_side') not in ('ask', 'other')):
raise ConfigurationError(
'Market entry orders require entry_pricing.price_side = "other".')
if conf.get("order_types", {}).get("entry") == "market" and conf.get("entry_pricing", {}).get(
"price_side"
) not in ("ask", "other"):
raise ConfigurationError('Market entry orders require entry_pricing.price_side = "other".')

if (conf.get('order_types', {}).get('exit') == 'market'
and conf.get('exit_pricing', {}).get('price_side') not in ('bid', 'other')):
if conf.get("order_types", {}).get("exit") == "market" and conf.get("exit_pricing", {}).get(
"price_side"
) not in ("bid", "other"):
raise ConfigurationError('Market exit orders require exit_pricing.price_side = "other".')


def _validate_trailing_stoploss(conf: Dict[str, Any]) -> None:

if conf.get('stoploss') == 0.0:
if conf.get("stoploss") == 0.0:
raise ConfigurationError(
'The config stoploss needs to be different from 0 to avoid problems with sell orders.'
"The config stoploss needs to be different from 0 to avoid problems with sell orders."
)
# Skip if trailing stoploss is not activated
if not conf.get('trailing_stop', False):
if not conf.get("trailing_stop", False):
return

tsl_positive = float(conf.get('trailing_stop_positive', 0))
tsl_offset = float(conf.get('trailing_stop_positive_offset', 0))
tsl_only_offset = conf.get('trailing_only_offset_is_reached', False)
tsl_positive = float(conf.get("trailing_stop_positive", 0))
tsl_offset = float(conf.get("trailing_stop_positive_offset", 0))
tsl_only_offset = conf.get("trailing_only_offset_is_reached", False)

if tsl_only_offset:
if tsl_positive == 0.0:
raise ConfigurationError(
'The config trailing_only_offset_is_reached needs '
'trailing_stop_positive_offset to be more than 0 in your config.')
"The config trailing_only_offset_is_reached needs "
"trailing_stop_positive_offset to be more than 0 in your config."
)
if tsl_positive > 0 and 0 < tsl_offset <= tsl_positive:
raise ConfigurationError(
'The config trailing_stop_positive_offset needs '
'to be greater than trailing_stop_positive in your config.')
"The config trailing_stop_positive_offset needs "
"to be greater than trailing_stop_positive in your config."
)

# Fetch again without default
if 'trailing_stop_positive' in conf and float(conf['trailing_stop_positive']) == 0.0:
if "trailing_stop_positive" in conf and float(conf["trailing_stop_positive"]) == 0.0:
raise ConfigurationError(
'The config trailing_stop_positive needs to be different from 0 '
'to avoid problems with sell orders.'
"The config trailing_stop_positive needs to be different from 0 "
"to avoid problems with sell orders."
)


@@ -158,10 +156,10 @@ def _validate_edge(conf: Dict[str, Any]) -> None:
Edge and Dynamic whitelist should not both be enabled, since edge overrides dynamic whitelists.
"""

if not conf.get('edge', {}).get('enabled'):
if not conf.get("edge", {}).get("enabled"):
return

if not conf.get('use_exit_signal', True):
if not conf.get("use_exit_signal", True):
raise ConfigurationError(
"Edge requires `use_exit_signal` to be True, otherwise no sells will happen."
)
@@ -171,13 +169,20 @@ def _validate_whitelist(conf: Dict[str, Any]) -> None:
"""
Dynamic whitelist does not require pair_whitelist to be set - however StaticWhitelist does.
"""
if conf.get('runmode', RunMode.OTHER) in [RunMode.OTHER, RunMode.PLOT,
RunMode.UTIL_NO_EXCHANGE, RunMode.UTIL_EXCHANGE]:
if conf.get("runmode", RunMode.OTHER) in [
RunMode.OTHER,
RunMode.PLOT,
RunMode.UTIL_NO_EXCHANGE,
RunMode.UTIL_EXCHANGE,
]:
return

for pl in conf.get('pairlists', [{'method': 'StaticPairList'}]):
if (isinstance(pl, dict) and pl.get('method') == 'StaticPairList'
and not conf.get('exchange', {}).get('pair_whitelist')):
for pl in conf.get("pairlists", [{"method": "StaticPairList"}]):
if (
isinstance(pl, dict)
and pl.get("method") == "StaticPairList"
and not conf.get("exchange", {}).get("pair_whitelist")
):
raise ConfigurationError("StaticPairList requires pair_whitelist to be set.")


@@ -186,14 +191,14 @@ def _validate_protections(conf: Dict[str, Any]) -> None:
Validate protection configuration validity
"""

for prot in conf.get('protections', []):
if ('stop_duration' in prot and 'stop_duration_candles' in prot):
for prot in conf.get("protections", []):
if "stop_duration" in prot and "stop_duration_candles" in prot:
raise ConfigurationError(
"Protections must specify either `stop_duration` or `stop_duration_candles`.\n"
f"Please fix the protection {prot.get('method')}"
)

if ('lookback_period' in prot and 'lookback_period_candles' in prot):
if "lookback_period" in prot and "lookback_period_candles" in prot:
raise ConfigurationError(
"Protections must specify either `lookback_period` or `lookback_period_candles`.\n"
f"Please fix the protection {prot.get('method')}"
@@ -201,10 +206,10 @@ def _validate...


def _validate_ask_orderbook(conf: Dict[str, Any]) -> None:
ask_strategy = conf.get('exit_pricing', {})
ob_min = ask_strategy.get('order_book_min')
ob_max = ask_strategy.get('order_book_max')
if ob_min is not None and ob_max is not None and ask_strategy.get('use_order_book'):
ask_strategy = conf.get("exit_pricing", {})
ob_min = ask_strategy.get("order_book_min")
ob_max = ask_strategy.get("order_book_max")
if ob_min is not None and ob_max is not None and ask_strategy.get("use_order_book"):
if ob_min != ob_max:
raise ConfigurationError(
"Using order_book_max != order_book_min in exit_pricing is no longer supported."
@@ -212,7 +217,7 @@ def _validate_ask_orderbook(conf: Dict[str, Any]) -> None:
)
else:
# Move value to order_book_top
ask_strategy['order_book_top'] = ob_min
ask_strategy["order_book_top"] = ob_min
logger.warning(
"DEPRECATED: "
"Please use `order_book_top` instead of `order_book_min` and `order_book_max` "
@@ -221,7 +226,6 @@ def _validate_ask_orderbook(conf: Dict[str, Any]) -> None:


def validate_migrated_strategy_settings(conf: Dict[str, Any]) -> None:

_validate_time_in_force(conf)
_validate_order_types(conf)
_validate_unfilledtimeout(conf)
@@ -230,119 +234,129 @@ def validate_migrated_strategy_settings(conf: Dict[str, Any]) -> None:
|
||||
|
||||
|
||||
def _validate_time_in_force(conf: Dict[str, Any]) -> None:
|
||||
|
||||
time_in_force = conf.get('order_time_in_force', {})
|
||||
if 'buy' in time_in_force or 'sell' in time_in_force:
|
||||
if conf.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
|
||||
time_in_force = conf.get("order_time_in_force", {})
|
||||
if "buy" in time_in_force or "sell" in time_in_force:
|
||||
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise ConfigurationError(
|
||||
"Please migrate your time_in_force settings to use 'entry' and 'exit'.")
|
||||
"Please migrate your time_in_force settings to use 'entry' and 'exit'."
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
"DEPRECATED: Using 'buy' and 'sell' for time_in_force is deprecated."
|
||||
"Please migrate your time_in_force settings to use 'entry' and 'exit'."
|
||||
)
|
||||
process_deprecated_setting(
|
||||
conf, 'order_time_in_force', 'buy', 'order_time_in_force', 'entry')
|
||||
conf, "order_time_in_force", "buy", "order_time_in_force", "entry"
|
||||
)
|
||||
|
||||
process_deprecated_setting(
|
||||
conf, 'order_time_in_force', 'sell', 'order_time_in_force', 'exit')
|
||||
conf, "order_time_in_force", "sell", "order_time_in_force", "exit"
|
||||
)
|
||||
|
||||
|
||||
def _validate_order_types(conf: Dict[str, Any]) -> None:
|
||||
|
||||
order_types = conf.get('order_types', {})
|
||||
old_order_types = ['buy', 'sell', 'emergencysell', 'forcebuy',
|
||||
'forcesell', 'emergencyexit', 'forceexit', 'forceentry']
|
||||
order_types = conf.get("order_types", {})
|
||||
old_order_types = [
|
||||
"buy",
|
||||
"sell",
|
||||
"emergencysell",
|
||||
"forcebuy",
|
||||
"forcesell",
|
||||
"emergencyexit",
|
||||
"forceexit",
|
||||
"forceentry",
|
||||
]
|
||||
if any(x in order_types for x in old_order_types):
|
||||
if conf.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
|
||||
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise ConfigurationError(
|
||||
"Please migrate your order_types settings to use the new wording.")
|
||||
"Please migrate your order_types settings to use the new wording."
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
"DEPRECATED: Using 'buy' and 'sell' for order_types is deprecated."
|
||||
"Please migrate your order_types settings to use 'entry' and 'exit' wording."
|
||||
)
|
||||
for o, n in [
|
||||
('buy', 'entry'),
|
||||
('sell', 'exit'),
|
||||
('emergencysell', 'emergency_exit'),
|
||||
('forcesell', 'force_exit'),
|
||||
('forcebuy', 'force_entry'),
|
||||
('emergencyexit', 'emergency_exit'),
|
||||
('forceexit', 'force_exit'),
|
||||
('forceentry', 'force_entry'),
|
||||
("buy", "entry"),
|
||||
("sell", "exit"),
|
||||
("emergencysell", "emergency_exit"),
|
||||
("forcesell", "force_exit"),
|
||||
("forcebuy", "force_entry"),
|
||||
("emergencyexit", "emergency_exit"),
|
||||
("forceexit", "force_exit"),
|
||||
("forceentry", "force_entry"),
|
||||
]:
|
||||
|
||||
process_deprecated_setting(conf, 'order_types', o, 'order_types', n)
|
||||
process_deprecated_setting(conf, "order_types", o, "order_types", n)
|
||||
|
||||
|
||||
def _validate_unfilledtimeout(conf: Dict[str, Any]) -> None:
|
||||
unfilledtimeout = conf.get('unfilledtimeout', {})
|
||||
if any(x in unfilledtimeout for x in ['buy', 'sell']):
|
||||
if conf.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
|
||||
unfilledtimeout = conf.get("unfilledtimeout", {})
|
||||
if any(x in unfilledtimeout for x in ["buy", "sell"]):
|
||||
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise ConfigurationError(
|
||||
"Please migrate your unfilledtimeout settings to use the new wording.")
|
||||
"Please migrate your unfilledtimeout settings to use the new wording."
|
||||
)
|
||||
else:
|
||||
|
||||
logger.warning(
|
||||
"DEPRECATED: Using 'buy' and 'sell' for unfilledtimeout is deprecated."
|
||||
"Please migrate your unfilledtimeout settings to use 'entry' and 'exit' wording."
|
||||
)
|
||||
for o, n in [
|
||||
('buy', 'entry'),
|
||||
('sell', 'exit'),
|
||||
("buy", "entry"),
|
||||
("sell", "exit"),
|
||||
]:
|
||||
|
||||
process_deprecated_setting(conf, 'unfilledtimeout', o, 'unfilledtimeout', n)
|
||||
process_deprecated_setting(conf, "unfilledtimeout", o, "unfilledtimeout", n)
|
||||
|
||||
|
||||
def _validate_pricing_rules(conf: Dict[str, Any]) -> None:
|
||||
|
||||
if conf.get('ask_strategy') or conf.get('bid_strategy'):
|
||||
if conf.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise ConfigurationError(
|
||||
"Please migrate your pricing settings to use the new wording.")
|
||||
if conf.get("ask_strategy") or conf.get("bid_strategy"):
|
||||
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise ConfigurationError("Please migrate your pricing settings to use the new wording.")
|
||||
else:
|
||||
|
||||
logger.warning(
|
||||
"DEPRECATED: Using 'ask_strategy' and 'bid_strategy' is deprecated."
|
||||
"Please migrate your settings to use 'entry_pricing' and 'exit_pricing'."
|
||||
)
|
||||
conf['entry_pricing'] = {}
|
||||
for obj in list(conf.get('bid_strategy', {}).keys()):
|
||||
if obj == 'ask_last_balance':
|
||||
process_deprecated_setting(conf, 'bid_strategy', obj,
|
||||
'entry_pricing', 'price_last_balance')
|
||||
conf["entry_pricing"] = {}
|
||||
for obj in list(conf.get("bid_strategy", {}).keys()):
|
||||
if obj == "ask_last_balance":
|
||||
process_deprecated_setting(
|
||||
conf, "bid_strategy", obj, "entry_pricing", "price_last_balance"
|
||||
)
|
||||
else:
|
||||
process_deprecated_setting(conf, 'bid_strategy', obj, 'entry_pricing', obj)
|
||||
del conf['bid_strategy']
|
||||
process_deprecated_setting(conf, "bid_strategy", obj, "entry_pricing", obj)
|
||||
del conf["bid_strategy"]
|
||||
|
||||
conf['exit_pricing'] = {}
|
||||
for obj in list(conf.get('ask_strategy', {}).keys()):
|
||||
if obj == 'bid_last_balance':
|
||||
process_deprecated_setting(conf, 'ask_strategy', obj,
|
||||
'exit_pricing', 'price_last_balance')
|
||||
conf["exit_pricing"] = {}
|
||||
for obj in list(conf.get("ask_strategy", {}).keys()):
|
||||
if obj == "bid_last_balance":
|
||||
process_deprecated_setting(
|
||||
conf, "ask_strategy", obj, "exit_pricing", "price_last_balance"
|
||||
)
|
||||
else:
|
||||
process_deprecated_setting(conf, 'ask_strategy', obj, 'exit_pricing', obj)
|
||||
del conf['ask_strategy']
|
||||
process_deprecated_setting(conf, "ask_strategy", obj, "exit_pricing", obj)
|
||||
del conf["ask_strategy"]
|
||||
|
||||
|
||||
def _validate_freqai_hyperopt(conf: Dict[str, Any]) -> None:
|
||||
freqai_enabled = conf.get('freqai', {}).get('enabled', False)
|
||||
analyze_per_epoch = conf.get('analyze_per_epoch', False)
|
||||
freqai_enabled = conf.get("freqai", {}).get("enabled", False)
|
||||
analyze_per_epoch = conf.get("analyze_per_epoch", False)
|
||||
if analyze_per_epoch and freqai_enabled:
|
||||
raise ConfigurationError(
|
||||
'Using analyze-per-epoch parameter is not supported with a FreqAI strategy.')
|
||||
"Using analyze-per-epoch parameter is not supported with a FreqAI strategy."
|
||||
)
|
||||
|
||||
|
||||
def _validate_freqai_include_timeframes(conf: Dict[str, Any], preliminary: bool) -> None:
|
||||
freqai_enabled = conf.get('freqai', {}).get('enabled', False)
|
||||
freqai_enabled = conf.get("freqai", {}).get("enabled", False)
|
||||
if freqai_enabled:
|
||||
main_tf = conf.get('timeframe', '5m')
|
||||
freqai_include_timeframes = conf.get('freqai', {}).get('feature_parameters', {}
|
||||
).get('include_timeframes', [])
|
||||
main_tf = conf.get("timeframe", "5m")
|
||||
freqai_include_timeframes = (
|
||||
conf.get("freqai", {}).get("feature_parameters", {}).get("include_timeframes", [])
|
||||
)
|
||||
|
||||
from freqtrade.exchange import timeframe_to_seconds
|
||||
|
||||
main_tf_s = timeframe_to_seconds(main_tf)
|
||||
offending_lines = []
|
||||
for tf in freqai_include_timeframes:
|
||||
@@ -352,57 +366,65 @@ def _validate_freqai_include_timeframes(conf: Dict[str, Any], preliminary: bool)
if offending_lines:
raise ConfigurationError(
f"Main timeframe of {main_tf} must be smaller or equal to FreqAI "
f"`include_timeframes`.Offending include-timeframes: {', '.join(offending_lines)}")
f"`include_timeframes`.Offending include-timeframes: {', '.join(offending_lines)}"
)

# Ensure that the base timeframe is included in the include_timeframes list
if not preliminary and main_tf not in freqai_include_timeframes:
feature_parameters = conf.get('freqai', {}).get('feature_parameters', {})
feature_parameters = conf.get("freqai", {}).get("feature_parameters", {})
include_timeframes = [main_tf] + freqai_include_timeframes
conf.get('freqai', {}).get('feature_parameters', {}) \
.update({**feature_parameters, 'include_timeframes': include_timeframes})
conf.get("freqai", {}).get("feature_parameters", {}).update(
{**feature_parameters, "include_timeframes": include_timeframes}
)


def _validate_freqai_backtest(conf: Dict[str, Any]) -> None:
if conf.get('runmode', RunMode.OTHER) == RunMode.BACKTEST:
freqai_enabled = conf.get('freqai', {}).get('enabled', False)
timerange = conf.get('timerange')
freqai_backtest_live_models = conf.get('freqai_backtest_live_models', False)
if conf.get("runmode", RunMode.OTHER) == RunMode.BACKTEST:
freqai_enabled = conf.get("freqai", {}).get("enabled", False)
timerange = conf.get("timerange")
freqai_backtest_live_models = conf.get("freqai_backtest_live_models", False)
if freqai_backtest_live_models and freqai_enabled and timerange:
raise ConfigurationError(
'Using timerange parameter is not supported with '
'--freqai-backtest-live-models parameter.')
"Using timerange parameter is not supported with "
"--freqai-backtest-live-models parameter."
)

if freqai_backtest_live_models and not freqai_enabled:
raise ConfigurationError(
'Using --freqai-backtest-live-models parameter is only '
'supported with a FreqAI strategy.')
"Using --freqai-backtest-live-models parameter is only "
"supported with a FreqAI strategy."
)

if freqai_enabled and not freqai_backtest_live_models and not timerange:
raise ConfigurationError(
'Please pass --timerange if you intend to use FreqAI for backtesting.')
"Please pass --timerange if you intend to use FreqAI for backtesting."
)


def _validate_consumers(conf: Dict[str, Any]) -> None:
emc_conf = conf.get('external_message_consumer', {})
if emc_conf.get('enabled', False):
if len(emc_conf.get('producers', [])) < 1:
emc_conf = conf.get("external_message_consumer", {})
if emc_conf.get("enabled", False):
if len(emc_conf.get("producers", [])) < 1:
raise ConfigurationError("You must specify at least 1 Producer to connect to.")

producer_names = [p['name'] for p in emc_conf.get('producers', [])]
producer_names = [p["name"] for p in emc_conf.get("producers", [])]
duplicates = [item for item, count in Counter(producer_names).items() if count > 1]
if duplicates:
raise ConfigurationError(
f"Producer names must be unique. Duplicate: {', '.join(duplicates)}")
if conf.get('process_only_new_candles', True):
f"Producer names must be unique. Duplicate: {', '.join(duplicates)}"
)
if conf.get("process_only_new_candles", True):
# Warning here or require it?
logger.warning("To receive best performance with external data, "
"please set `process_only_new_candles` to False")
logger.warning(
"To receive best performance with external data, "
"please set `process_only_new_candles` to False"
)


def _strategy_settings(conf: Dict[str, Any]) -> None:

process_deprecated_setting(conf, None, 'use_sell_signal', None, 'use_exit_signal')
process_deprecated_setting(conf, None, 'sell_profit_only', None, 'exit_profit_only')
process_deprecated_setting(conf, None, 'sell_profit_offset', None, 'exit_profit_offset')
process_deprecated_setting(conf, None, 'ignore_roi_if_buy_signal',
None, 'ignore_roi_if_entry_signal')
process_deprecated_setting(conf, None, "use_sell_signal", None, "use_exit_signal")
process_deprecated_setting(conf, None, "sell_profit_only", None, "exit_profit_only")
process_deprecated_setting(conf, None, "sell_profit_offset", None, "exit_profit_offset")
process_deprecated_setting(
conf, None, "ignore_roi_if_buy_signal", None, "ignore_roi_if_entry_signal"
)

@@ -1,6 +1,7 @@
|
||||
"""
|
||||
This module contains the configuration class
|
||||
"""
|
||||
|
||||
import logging
|
||||
import warnings
|
||||
from copy import deepcopy
|
||||
@@ -56,7 +57,7 @@ class Configuration:
|
||||
:return: configuration dictionary
|
||||
"""
|
||||
# Keep this method as staticmethod, so it can be used from interactive environments
|
||||
c = Configuration({'config': files}, RunMode.OTHER)
|
||||
c = Configuration({"config": files}, RunMode.OTHER)
|
||||
return c.get_config()
|
||||
|
||||
def load_config(self) -> Dict[str, Any]:
|
||||
@@ -69,19 +70,20 @@ class Configuration:
|
||||
|
||||
# Load environment variables
|
||||
from freqtrade.commands.arguments import NO_CONF_ALLOWED
|
||||
if self.args.get('command') not in NO_CONF_ALLOWED:
|
||||
|
||||
if self.args.get("command") not in NO_CONF_ALLOWED:
|
||||
env_data = enironment_vars_to_dict()
|
||||
config = deep_merge_dicts(env_data, config)
|
||||
|
||||
# Normalize config
|
||||
if 'internals' not in config:
|
||||
config['internals'] = {}
|
||||
if "internals" not in config:
|
||||
config["internals"] = {}
|
||||
|
||||
if 'pairlists' not in config:
|
||||
config['pairlists'] = []
|
||||
if "pairlists" not in config:
|
||||
config["pairlists"] = []
|
||||
|
||||
# Keep a copy of the original configuration file
|
||||
config['original_config'] = deepcopy(config)
|
||||
config["original_config"] = deepcopy(config)
|
||||
|
||||
self._process_logging_options(config)
|
||||
|
||||
@@ -105,7 +107,7 @@ class Configuration:
|
||||
from freqtrade.exchange.check_exchange import check_exchange
|
||||
|
||||
# Check if the exchange set by the user is supported
|
||||
check_exchange(config, config.get('experimental', {}).get('block_bad_exchanges', True))
|
||||
check_exchange(config, config.get("experimental", {}).get("block_bad_exchanges", True))
|
||||
|
||||
self._resolve_pairs_list(config)
|
||||
|
||||
@@ -119,52 +121,56 @@ class Configuration:
|
||||
the -v/--verbose, --logfile options
|
||||
"""
|
||||
# Log level
|
||||
config.update({'verbosity': self.args.get('verbosity', 0)})
|
||||
config.update({"verbosity": self.args.get("verbosity", 0)})
|
||||
|
||||
if 'logfile' in self.args and self.args['logfile']:
|
||||
config.update({'logfile': self.args['logfile']})
|
||||
if "logfile" in self.args and self.args["logfile"]:
|
||||
config.update({"logfile": self.args["logfile"]})
|
||||
|
||||
setup_logging(config)
|
||||
|
||||
def _process_trading_options(self, config: Config) -> None:
|
||||
if config['runmode'] not in TRADE_MODES:
|
||||
if config["runmode"] not in TRADE_MODES:
|
||||
return
|
||||
|
||||
if config.get('dry_run', False):
|
||||
logger.info('Dry run is enabled')
|
||||
if config.get('db_url') in [None, constants.DEFAULT_DB_PROD_URL]:
|
||||
if config.get("dry_run", False):
|
||||
logger.info("Dry run is enabled")
|
||||
if config.get("db_url") in [None, constants.DEFAULT_DB_PROD_URL]:
|
||||
# Default to in-memory db for dry_run if not specified
|
||||
config['db_url'] = constants.DEFAULT_DB_DRYRUN_URL
|
||||
config["db_url"] = constants.DEFAULT_DB_DRYRUN_URL
|
||||
else:
|
||||
if not config.get('db_url'):
|
||||
config['db_url'] = constants.DEFAULT_DB_PROD_URL
|
||||
logger.info('Dry run is disabled')
|
||||
if not config.get("db_url"):
|
||||
config["db_url"] = constants.DEFAULT_DB_PROD_URL
|
||||
logger.info("Dry run is disabled")
|
||||
|
||||
logger.info(f'Using DB: "{parse_db_uri_for_logging(config["db_url"])}"')
|
||||
|
||||
def _process_common_options(self, config: Config) -> None:
|
||||
|
||||
# Set strategy if not specified in config and or if it's non default
|
||||
if self.args.get('strategy') or not config.get('strategy'):
|
||||
config.update({'strategy': self.args.get('strategy')})
|
||||
if self.args.get("strategy") or not config.get("strategy"):
|
||||
config.update({"strategy": self.args.get("strategy")})
|
||||
|
||||
self._args_to_config(config, argname='strategy_path',
|
||||
logstring='Using additional Strategy lookup path: {}')
|
||||
self._args_to_config(
|
||||
config, argname="strategy_path", logstring="Using additional Strategy lookup path: {}"
|
||||
)
|
||||
|
||||
if ('db_url' in self.args and self.args['db_url'] and
|
||||
self.args['db_url'] != constants.DEFAULT_DB_PROD_URL):
|
||||
config.update({'db_url': self.args['db_url']})
|
||||
logger.info('Parameter --db-url detected ...')
|
||||
if (
|
||||
"db_url" in self.args
|
||||
and self.args["db_url"]
|
||||
and self.args["db_url"] != constants.DEFAULT_DB_PROD_URL
|
||||
):
|
||||
config.update({"db_url": self.args["db_url"]})
|
||||
logger.info("Parameter --db-url detected ...")
|
||||
|
||||
self._args_to_config(config, argname='db_url_from',
|
||||
logstring='Parameter --db-url-from detected ...')
|
||||
self._args_to_config(
|
||||
config, argname="db_url_from", logstring="Parameter --db-url-from detected ..."
|
||||
)
|
||||
|
||||
if config.get('force_entry_enable', False):
|
||||
logger.warning('`force_entry_enable` RPC message enabled.')
|
||||
if config.get("force_entry_enable", False):
|
||||
logger.warning("`force_entry_enable` RPC message enabled.")
|
||||
|
||||
# Support for sd_notify
|
||||
if 'sd_notify' in self.args and self.args['sd_notify']:
|
||||
config['internals'].update({'sd_notify': True})
|
||||
if "sd_notify" in self.args and self.args["sd_notify"]:
|
||||
config["internals"].update({"sd_notify": True})
|
||||
|
||||
def _process_datadir_options(self, config: Config) -> None:
|
||||
"""
|
||||
@@ -172,245 +178,274 @@ class Configuration:
|
||||
--user-data, --datadir
|
||||
"""
|
||||
# Check exchange parameter here - otherwise `datadir` might be wrong.
|
||||
if 'exchange' in self.args and self.args['exchange']:
|
||||
config['exchange']['name'] = self.args['exchange']
|
||||
if "exchange" in self.args and self.args["exchange"]:
|
||||
config["exchange"]["name"] = self.args["exchange"]
|
||||
logger.info(f"Using exchange {config['exchange']['name']}")
|
||||
|
||||
if 'pair_whitelist' not in config['exchange']:
|
||||
config['exchange']['pair_whitelist'] = []
|
||||
if "pair_whitelist" not in config["exchange"]:
|
||||
config["exchange"]["pair_whitelist"] = []
|
||||
|
||||
if 'user_data_dir' in self.args and self.args['user_data_dir']:
|
||||
config.update({'user_data_dir': self.args['user_data_dir']})
|
||||
elif 'user_data_dir' not in config:
|
||||
if "user_data_dir" in self.args and self.args["user_data_dir"]:
|
||||
config.update({"user_data_dir": self.args["user_data_dir"]})
|
||||
elif "user_data_dir" not in config:
|
||||
# Default to cwd/user_data (legacy option ...)
|
||||
config.update({'user_data_dir': str(Path.cwd() / 'user_data')})
|
||||
config.update({"user_data_dir": str(Path.cwd() / "user_data")})
|
||||
|
||||
# reset to user_data_dir so this contains the absolute path.
|
||||
config['user_data_dir'] = create_userdata_dir(config['user_data_dir'], create_dir=False)
|
||||
logger.info('Using user-data directory: %s ...', config['user_data_dir'])
|
||||
config["user_data_dir"] = create_userdata_dir(config["user_data_dir"], create_dir=False)
|
||||
logger.info("Using user-data directory: %s ...", config["user_data_dir"])
|
||||
|
||||
config.update({'datadir': create_datadir(config, self.args.get('datadir'))})
|
||||
logger.info('Using data directory: %s ...', config.get('datadir'))
|
||||
config.update({"datadir": create_datadir(config, self.args.get("datadir"))})
|
||||
logger.info("Using data directory: %s ...", config.get("datadir"))
|
||||
|
||||
if self.args.get('exportfilename'):
|
||||
self._args_to_config(config, argname='exportfilename',
|
||||
logstring='Storing backtest results to {} ...')
|
||||
config['exportfilename'] = Path(config['exportfilename'])
|
||||
if self.args.get("exportfilename"):
|
||||
self._args_to_config(
|
||||
config, argname="exportfilename", logstring="Storing backtest results to {} ..."
|
||||
)
|
||||
config["exportfilename"] = Path(config["exportfilename"])
|
||||
else:
|
||||
config['exportfilename'] = (config['user_data_dir']
|
||||
/ 'backtest_results')
|
||||
config["exportfilename"] = config["user_data_dir"] / "backtest_results"
|
||||
|
||||
if self.args.get('show_sensitive'):
|
||||
if self.args.get("show_sensitive"):
|
||||
logger.warning(
|
||||
"Sensitive information will be shown in the upcoming output. "
|
||||
"Please make sure to never share this output without redacting "
|
||||
"the information yourself.")
|
||||
"the information yourself."
|
||||
)
|
||||
|
||||
def _process_optimize_options(self, config: Config) -> None:
|
||||
|
||||
# This will override the strategy configuration
|
||||
self._args_to_config(config, argname='timeframe',
|
||||
logstring='Parameter -i/--timeframe detected ... '
|
||||
'Using timeframe: {} ...')
|
||||
|
||||
self._args_to_config(config, argname='position_stacking',
|
||||
logstring='Parameter --enable-position-stacking detected ...')
|
||||
self._args_to_config(
|
||||
config,
|
||||
argname="timeframe",
|
||||
logstring="Parameter -i/--timeframe detected ... Using timeframe: {} ...",
|
||||
)
|
||||
|
||||
self._args_to_config(
|
||||
config, argname='enable_protections',
|
||||
logstring='Parameter --enable-protections detected, enabling Protections. ...')
|
||||
config,
|
||||
argname="position_stacking",
|
||||
logstring="Parameter --enable-position-stacking detected ...",
|
||||
)
|
||||
|
||||
if 'use_max_market_positions' in self.args and not self.args["use_max_market_positions"]:
|
||||
config.update({'use_max_market_positions': False})
|
||||
logger.info('Parameter --disable-max-market-positions detected ...')
|
||||
logger.info('max_open_trades set to unlimited ...')
|
||||
elif 'max_open_trades' in self.args and self.args['max_open_trades']:
|
||||
config.update({'max_open_trades': self.args['max_open_trades']})
|
||||
logger.info('Parameter --max-open-trades detected, '
|
||||
'overriding max_open_trades to: %s ...', config.get('max_open_trades'))
|
||||
elif config['runmode'] in NON_UTIL_MODES:
|
||||
logger.info('Using max_open_trades: %s ...', config.get('max_open_trades'))
|
||||
self._args_to_config(
|
||||
config,
|
||||
argname="enable_protections",
|
||||
logstring="Parameter --enable-protections detected, enabling Protections. ...",
|
||||
)
|
||||
|
||||
if "use_max_market_positions" in self.args and not self.args["use_max_market_positions"]:
|
||||
config.update({"use_max_market_positions": False})
|
||||
logger.info("Parameter --disable-max-market-positions detected ...")
|
||||
logger.info("max_open_trades set to unlimited ...")
|
||||
elif "max_open_trades" in self.args and self.args["max_open_trades"]:
|
||||
config.update({"max_open_trades": self.args["max_open_trades"]})
|
||||
logger.info(
|
||||
"Parameter --max-open-trades detected, overriding max_open_trades to: %s ...",
|
||||
config.get("max_open_trades"),
|
||||
)
|
||||
elif config["runmode"] in NON_UTIL_MODES:
|
||||
logger.info("Using max_open_trades: %s ...", config.get("max_open_trades"))
|
||||
# Setting max_open_trades to infinite if -1
|
||||
if config.get('max_open_trades') == -1:
|
||||
config['max_open_trades'] = float('inf')
|
||||
if config.get("max_open_trades") == -1:
|
||||
config["max_open_trades"] = float("inf")
|
||||
|
||||
if self.args.get('stake_amount'):
|
||||
if self.args.get("stake_amount"):
|
||||
# Convert explicitly to float to support CLI argument for both unlimited and value
|
||||
try:
|
||||
self.args['stake_amount'] = float(self.args['stake_amount'])
|
||||
self.args["stake_amount"] = float(self.args["stake_amount"])
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
configurations = [
|
||||
('timeframe_detail',
|
||||
'Parameter --timeframe-detail detected, using {} for intra-candle backtesting ...'),
|
||||
('backtest_show_pair_list', 'Parameter --show-pair-list detected.'),
|
||||
('stake_amount',
|
||||
'Parameter --stake-amount detected, overriding stake_amount to: {} ...'),
|
||||
('dry_run_wallet',
|
||||
'Parameter --dry-run-wallet detected, overriding dry_run_wallet to: {} ...'),
|
||||
('fee', 'Parameter --fee detected, setting fee to: {} ...'),
|
||||
('timerange', 'Parameter --timerange detected: {} ...'),
|
||||
]
|
||||
(
|
||||
"timeframe_detail",
|
||||
"Parameter --timeframe-detail detected, using {} for intra-candle backtesting ...",
|
||||
),
|
||||
("backtest_show_pair_list", "Parameter --show-pair-list detected."),
|
||||
(
|
||||
"stake_amount",
|
||||
"Parameter --stake-amount detected, overriding stake_amount to: {} ...",
|
||||
),
|
||||
(
|
||||
"dry_run_wallet",
|
||||
"Parameter --dry-run-wallet detected, overriding dry_run_wallet to: {} ...",
|
||||
),
|
||||
("fee", "Parameter --fee detected, setting fee to: {} ..."),
|
||||
("timerange", "Parameter --timerange detected: {} ..."),
|
||||
]
|
||||
|
||||
self._args_to_config_loop(config, configurations)
|
||||
|
||||
self._process_datadir_options(config)
|
||||
|
||||
self._args_to_config(config, argname='strategy_list',
|
||||
logstring='Using strategy list of {} strategies', logfun=len)
|
||||
self._args_to_config(
|
||||
config,
|
||||
argname="strategy_list",
|
||||
logstring="Using strategy list of {} strategies",
|
||||
logfun=len,
|
||||
)
|
||||
|
||||
configurations = [
|
||||
('recursive_strategy_search',
|
||||
'Recursively searching for a strategy in the strategies folder.'),
|
||||
('timeframe', 'Overriding timeframe with Command line argument'),
|
||||
('export', 'Parameter --export detected: {} ...'),
|
||||
('backtest_breakdown', 'Parameter --breakdown detected ...'),
|
||||
('backtest_cache', 'Parameter --cache={} detected ...'),
|
||||
('disableparamexport', 'Parameter --disableparamexport detected: {} ...'),
|
||||
('freqai_backtest_live_models',
|
||||
'Parameter --freqai-backtest-live-models detected ...'),
|
||||
(
|
||||
"recursive_strategy_search",
|
||||
"Recursively searching for a strategy in the strategies folder.",
|
||||
),
|
||||
("timeframe", "Overriding timeframe with Command line argument"),
|
||||
("export", "Parameter --export detected: {} ..."),
|
||||
("backtest_breakdown", "Parameter --breakdown detected ..."),
|
||||
("backtest_cache", "Parameter --cache={} detected ..."),
|
||||
("disableparamexport", "Parameter --disableparamexport detected: {} ..."),
|
||||
("freqai_backtest_live_models", "Parameter --freqai-backtest-live-models detected ..."),
|
||||
]
|
||||
self._args_to_config_loop(config, configurations)
|
||||
|
||||
# Edge section:
|
||||
if 'stoploss_range' in self.args and self.args["stoploss_range"]:
|
||||
if "stoploss_range" in self.args and self.args["stoploss_range"]:
|
||||
txt_range = eval(self.args["stoploss_range"])
|
||||
config['edge'].update({'stoploss_range_min': txt_range[0]})
|
||||
config['edge'].update({'stoploss_range_max': txt_range[1]})
|
||||
config['edge'].update({'stoploss_range_step': txt_range[2]})
|
||||
logger.info('Parameter --stoplosses detected: %s ...', self.args["stoploss_range"])
|
||||
config["edge"].update({"stoploss_range_min": txt_range[0]})
|
||||
config["edge"].update({"stoploss_range_max": txt_range[1]})
|
||||
config["edge"].update({"stoploss_range_step": txt_range[2]})
|
||||
logger.info("Parameter --stoplosses detected: %s ...", self.args["stoploss_range"])
|
||||
|
||||
# Hyperopt section
|
||||
|
||||
configurations = [
|
||||
('hyperopt', 'Using Hyperopt class name: {}'),
|
||||
('hyperopt_path', 'Using additional Hyperopt lookup path: {}'),
|
||||
('hyperoptexportfilename', 'Using hyperopt file: {}'),
|
||||
('lookahead_analysis_exportfilename', 'Saving lookahead analysis results into {} ...'),
|
||||
('epochs', 'Parameter --epochs detected ... Will run Hyperopt with for {} epochs ...'),
|
||||
('spaces', 'Parameter -s/--spaces detected: {}'),
|
||||
('analyze_per_epoch', 'Parameter --analyze-per-epoch detected.'),
|
||||
('print_all', 'Parameter --print-all detected ...'),
|
||||
("hyperopt", "Using Hyperopt class name: {}"),
|
||||
("hyperopt_path", "Using additional Hyperopt lookup path: {}"),
|
||||
("hyperoptexportfilename", "Using hyperopt file: {}"),
|
||||
("lookahead_analysis_exportfilename", "Saving lookahead analysis results into {} ..."),
|
||||
("epochs", "Parameter --epochs detected ... Will run Hyperopt with for {} epochs ..."),
|
||||
("spaces", "Parameter -s/--spaces detected: {}"),
|
||||
("analyze_per_epoch", "Parameter --analyze-per-epoch detected."),
|
||||
("print_all", "Parameter --print-all detected ..."),
|
||||
]
|
||||
self._args_to_config_loop(config, configurations)
|
||||
|
||||
if 'print_colorized' in self.args and not self.args["print_colorized"]:
|
||||
logger.info('Parameter --no-color detected ...')
|
||||
config.update({'print_colorized': False})
|
||||
if "print_colorized" in self.args and not self.args["print_colorized"]:
|
||||
logger.info("Parameter --no-color detected ...")
|
||||
config.update({"print_colorized": False})
|
||||
else:
|
||||
config.update({'print_colorized': True})
|
||||
config.update({"print_colorized": True})
|
||||
|
||||
configurations = [
|
||||
('print_json', 'Parameter --print-json detected ...'),
|
||||
('export_csv', 'Parameter --export-csv detected: {}'),
|
||||
('hyperopt_jobs', 'Parameter -j/--job-workers detected: {}'),
|
||||
('hyperopt_random_state', 'Parameter --random-state detected: {}'),
|
||||
('hyperopt_min_trades', 'Parameter --min-trades detected: {}'),
|
||||
('hyperopt_loss', 'Using Hyperopt loss class name: {}'),
|
||||
('hyperopt_show_index', 'Parameter -n/--index detected: {}'),
|
||||
('hyperopt_list_best', 'Parameter --best detected: {}'),
|
||||
('hyperopt_list_profitable', 'Parameter --profitable detected: {}'),
|
||||
('hyperopt_list_min_trades', 'Parameter --min-trades detected: {}'),
|
||||
('hyperopt_list_max_trades', 'Parameter --max-trades detected: {}'),
|
||||
('hyperopt_list_min_avg_time', 'Parameter --min-avg-time detected: {}'),
|
||||
('hyperopt_list_max_avg_time', 'Parameter --max-avg-time detected: {}'),
|
||||
('hyperopt_list_min_avg_profit', 'Parameter --min-avg-profit detected: {}'),
|
||||
('hyperopt_list_max_avg_profit', 'Parameter --max-avg-profit detected: {}'),
|
||||
('hyperopt_list_min_total_profit', 'Parameter --min-total-profit detected: {}'),
|
||||
('hyperopt_list_max_total_profit', 'Parameter --max-total-profit detected: {}'),
|
||||
('hyperopt_list_min_objective', 'Parameter --min-objective detected: {}'),
|
||||
('hyperopt_list_max_objective', 'Parameter --max-objective detected: {}'),
|
||||
('hyperopt_list_no_details', 'Parameter --no-details detected: {}'),
|
||||
('hyperopt_show_no_header', 'Parameter --no-header detected: {}'),
|
||||
('hyperopt_ignore_missing_space', 'Paramter --ignore-missing-space detected: {}'),
|
||||
("print_json", "Parameter --print-json detected ..."),
|
||||
("export_csv", "Parameter --export-csv detected: {}"),
|
||||
("hyperopt_jobs", "Parameter -j/--job-workers detected: {}"),
|
||||
("hyperopt_random_state", "Parameter --random-state detected: {}"),
|
||||
("hyperopt_min_trades", "Parameter --min-trades detected: {}"),
|
||||
("hyperopt_loss", "Using Hyperopt loss class name: {}"),
|
||||
("hyperopt_show_index", "Parameter -n/--index detected: {}"),
|
||||
("hyperopt_list_best", "Parameter --best detected: {}"),
|
||||
("hyperopt_list_profitable", "Parameter --profitable detected: {}"),
|
||||
("hyperopt_list_min_trades", "Parameter --min-trades detected: {}"),
|
||||
("hyperopt_list_max_trades", "Parameter --max-trades detected: {}"),
|
||||
("hyperopt_list_min_avg_time", "Parameter --min-avg-time detected: {}"),
|
||||
("hyperopt_list_max_avg_time", "Parameter --max-avg-time detected: {}"),
|
||||
("hyperopt_list_min_avg_profit", "Parameter --min-avg-profit detected: {}"),
|
||||
("hyperopt_list_max_avg_profit", "Parameter --max-avg-profit detected: {}"),
|
||||
("hyperopt_list_min_total_profit", "Parameter --min-total-profit detected: {}"),
|
||||
("hyperopt_list_max_total_profit", "Parameter --max-total-profit detected: {}"),
|
||||
("hyperopt_list_min_objective", "Parameter --min-objective detected: {}"),
|
||||
("hyperopt_list_max_objective", "Parameter --max-objective detected: {}"),
|
||||
("hyperopt_list_no_details", "Parameter --no-details detected: {}"),
|
||||
("hyperopt_show_no_header", "Parameter --no-header detected: {}"),
|
||||
("hyperopt_ignore_missing_space", "Parameter --ignore-missing-space detected: {}"),
|
||||
]
|
||||
|
||||
self._args_to_config_loop(config, configurations)
|
||||
|
||||
def _process_plot_options(self, config: Config) -> None:
|
||||
|
||||
configurations = [
|
||||
('pairs', 'Using pairs {}'),
|
||||
('indicators1', 'Using indicators1: {}'),
|
||||
('indicators2', 'Using indicators2: {}'),
|
||||
('trade_ids', 'Filtering on trade_ids: {}'),
|
||||
('plot_limit', 'Limiting plot to: {}'),
|
||||
('plot_auto_open', 'Parameter --auto-open detected.'),
|
||||
('trade_source', 'Using trades from: {}'),
|
||||
('prepend_data', 'Prepend detected. Allowing data prepending.'),
|
||||
('erase', 'Erase detected. Deleting existing data.'),
|
||||
('no_trades', 'Parameter --no-trades detected.'),
|
||||
('timeframes', 'timeframes --timeframes: {}'),
|
||||
('days', 'Detected --days: {}'),
|
||||
('include_inactive', 'Detected --include-inactive-pairs: {}'),
|
||||
('download_trades', 'Detected --dl-trades: {}'),
|
||||
('dataformat_ohlcv', 'Using "{}" to store OHLCV data.'),
|
||||
('dataformat_trades', 'Using "{}" to store trades data.'),
|
||||
('show_timerange', 'Detected --show-timerange'),
|
||||
("pairs", "Using pairs {}"),
|
||||
("indicators1", "Using indicators1: {}"),
|
||||
("indicators2", "Using indicators2: {}"),
|
||||
("trade_ids", "Filtering on trade_ids: {}"),
|
||||
("plot_limit", "Limiting plot to: {}"),
|
||||
("plot_auto_open", "Parameter --auto-open detected."),
|
||||
("trade_source", "Using trades from: {}"),
|
||||
("prepend_data", "Prepend detected. Allowing data prepending."),
|
||||
("erase", "Erase detected. Deleting existing data."),
|
||||
("no_trades", "Parameter --no-trades detected."),
|
||||
("timeframes", "timeframes --timeframes: {}"),
|
||||
("days", "Detected --days: {}"),
|
||||
("include_inactive", "Detected --include-inactive-pairs: {}"),
|
||||
("download_trades", "Detected --dl-trades: {}"),
|
||||
("dataformat_ohlcv", 'Using "{}" to store OHLCV data.'),
|
||||
("dataformat_trades", 'Using "{}" to store trades data.'),
|
||||
("show_timerange", "Detected --show-timerange"),
|
||||
]
|
||||
self._args_to_config_loop(config, configurations)
|
||||
|
||||
def _process_data_options(self, config: Config) -> None:
|
||||
self._args_to_config(config, argname='new_pairs_days',
|
||||
logstring='Detected --new-pairs-days: {}')
|
||||
self._args_to_config(config, argname='trading_mode',
|
||||
logstring='Detected --trading-mode: {}')
|
||||
config['candle_type_def'] = CandleType.get_default(
|
||||
config.get('trading_mode', 'spot') or 'spot')
|
||||
config['trading_mode'] = TradingMode(config.get('trading_mode', 'spot') or 'spot')
|
||||
self._args_to_config(config, argname='candle_types',
|
||||
logstring='Detected --candle-types: {}')
|
||||
self._args_to_config(
|
||||
config, argname="new_pairs_days", logstring="Detected --new-pairs-days: {}"
|
||||
)
|
||||
self._args_to_config(
|
||||
config, argname="trading_mode", logstring="Detected --trading-mode: {}"
|
||||
)
|
||||
config["candle_type_def"] = CandleType.get_default(
|
||||
config.get("trading_mode", "spot") or "spot"
|
||||
)
|
||||
config["trading_mode"] = TradingMode(config.get("trading_mode", "spot") or "spot")
|
||||
self._args_to_config(
|
||||
config, argname="candle_types", logstring="Detected --candle-types: {}"
|
||||
)
|
||||
|
||||
def _process_analyze_options(self, config: Config) -> None:
|
||||
configurations = [
|
||||
('analysis_groups', 'Analysis reason groups: {}'),
|
||||
('enter_reason_list', 'Analysis enter tag list: {}'),
|
||||
('exit_reason_list', 'Analysis exit tag list: {}'),
|
||||
('indicator_list', 'Analysis indicator list: {}'),
|
||||
('timerange', 'Filter trades by timerange: {}'),
|
||||
('analysis_rejected', 'Analyse rejected signals: {}'),
|
||||
('analysis_to_csv', 'Store analysis tables to CSV: {}'),
|
||||
('analysis_csv_path', 'Path to store analysis CSVs: {}'),
|
||||
("analysis_groups", "Analysis reason groups: {}"),
|
||||
("enter_reason_list", "Analysis enter tag list: {}"),
|
||||
("exit_reason_list", "Analysis exit tag list: {}"),
|
||||
("indicator_list", "Analysis indicator list: {}"),
|
||||
("timerange", "Filter trades by timerange: {}"),
|
||||
("analysis_rejected", "Analyse rejected signals: {}"),
|
||||
("analysis_to_csv", "Store analysis tables to CSV: {}"),
|
||||
("analysis_csv_path", "Path to store analysis CSVs: {}"),
|
||||
# Lookahead analysis results
|
||||
('targeted_trade_amount', 'Targeted Trade amount: {}'),
|
||||
('minimum_trade_amount', 'Minimum Trade amount: {}'),
|
||||
('lookahead_analysis_exportfilename', 'Path to store lookahead-analysis-results: {}'),
|
||||
('startup_candle', 'Startup candle to be used on recursive analysis: {}'),
|
||||
("targeted_trade_amount", "Targeted Trade amount: {}"),
|
||||
("minimum_trade_amount", "Minimum Trade amount: {}"),
|
||||
("lookahead_analysis_exportfilename", "Path to store lookahead-analysis-results: {}"),
|
||||
("startup_candle", "Startup candle to be used on recursive analysis: {}"),
|
||||
]
|
||||
self._args_to_config_loop(config, configurations)
|
||||
|
||||
def _args_to_config_loop(self, config, configurations: List[Tuple[str, str]]) -> None:
|
||||
|
||||
for argname, logstring in configurations:
|
||||
self._args_to_config(config, argname=argname, logstring=logstring)
|
||||
|
||||
def _process_runmode(self, config: Config) -> None:
|
||||
|
||||
self._args_to_config(config, argname='dry_run',
|
||||
logstring='Parameter --dry-run detected, '
|
||||
'overriding dry_run to: {} ...')
|
||||
self._args_to_config(
|
||||
config,
|
||||
argname="dry_run",
|
||||
logstring="Parameter --dry-run detected, overriding dry_run to: {} ...",
|
||||
)
|
||||
|
||||
if not self.runmode:
|
||||
# Handle real mode, infer dry/live from config
|
||||
self.runmode = RunMode.DRY_RUN if config.get('dry_run', True) else RunMode.LIVE
|
||||
self.runmode = RunMode.DRY_RUN if config.get("dry_run", True) else RunMode.LIVE
|
||||
logger.info(f"Runmode set to {self.runmode.value}.")
|
||||
|
||||
config.update({'runmode': self.runmode})
|
||||
config.update({"runmode": self.runmode})
|
||||
|
||||
def _process_freqai_options(self, config: Config) -> None:
|
||||
self._args_to_config(
|
||||
config, argname="freqaimodel", logstring="Using freqaimodel class name: {}"
|
||||
)
|
||||
|
||||
self._args_to_config(config, argname='freqaimodel',
|
||||
logstring='Using freqaimodel class name: {}')
|
||||
|
||||
self._args_to_config(config, argname='freqaimodel_path',
|
||||
logstring='Using freqaimodel path: {}')
|
||||
self._args_to_config(
|
||||
config, argname="freqaimodel_path", logstring="Using freqaimodel path: {}"
|
||||
)
|
||||
|
||||
return
|
||||
|
||||
def _args_to_config(self, config: Config, argname: str,
|
||||
logstring: str, logfun: Optional[Callable] = None,
|
||||
deprecated_msg: Optional[str] = None) -> None:
|
||||
def _args_to_config(
|
||||
self,
|
||||
config: Config,
|
||||
argname: str,
|
||||
logstring: str,
|
||||
logfun: Optional[Callable] = None,
|
||||
deprecated_msg: Optional[str] = None,
|
||||
) -> None:
|
||||
"""
|
||||
:param config: Configuration dictionary
|
||||
:param argname: Argumentname in self.args - will be copied to config dict.
|
||||
@@ -420,9 +455,11 @@ class Configuration:
|
||||
sample: logfun=len (prints the length of the found
|
||||
configuration instead of the content)
|
||||
"""
|
||||
if (argname in self.args and self.args[argname] is not None
|
||||
and self.args[argname] is not False):
|
||||
|
||||
if (
|
||||
argname in self.args
|
||||
and self.args[argname] is not None
|
||||
and self.args[argname] is not False
|
||||
):
|
||||
config.update({argname: self.args[argname]})
|
||||
if logfun:
|
||||
logger.info(logstring.format(logfun(config[argname])))
|
||||
@@ -441,7 +478,7 @@ class Configuration:
|
||||
"""
|
||||
|
||||
if "pairs" in config:
|
||||
config['exchange']['pair_whitelist'] = config['pairs']
|
||||
config["exchange"]["pair_whitelist"] = config["pairs"]
|
||||
return
|
||||
|
||||
if "pairs_file" in self.args and self.args["pairs_file"]:
|
||||
@@ -451,19 +488,19 @@ class Configuration:
|
||||
# or if pairs file is specified explicitly
|
||||
if not pairs_file.exists():
|
||||
raise OperationalException(f'No pairs file found with path "{pairs_file}".')
|
||||
config['pairs'] = load_file(pairs_file)
|
||||
if isinstance(config['pairs'], list):
|
||||
config['pairs'].sort()
|
||||
config["pairs"] = load_file(pairs_file)
|
||||
if isinstance(config["pairs"], list):
|
||||
config["pairs"].sort()
|
||||
return
|
||||
|
||||
if 'config' in self.args and self.args['config']:
|
||||
if "config" in self.args and self.args["config"]:
|
||||
logger.info("Using pairlist from configuration.")
|
||||
config['pairs'] = config.get('exchange', {}).get('pair_whitelist')
|
||||
config["pairs"] = config.get("exchange", {}).get("pair_whitelist")
|
||||
else:
|
||||
# Fall back to /dl_path/pairs.json
|
||||
pairs_file = config['datadir'] / 'pairs.json'
|
||||
pairs_file = config["datadir"] / "pairs.json"
|
||||
if pairs_file.exists():
|
||||
logger.info(f'Reading pairs file "{pairs_file}".')
|
||||
config['pairs'] = load_file(pairs_file)
|
||||
if 'pairs' in config and isinstance(config['pairs'], list):
|
||||
config['pairs'].sort()
|
||||
config["pairs"] = load_file(pairs_file)
|
||||
if "pairs" in config and isinstance(config["pairs"], list):
|
||||
config["pairs"].sort()
|
||||
|
||||
@@ -12,9 +12,13 @@ from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def check_conflicting_settings(config: Config,
|
||||
section_old: Optional[str], name_old: str,
|
||||
section_new: Optional[str], name_new: str) -> None:
|
||||
def check_conflicting_settings(
|
||||
config: Config,
|
||||
section_old: Optional[str],
|
||||
name_old: str,
|
||||
section_new: Optional[str],
|
||||
name_new: str,
|
||||
) -> None:
|
||||
section_new_config = config.get(section_new, {}) if section_new else config
|
||||
section_old_config = config.get(section_old, {}) if section_old else config
|
||||
if name_new in section_new_config and name_old in section_old_config:
|
||||
@@ -29,9 +33,9 @@ def check_conflicting_settings(config: Config,
|
||||
)
|
||||
|
||||
|
||||
def process_removed_setting(config: Config,
|
||||
section1: str, name1: str,
|
||||
section2: Optional[str], name2: str) -> None:
|
||||
def process_removed_setting(
|
||||
config: Config, section1: str, name1: str, section2: Optional[str], name2: str
|
||||
) -> None:
|
||||
"""
|
||||
:param section1: Removed section
|
||||
:param name1: Removed setting name
|
||||
@@ -48,10 +52,13 @@ def process_removed_setting(config: Config,
|
||||
)
|
||||
|
||||
|
||||
def process_deprecated_setting(config: Config,
|
||||
section_old: Optional[str], name_old: str,
|
||||
section_new: Optional[str], name_new: str
|
||||
) -> None:
|
||||
def process_deprecated_setting(
|
||||
config: Config,
|
||||
section_old: Optional[str],
|
||||
name_old: str,
|
||||
section_new: Optional[str],
|
||||
name_new: str,
|
||||
) -> None:
|
||||
check_conflicting_settings(config, section_old, name_old, section_new, name_new)
|
||||
section_old_config = config.get(section_old, {}) if section_old else config
|
||||
|
||||
@@ -71,57 +78,91 @@ def process_deprecated_setting(config: Config,
|
||||
|
||||
|
||||
def process_temporary_deprecated_settings(config: Config) -> None:
|
||||
|
||||
# Kept for future deprecated / moved settings
|
||||
# check_conflicting_settings(config, 'ask_strategy', 'use_sell_signal',
|
||||
# 'experimental', 'use_sell_signal')
|
||||
|
||||
process_deprecated_setting(config, 'ask_strategy', 'ignore_buying_expired_candle_after',
|
||||
None, 'ignore_buying_expired_candle_after')
|
||||
process_deprecated_setting(
|
||||
config,
|
||||
"ask_strategy",
|
||||
"ignore_buying_expired_candle_after",
|
||||
None,
|
||||
"ignore_buying_expired_candle_after",
|
||||
)
|
||||
|
||||
process_deprecated_setting(config, None, 'forcebuy_enable', None, 'force_entry_enable')
|
||||
process_deprecated_setting(config, None, "forcebuy_enable", None, "force_entry_enable")
|
||||
|
||||
# New settings
|
||||
if config.get('telegram'):
|
||||
process_deprecated_setting(config['telegram'], 'notification_settings', 'sell',
|
||||
'notification_settings', 'exit')
|
||||
process_deprecated_setting(config['telegram'], 'notification_settings', 'sell_fill',
|
||||
'notification_settings', 'exit_fill')
|
||||
process_deprecated_setting(config['telegram'], 'notification_settings', 'sell_cancel',
|
||||
'notification_settings', 'exit_cancel')
|
||||
process_deprecated_setting(config['telegram'], 'notification_settings', 'buy',
|
||||
'notification_settings', 'entry')
|
||||
process_deprecated_setting(config['telegram'], 'notification_settings', 'buy_fill',
|
||||
'notification_settings', 'entry_fill')
|
||||
process_deprecated_setting(config['telegram'], 'notification_settings', 'buy_cancel',
|
||||
'notification_settings', 'entry_cancel')
|
||||
if config.get('webhook'):
|
||||
process_deprecated_setting(config, 'webhook', 'webhookbuy', 'webhook', 'webhookentry')
|
||||
process_deprecated_setting(config, 'webhook', 'webhookbuycancel',
|
||||
'webhook', 'webhookentrycancel')
|
||||
process_deprecated_setting(config, 'webhook', 'webhookbuyfill',
|
||||
'webhook', 'webhookentryfill')
|
||||
process_deprecated_setting(config, 'webhook', 'webhooksell', 'webhook', 'webhookexit')
|
||||
process_deprecated_setting(config, 'webhook', 'webhooksellcancel',
|
||||
'webhook', 'webhookexitcancel')
|
||||
process_deprecated_setting(config, 'webhook', 'webhooksellfill',
|
||||
'webhook', 'webhookexitfill')
|
||||
if config.get("telegram"):
|
||||
process_deprecated_setting(
|
||||
config["telegram"], "notification_settings", "sell", "notification_settings", "exit"
|
||||
)
|
||||
process_deprecated_setting(
|
||||
config["telegram"],
|
||||
"notification_settings",
|
||||
"sell_fill",
|
||||
"notification_settings",
|
||||
"exit_fill",
|
||||
)
|
||||
process_deprecated_setting(
|
||||
config["telegram"],
|
||||
"notification_settings",
|
||||
"sell_cancel",
|
||||
"notification_settings",
|
||||
"exit_cancel",
|
||||
)
|
||||
process_deprecated_setting(
|
||||
config["telegram"], "notification_settings", "buy", "notification_settings", "entry"
|
||||
)
|
||||
process_deprecated_setting(
|
||||
config["telegram"],
|
||||
"notification_settings",
|
||||
"buy_fill",
|
||||
"notification_settings",
|
||||
"entry_fill",
|
||||
)
|
||||
process_deprecated_setting(
|
||||
config["telegram"],
|
||||
"notification_settings",
|
||||
"buy_cancel",
|
||||
"notification_settings",
|
||||
"entry_cancel",
|
||||
)
|
||||
if config.get("webhook"):
|
||||
process_deprecated_setting(config, "webhook", "webhookbuy", "webhook", "webhookentry")
|
||||
process_deprecated_setting(
|
||||
config, "webhook", "webhookbuycancel", "webhook", "webhookentrycancel"
|
||||
)
|
||||
process_deprecated_setting(
|
||||
config, "webhook", "webhookbuyfill", "webhook", "webhookentryfill"
|
||||
)
|
||||
process_deprecated_setting(config, "webhook", "webhooksell", "webhook", "webhookexit")
|
||||
process_deprecated_setting(
|
||||
config, "webhook", "webhooksellcancel", "webhook", "webhookexitcancel"
|
||||
)
|
||||
process_deprecated_setting(
|
||||
config, "webhook", "webhooksellfill", "webhook", "webhookexitfill"
|
||||
)
|
||||
|
||||
# Legacy way - having them in experimental ...
|
||||
|
||||
process_removed_setting(config, 'experimental', 'use_sell_signal', None, 'use_exit_signal')
|
||||
process_removed_setting(config, 'experimental', 'sell_profit_only', None, 'exit_profit_only')
|
||||
process_removed_setting(config, 'experimental', 'ignore_roi_if_buy_signal',
|
||||
None, 'ignore_roi_if_entry_signal')
|
||||
process_removed_setting(config, "experimental", "use_sell_signal", None, "use_exit_signal")
|
||||
process_removed_setting(config, "experimental", "sell_profit_only", None, "exit_profit_only")
|
||||
process_removed_setting(
|
||||
config, "experimental", "ignore_roi_if_buy_signal", None, "ignore_roi_if_entry_signal"
|
||||
)
|
||||
|
||||
process_removed_setting(config, 'ask_strategy', 'use_sell_signal', None, 'use_exit_signal')
|
||||
process_removed_setting(config, 'ask_strategy', 'sell_profit_only', None, 'exit_profit_only')
|
||||
process_removed_setting(config, 'ask_strategy', 'sell_profit_offset',
|
||||
None, 'exit_profit_offset')
|
||||
process_removed_setting(config, 'ask_strategy', 'ignore_roi_if_buy_signal',
|
||||
None, 'ignore_roi_if_entry_signal')
|
||||
if (config.get('edge', {}).get('enabled', False)
|
||||
and 'capital_available_percentage' in config.get('edge', {})):
|
||||
process_removed_setting(config, "ask_strategy", "use_sell_signal", None, "use_exit_signal")
|
||||
process_removed_setting(config, "ask_strategy", "sell_profit_only", None, "exit_profit_only")
|
||||
process_removed_setting(
|
||||
config, "ask_strategy", "sell_profit_offset", None, "exit_profit_offset"
|
||||
)
|
||||
process_removed_setting(
|
||||
config, "ask_strategy", "ignore_roi_if_buy_signal", None, "ignore_roi_if_entry_signal"
|
||||
)
|
||||
if config.get("edge", {}).get(
|
||||
"enabled", False
|
||||
) and "capital_available_percentage" in config.get("edge", {}):
|
||||
raise ConfigurationError(
|
||||
"DEPRECATED: "
|
||||
"Using 'edge.capital_available_percentage' has been deprecated in favor of "
|
||||
@@ -129,12 +170,11 @@ def process_temporary_deprecated_settings(config: Config) -> None:
|
||||
"'tradable_balance_ratio' and remove 'capital_available_percentage' "
|
||||
"from the edge configuration."
|
||||
)
|
||||
if 'ticker_interval' in config:
|
||||
|
||||
if "ticker_interval" in config:
|
||||
raise ConfigurationError(
|
||||
"DEPRECATED: 'ticker_interval' detected. "
|
||||
"Please use 'timeframe' instead of 'ticker_interval."
|
||||
)
|
||||
|
||||
if 'protections' in config:
|
||||
if "protections" in config:
|
||||
logger.warning("DEPRECATED: Setting 'protections' in the configuration is deprecated.")
|
||||
|
||||
@@ -5,4 +5,4 @@ def running_in_docker() -> bool:
"""
Check if we are running in a docker container
"""
return os.environ.get('FT_APP_ENV') == 'docker'
return os.environ.get("FT_APP_ENV") == "docker"

@@ -4,8 +4,14 @@ from pathlib import Path
from typing import Optional

from freqtrade.configuration.detect_environment import running_in_docker
from freqtrade.constants import (USER_DATA_FILES, USERPATH_FREQAIMODELS, USERPATH_HYPEROPTS,
USERPATH_NOTEBOOKS, USERPATH_STRATEGIES, Config)
from freqtrade.constants import (
USER_DATA_FILES,
USERPATH_FREQAIMODELS,
USERPATH_HYPEROPTS,
USERPATH_NOTEBOOKS,
USERPATH_STRATEGIES,
Config,
)
from freqtrade.exceptions import OperationalException


@@ -13,16 +19,15 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_datadir(config: Config, datadir: Optional[str] = None) -> Path:
|
||||
|
||||
folder = Path(datadir) if datadir else Path(f"{config['user_data_dir']}/data")
|
||||
if not datadir:
|
||||
# set datadir
|
||||
exchange_name = config.get('exchange', {}).get('name', '').lower()
|
||||
exchange_name = config.get("exchange", {}).get("name", "").lower()
|
||||
folder = folder.joinpath(exchange_name)
|
||||
|
||||
if not folder.is_dir():
|
||||
folder.mkdir(parents=True)
|
||||
logger.info(f'Created data directory: {datadir}')
|
||||
logger.info(f"Created data directory: {datadir}")
|
||||
return folder
|
||||
|
||||
|
||||
@@ -34,8 +39,8 @@ def chown_user_directory(directory: Path) -> None:
|
||||
if running_in_docker():
|
||||
try:
|
||||
import subprocess
|
||||
subprocess.check_output(
|
||||
['sudo', 'chown', '-R', 'ftuser:', str(directory.resolve())])
|
||||
|
||||
subprocess.check_output(["sudo", "chown", "-R", "ftuser:", str(directory.resolve())])
|
||||
except Exception:
|
||||
logger.warning(f"Could not chown {directory}")
|
||||
|
||||
@@ -50,18 +55,28 @@ def create_userdata_dir(directory: str, create_dir: bool = False) -> Path:
|
||||
:param create_dir: Create directory if it does not exist.
|
||||
:return: Path object containing the directory
|
||||
"""
|
||||
sub_dirs = ["backtest_results", "data", USERPATH_HYPEROPTS, "hyperopt_results", "logs",
|
||||
USERPATH_NOTEBOOKS, "plot", USERPATH_STRATEGIES, USERPATH_FREQAIMODELS]
|
||||
sub_dirs = [
|
||||
"backtest_results",
|
||||
"data",
|
||||
USERPATH_HYPEROPTS,
|
||||
"hyperopt_results",
|
||||
"logs",
|
||||
USERPATH_NOTEBOOKS,
|
||||
"plot",
|
||||
USERPATH_STRATEGIES,
|
||||
USERPATH_FREQAIMODELS,
|
||||
]
|
||||
folder = Path(directory)
|
||||
chown_user_directory(folder)
|
||||
if not folder.is_dir():
|
||||
if create_dir:
|
||||
folder.mkdir(parents=True)
|
||||
logger.info(f'Created user-data directory: {folder}')
|
||||
logger.info(f"Created user-data directory: {folder}")
|
||||
else:
|
||||
raise OperationalException(
|
||||
f"Directory `{folder}` does not exist. "
|
||||
"Please use `freqtrade create-userdir` to create a user directory")
|
||||
"Please use `freqtrade create-userdir` to create a user directory"
|
||||
)
|
||||
|
||||
# Create required subdirectories
|
||||
for f in sub_dirs:
|
||||
|
||||
@@ -16,9 +16,9 @@ def _get_var_typed(val):
try:
return float(val)
except ValueError:
if val.lower() in ('t', 'true'):
if val.lower() in ("t", "true"):
return True
elif val.lower() in ('f', 'false'):
elif val.lower() in ("f", "false"):
return False
# keep as string
return val
@@ -32,16 +32,21 @@ def _flat_vars_to_nested_dict(env_dict: Dict[str, Any], prefix: str) -> Dict[str
:param prefix: Prefix to consider (usually FREQTRADE__)
:return: Nested dict based on available and relevant variables.
"""
no_convert = ['CHAT_ID', 'PASSWORD']
no_convert = ["CHAT_ID", "PASSWORD"]
relevant_vars: Dict[str, Any] = {}

for env_var, val in sorted(env_dict.items()):
if env_var.startswith(prefix):
logger.info(f"Loading variable '{env_var}'")
key = env_var.replace(prefix, '')
for k in reversed(key.split('__')):
val = {k.lower(): _get_var_typed(val)
if not isinstance(val, dict) and k not in no_convert else val}
key = env_var.replace(prefix, "")
for k in reversed(key.split("__")):
val = {
k.lower(): (
_get_var_typed(val)
if not isinstance(val, dict) and k not in no_convert
else val
)
}
relevant_vars = deep_merge_dicts(val, relevant_vars)
return relevant_vars


@@ -1,6 +1,7 @@
|
||||
"""
|
||||
This module contain functions to load the configuration file
|
||||
"""
|
||||
|
||||
import logging
|
||||
import re
|
||||
import sys
|
||||
@@ -25,25 +26,25 @@ def log_config_error_range(path: str, errmsg: str) -> str:
|
||||
"""
|
||||
Parses configuration file and prints range around error
|
||||
"""
|
||||
if path != '-':
|
||||
offsetlist = re.findall(r'(?<=Parse\serror\sat\soffset\s)\d+', errmsg)
|
||||
if path != "-":
|
||||
offsetlist = re.findall(r"(?<=Parse\serror\sat\soffset\s)\d+", errmsg)
|
||||
if offsetlist:
|
||||
offset = int(offsetlist[0])
|
||||
text = Path(path).read_text()
|
||||
# Fetch an offset of 80 characters around the error line
|
||||
subtext = text[offset - min(80, offset):offset + 80]
|
||||
segments = subtext.split('\n')
|
||||
subtext = text[offset - min(80, offset) : offset + 80]
|
||||
segments = subtext.split("\n")
|
||||
if len(segments) > 3:
|
||||
# Remove first and last lines, to avoid odd truncations
|
||||
return '\n'.join(segments[1:-1])
|
||||
return "\n".join(segments[1:-1])
|
||||
else:
|
||||
return subtext
|
||||
return ''
|
||||
return ""
|
||||
|
||||
|
||||
def load_file(path: Path) -> Dict[str, Any]:
|
||||
try:
|
||||
with path.open('r') as file:
|
||||
with path.open("r") as file:
|
||||
config = rapidjson.load(file, parse_mode=CONFIG_PARSE_MODE)
|
||||
except FileNotFoundError:
|
||||
raise OperationalException(f'File "{path}" not found!') from None
|
||||
@@ -58,25 +59,27 @@ def load_config_file(path: str) -> Dict[str, Any]:
|
||||
"""
|
||||
try:
|
||||
# Read config from stdin if requested in the options
|
||||
with Path(path).open() if path != '-' else sys.stdin as file:
|
||||
with Path(path).open() if path != "-" else sys.stdin as file:
|
||||
config = rapidjson.load(file, parse_mode=CONFIG_PARSE_MODE)
|
||||
except FileNotFoundError:
|
||||
raise OperationalException(
|
||||
f'Config file "{path}" not found!'
|
||||
' Please create a config file or check whether it exists.') from None
|
||||
" Please create a config file or check whether it exists."
|
||||
) from None
|
||||
except rapidjson.JSONDecodeError as e:
|
||||
err_range = log_config_error_range(path, str(e))
|
||||
raise ConfigurationError(
|
||||
f'{e}\n'
|
||||
f'Please verify the following segment of your configuration:\n{err_range}'
|
||||
if err_range else 'Please verify your configuration file for syntax errors.'
|
||||
f"{e}\nPlease verify the following segment of your configuration:\n{err_range}"
|
||||
if err_range
|
||||
else "Please verify your configuration file for syntax errors."
|
||||
)
|
||||
|
||||
return config
|
||||
|
||||
|
||||
def load_from_files(
|
||||
files: List[str], base_path: Optional[Path] = None, level: int = 0) -> Dict[str, Any]:
|
||||
files: List[str], base_path: Optional[Path] = None, level: int = 0
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Recursively load configuration files if specified.
|
||||
Sub-files are assumed to be relative to the initial config.
|
||||
@@ -90,8 +93,8 @@ def load_from_files(
|
||||
files_loaded = []
|
||||
# We expect here a list of config filenames
|
||||
for filename in files:
|
||||
logger.info(f'Using config: {filename} ...')
|
||||
if filename == '-':
|
||||
logger.info(f"Using config: {filename} ...")
|
||||
if filename == "-":
|
||||
# Immediately load stdin and return
|
||||
return load_config_file(filename)
|
||||
file = Path(filename)
|
||||
@@ -100,10 +103,11 @@ def load_from_files(
|
||||
file = base_path / file
|
||||
|
||||
config_tmp = load_config_file(str(file))
|
||||
if 'add_config_files' in config_tmp:
|
||||
if "add_config_files" in config_tmp:
|
||||
config_sub = load_from_files(
|
||||
config_tmp['add_config_files'], file.resolve().parent, level + 1)
|
||||
files_loaded.extend(config_sub.get('config_files', []))
|
||||
config_tmp["add_config_files"], file.resolve().parent, level + 1
|
||||
)
|
||||
files_loaded.extend(config_sub.get("config_files", []))
|
||||
config_tmp = deep_merge_dicts(config_tmp, config_sub)
|
||||
|
||||
files_loaded.insert(0, str(file))
|
||||
@@ -111,6 +115,6 @@ def load_from_files(
|
||||
# Merge config options, overwriting prior values
|
||||
config = deep_merge_dicts(config_tmp, config)
|
||||
|
||||
config['config_files'] = files_loaded
|
||||
config["config_files"] = files_loaded
|
||||
|
||||
return config
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""
|
||||
This module contains the argument manager class
|
||||
"""
|
||||
|
||||
import logging
|
||||
import re
|
||||
from datetime import datetime, timezone
|
||||
@@ -22,9 +23,13 @@ class TimeRange:
|
||||
if *type is None, don't use corresponding startvalue.
|
||||
"""
|
||||
|
||||
def __init__(self, starttype: Optional[str] = None, stoptype: Optional[str] = None,
|
||||
startts: int = 0, stopts: int = 0):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
starttype: Optional[str] = None,
|
||||
stoptype: Optional[str] = None,
|
||||
startts: int = 0,
|
||||
stopts: int = 0,
|
||||
):
|
||||
self.starttype: Optional[str] = starttype
|
||||
self.stoptype: Optional[str] = stoptype
|
||||
self.startts: int = startts
|
||||
@@ -48,12 +53,12 @@ class TimeRange:
|
||||
Returns a string representation of the timerange as used by parse_timerange.
|
||||
Follows the format yyyymmdd-yyyymmdd - leaving out the parts that are not set.
|
||||
"""
|
||||
start = ''
|
||||
stop = ''
|
||||
start = ""
|
||||
stop = ""
|
||||
if startdt := self.startdt:
|
||||
start = startdt.strftime('%Y%m%d')
|
||||
start = startdt.strftime("%Y%m%d")
|
||||
if stopdt := self.stopdt:
|
||||
stop = stopdt.strftime('%Y%m%d')
|
||||
stop = stopdt.strftime("%Y%m%d")
|
||||
return f"{start}-{stop}"
|
||||
|
||||
@property
|
||||
@@ -61,7 +66,7 @@ class TimeRange:
|
||||
"""
|
||||
Returns a string representation of the start date
|
||||
"""
|
||||
val = 'unbounded'
|
||||
val = "unbounded"
|
||||
if (startdt := self.startdt) is not None:
|
||||
val = startdt.strftime(DATETIME_PRINT_FORMAT)
|
||||
return val
|
||||
@@ -71,15 +76,19 @@ class TimeRange:
|
||||
"""
|
||||
Returns a string representation of the stop date
|
||||
"""
|
||||
val = 'unbounded'
|
||||
val = "unbounded"
|
||||
if (stopdt := self.stopdt) is not None:
|
||||
val = stopdt.strftime(DATETIME_PRINT_FORMAT)
|
||||
return val
|
||||
|
||||
def __eq__(self, other):
|
||||
"""Override the default Equals behavior"""
|
||||
return (self.starttype == other.starttype and self.stoptype == other.stoptype
|
||||
and self.startts == other.startts and self.stopts == other.stopts)
|
||||
return (
|
||||
self.starttype == other.starttype
|
||||
and self.stoptype == other.stoptype
|
||||
and self.startts == other.startts
|
||||
and self.stopts == other.stopts
|
||||
)
|
||||
|
||||
def subtract_start(self, seconds: int) -> None:
|
||||
"""
|
||||
@@ -90,8 +99,9 @@ class TimeRange:
|
||||
if self.startts:
|
||||
self.startts = self.startts - seconds
|
||||
|
||||
def adjust_start_if_necessary(self, timeframe_secs: int, startup_candles: int,
|
||||
min_date: datetime) -> None:
|
||||
def adjust_start_if_necessary(
self, timeframe_secs: int, startup_candles: int, min_date: datetime
) -> None:
"""
Adjust startts by <startup_candles> candles.
Applies only if no startup-candles have been available.
@@ -101,13 +111,13 @@ class TimeRange:
has to be moved
:return: None (Modifies the object in place)
"""
if (not self.starttype or (startup_candles
and min_date.timestamp() >= self.startts)):
if not self.starttype or (startup_candles and min_date.timestamp() >= self.startts):
# If no startts was defined, or backtest-data starts at the defined backtest-date
logger.warning("Moving start-date by %s candles to account for startup time.",
startup_candles)
logger.warning(
"Moving start-date by %s candles to account for startup time.", startup_candles
)
self.startts = int(min_date.timestamp() + timeframe_secs * startup_candles)
self.starttype = 'date'
self.starttype = "date"

@classmethod
def parse_timerange(cls, text: Optional[str]) -> Self:
@@ -118,16 +128,17 @@ class TimeRange:
"""
if not text:
return cls(None, None, 0, 0)
syntax = [(r'^-(\d{8})$', (None, 'date')),
(r'^(\d{8})-$', ('date', None)),
(r'^(\d{8})-(\d{8})$', ('date', 'date')),
(r'^-(\d{10})$', (None, 'date')),
(r'^(\d{10})-$', ('date', None)),
(r'^(\d{10})-(\d{10})$', ('date', 'date')),
(r'^-(\d{13})$', (None, 'date')),
(r'^(\d{13})-$', ('date', None)),
(r'^(\d{13})-(\d{13})$', ('date', 'date')),
]
syntax = [
(r"^-(\d{8})$", (None, "date")),
(r"^(\d{8})-$", ("date", None)),
(r"^(\d{8})-(\d{8})$", ("date", "date")),
(r"^-(\d{10})$", (None, "date")),
(r"^(\d{10})-$", ("date", None)),
(r"^(\d{10})-(\d{10})$", ("date", "date")),
(r"^-(\d{13})$", (None, "date")),
(r"^(\d{13})-$", ("date", None)),
(r"^(\d{13})-(\d{13})$", ("date", "date")),
]
for rex, stype in syntax:
# Apply the regular expression to text
match = re.match(rex, text)
@@ -138,9 +149,12 @@ class TimeRange:
stop: int = 0
if stype[0]:
starts = rvals[index]
if stype[0] == 'date' and len(starts) == 8:
start = int(datetime.strptime(starts, '%Y%m%d').replace(
tzinfo=timezone.utc).timestamp())
if stype[0] == "date" and len(starts) == 8:
start = int(
datetime.strptime(starts, "%Y%m%d")
.replace(tzinfo=timezone.utc)
.timestamp()
)
elif len(starts) == 13:
start = int(starts) // 1000
else:
@@ -148,15 +162,19 @@ class TimeRange:
index += 1
if stype[1]:
stops = rvals[index]
if stype[1] == 'date' and len(stops) == 8:
stop = int(datetime.strptime(stops, '%Y%m%d').replace(
tzinfo=timezone.utc).timestamp())
if stype[1] == "date" and len(stops) == 8:
stop = int(
datetime.strptime(stops, "%Y%m%d")
.replace(tzinfo=timezone.utc)
.timestamp()
)
elif len(stops) == 13:
stop = int(stops) // 1000
else:
stop = int(stops)
if start > stop > 0:
raise ConfigurationError(
f'Start date is after stop date for timerange "{text}"')
f'Start date is after stop date for timerange "{text}"'
)
return cls(stype[0], stype[1], start, stop)
raise ConfigurationError(f'Incorrect syntax for timerange "{text}"')

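For orientation, a minimal usage sketch of the timerange syntax parsed above (illustrative values; attribute names taken from the class shown here, with stopts assumed symmetric to startts):

from freqtrade.configuration import TimeRange

# 8-digit values are parsed as UTC calendar dates; 10/13-digit values as epoch (milli)seconds.
tr = TimeRange.parse_timerange("20240101-20240201")
print(tr.starttype, tr.stoptype)  # "date", "date"
print(tr.startts, tr.stopts)      # epoch seconds for both boundaries
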
File diff suppressed because it is too large
@@ -3,6 +3,4 @@ Module to handle data operations for freqtrade
"""

# limit what's imported when using `from freqtrade.data import *`
__all__ = [
'converter'
]
__all__ = ["converter"]

@@ -1,6 +1,7 @@
"""
Helpers when analyzing backtest data
"""

import logging
from copy import copy
from datetime import datetime, timezone
@@ -21,14 +22,35 @@ from freqtrade.types import BacktestHistoryEntryType, BacktestResultType
logger = logging.getLogger(__name__)

# Newest format
BT_DATA_COLUMNS = ['pair', 'stake_amount', 'max_stake_amount', 'amount',
'open_date', 'close_date', 'open_rate', 'close_rate',
'fee_open', 'fee_close', 'trade_duration',
'profit_ratio', 'profit_abs', 'exit_reason',
'initial_stop_loss_abs', 'initial_stop_loss_ratio', 'stop_loss_abs',
'stop_loss_ratio', 'min_rate', 'max_rate', 'is_open', 'enter_tag',
'leverage', 'is_short', 'open_timestamp', 'close_timestamp', 'orders'
]
BT_DATA_COLUMNS = [
"pair",
"stake_amount",
"max_stake_amount",
"amount",
"open_date",
"close_date",
"open_rate",
"close_rate",
"fee_open",
"fee_close",
"trade_duration",
"profit_ratio",
"profit_abs",
"exit_reason",
"initial_stop_loss_abs",
"initial_stop_loss_ratio",
"stop_loss_abs",
"stop_loss_ratio",
"min_rate",
"max_rate",
"is_open",
"enter_tag",
"leverage",
"is_short",
"open_timestamp",
"close_timestamp",
"orders",
]

def get_latest_optimize_filename(directory: Union[Path, str], variant: str) -> str:
@@ -50,15 +72,16 @@ def get_latest_optimize_filename(directory: Union[Path, str], variant: str) -> s

if not filename.is_file():
raise ValueError(
f"Directory '{directory}' does not seem to contain backtest statistics yet.")
f"Directory '{directory}' does not seem to contain backtest statistics yet."
)

with filename.open() as file:
data = json_load(file)

if f'latest_{variant}' not in data:
if f"latest_{variant}" not in data:
raise ValueError(f"Invalid '{LAST_BT_RESULT_FN}' format.")

return data[f'latest_{variant}']
return data[f"latest_{variant}"]

def get_latest_backtest_filename(directory: Union[Path, str]) -> str:
@@ -71,7 +94,7 @@ def get_latest_backtest_filename(directory: Union[Path, str]) -> str:
* `directory/.last_result.json` does not exist
* `directory/.last_result.json` has the wrong content
"""
return get_latest_optimize_filename(directory, 'backtest')
return get_latest_optimize_filename(directory, "backtest")

def get_latest_hyperopt_filename(directory: Union[Path, str]) -> str:
@@ -85,14 +108,15 @@ def get_latest_hyperopt_filename(directory: Union[Path, str]) -> str:
* `directory/.last_result.json` has the wrong content
"""
try:
return get_latest_optimize_filename(directory, 'hyperopt')
return get_latest_optimize_filename(directory, "hyperopt")
except ValueError:
# Return default (legacy) pickle filename
return 'hyperopt_results.pickle'
return "hyperopt_results.pickle"

def get_latest_hyperopt_file(
directory: Union[Path, str], predef_filename: Optional[str] = None) -> Path:
directory: Union[Path, str], predef_filename: Optional[str] = None
) -> Path:
"""
Get latest hyperopt export based on '.last_result.json'.
:param directory: Directory to search for last result
@@ -107,7 +131,8 @@ def get_latest_hyperopt_file(
if predef_filename:
if Path(predef_filename).is_absolute():
raise ConfigurationError(
"--hyperopt-filename expects only the filename, not an absolute path.")
"--hyperopt-filename expects only the filename, not an absolute path."
)
return directory / predef_filename
return directory / get_latest_hyperopt_filename(directory)

@@ -126,7 +151,7 @@ def load_backtest_metadata(filename: Union[Path, str]) -> Dict[str, Any]:
except FileNotFoundError:
return {}
except Exception as e:
raise OperationalException('Unexpected error while loading backtest metadata.') from e
raise OperationalException("Unexpected error while loading backtest metadata.") from e

def load_backtest_stats(filename: Union[Path, str]) -> BacktestResultType:
@@ -147,7 +172,7 @@ def load_backtest_stats(filename: Union[Path, str]) -> BacktestResultType:

# Legacy list format does not contain metadata.
if isinstance(data, dict):
data['metadata'] = load_backtest_metadata(filename)
data["metadata"] = load_backtest_metadata(filename)
return data

@@ -159,38 +184,39 @@ def load_and_merge_backtest_result(strategy_name: str, filename: Path, results:
:param results: dict to merge the result to.
"""
bt_data = load_backtest_stats(filename)
k: Literal['metadata', 'strategy']
for k in ('metadata', 'strategy'):  # type: ignore
k: Literal["metadata", "strategy"]
for k in ("metadata", "strategy"):  # type: ignore
results[k][strategy_name] = bt_data[k][strategy_name]
results['metadata'][strategy_name]['filename'] = filename.stem
comparison = bt_data['strategy_comparison']
results["metadata"][strategy_name]["filename"] = filename.stem
comparison = bt_data["strategy_comparison"]
for i in range(len(comparison)):
if comparison[i]['key'] == strategy_name:
results['strategy_comparison'].append(comparison[i])
if comparison[i]["key"] == strategy_name:
results["strategy_comparison"].append(comparison[i])
break

def _get_backtest_files(dirname: Path) -> List[Path]:
# Weird glob expression here avoids including .meta.json files.
return list(reversed(sorted(dirname.glob('backtest-result-*-[0-9][0-9].json'))))
return list(reversed(sorted(dirname.glob("backtest-result-*-[0-9][0-9].json"))))

def _extract_backtest_result(filename: Path) -> List[BacktestHistoryEntryType]:
metadata = load_backtest_metadata(filename)
return [
{
'filename': filename.stem,
'strategy': s,
'run_id': v['run_id'],
'notes': v.get('notes', ''),
"filename": filename.stem,
"strategy": s,
"run_id": v["run_id"],
"notes": v.get("notes", ""),
# Backtest "run" time
'backtest_start_time': v['backtest_start_time'],
"backtest_start_time": v["backtest_start_time"],
# Backtest timerange
'backtest_start_ts': v.get('backtest_start_ts', None),
'backtest_end_ts': v.get('backtest_end_ts', None),
'timeframe': v.get('timeframe', None),
'timeframe_detail': v.get('timeframe_detail', None),
} for s, v in metadata.items()
"backtest_start_ts": v.get("backtest_start_ts", None),
"backtest_end_ts": v.get("backtest_end_ts", None),
"timeframe": v.get("timeframe", None),
"timeframe_detail": v.get("timeframe_detail", None),
}
for s, v in metadata.items()
]

@@ -218,7 +244,7 @@ def delete_backtest_result(file_abs: Path):
"""
# *.meta.json
logger.info(f"Deleting backtest result file: {file_abs.name}")
file_abs_meta = file_abs.with_suffix('.meta.json')
file_abs_meta = file_abs.with_suffix(".meta.json")
file_abs.unlink()
file_abs_meta.unlink()

@@ -244,12 +270,13 @@ def get_backtest_market_change(filename: Path, include_ts: bool = True) -> pd.Da
"""
df = pd.read_feather(filename)
if include_ts:
df.loc[:, '__date_ts'] = df.loc[:, 'date'].astype(np.int64) // 1000 // 1000
df.loc[:, "__date_ts"] = df.loc[:, "date"].astype(np.int64) // 1000 // 1000
return df

def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, str],
min_backtest_date: Optional[datetime] = None) -> Dict[str, Any]:
def find_existing_backtest_stats(
dirname: Union[Path, str], run_ids: Dict[str, str], min_backtest_date: Optional[datetime] = None
) -> Dict[str, Any]:
"""
Find existing backtest stats that match specified run IDs and load them.
:param dirname: pathlib.Path object, or string pointing to the file.
@@ -261,9 +288,9 @@ def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, s
run_ids = copy(run_ids)
dirname = Path(dirname)
results: Dict[str, Any] = {
'metadata': {},
'strategy': {},
'strategy_comparison': [],
"metadata": {},
"strategy": {},
"strategy_comparison": [],
}

for filename in _get_backtest_files(dirname):
@@ -280,14 +307,14 @@ def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, s
continue

if min_backtest_date is not None:
backtest_date = strategy_metadata['backtest_start_time']
backtest_date = strategy_metadata["backtest_start_time"]
backtest_date = datetime.fromtimestamp(backtest_date, tz=timezone.utc)
if backtest_date < min_backtest_date:
# Do not use a cached result for this strategy as first result is too old.
del run_ids[strategy_name]
continue

if strategy_metadata['run_id'] == run_id:
if strategy_metadata["run_id"] == run_id:
del run_ids[strategy_name]
load_and_merge_backtest_result(strategy_name, filename, results)

@@ -300,20 +327,20 @@ def _load_backtest_data_df_compatibility(df: pd.DataFrame) -> pd.DataFrame:
"""
Compatibility support for older backtest data.
"""
df['open_date'] = pd.to_datetime(df['open_date'], utc=True)
df['close_date'] = pd.to_datetime(df['close_date'], utc=True)
df["open_date"] = pd.to_datetime(df["open_date"], utc=True)
df["close_date"] = pd.to_datetime(df["close_date"], utc=True)
# Compatibility support for pre short Columns
if 'is_short' not in df.columns:
df['is_short'] = False
if 'leverage' not in df.columns:
df['leverage'] = 1.0
if 'enter_tag' not in df.columns:
df['enter_tag'] = df['buy_tag']
df = df.drop(['buy_tag'], axis=1)
if 'max_stake_amount' not in df.columns:
df['max_stake_amount'] = df['stake_amount']
if 'orders' not in df.columns:
df['orders'] = None
if "is_short" not in df.columns:
df["is_short"] = False
if "leverage" not in df.columns:
df["leverage"] = 1.0
if "enter_tag" not in df.columns:
df["enter_tag"] = df["buy_tag"]
df = df.drop(["buy_tag"], axis=1)
if "max_stake_amount" not in df.columns:
df["max_stake_amount"] = df["stake_amount"]
if "orders" not in df.columns:
df["orders"] = None
return df

@@ -329,23 +356,25 @@ def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = Non
data = load_backtest_stats(filename)
if not isinstance(data, list):
# new, nested format
if 'strategy' not in data:
if "strategy" not in data:
raise ValueError("Unknown dataformat.")

if not strategy:
if len(data['strategy']) == 1:
strategy = list(data['strategy'].keys())[0]
if len(data["strategy"]) == 1:
strategy = list(data["strategy"].keys())[0]
else:
raise ValueError("Detected backtest result with more than one strategy. "
"Please specify a strategy.")
raise ValueError(
"Detected backtest result with more than one strategy. "
"Please specify a strategy."
)

if strategy not in data['strategy']:
if strategy not in data["strategy"]:
raise ValueError(
f"Strategy {strategy} not available in the backtest result. "
f"Available strategies are '{','.join(data['strategy'].keys())}'"
)
)

data = data['strategy'][strategy]['trades']
data = data["strategy"][strategy]["trades"]
df = pd.DataFrame(data)
if not df.empty:
df = _load_backtest_data_df_compatibility(df)
@@ -353,7 +382,8 @@ def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = Non
else:
# old format - only with lists.
raise OperationalException(
"Backtest-results with only trades data are no longer supported.")
"Backtest-results with only trades data are no longer supported."
)
if not df.empty:
df = df.sort_values("open_date").reset_index(drop=True)
return df
@@ -368,23 +398,26 @@ def analyze_trade_parallelism(results: pd.DataFrame, timeframe: str) -> pd.DataF
:return: dataframe with open-counts per time-period in timeframe
"""
from freqtrade.exchange import timeframe_to_resample_freq

timeframe_freq = timeframe_to_resample_freq(timeframe)
dates = [pd.Series(pd.date_range(row[1]['open_date'], row[1]['close_date'],
freq=timeframe_freq))
for row in results[['open_date', 'close_date']].iterrows()]
dates = [
pd.Series(pd.date_range(row[1]["open_date"], row[1]["close_date"], freq=timeframe_freq))
for row in results[["open_date", "close_date"]].iterrows()
]
deltas = [len(x) for x in dates]
dates = pd.Series(pd.concat(dates).values, name='date')
dates = pd.Series(pd.concat(dates).values, name="date")
df2 = pd.DataFrame(np.repeat(results.values, deltas, axis=0), columns=results.columns)

df2 = pd.concat([dates, df2], axis=1)
df2 = df2.set_index('date')
df_final = df2.resample(timeframe_freq)[['pair']].count()
df_final = df_final.rename({'pair': 'open_trades'}, axis=1)
df2 = df2.set_index("date")
df_final = df2.resample(timeframe_freq)[["pair"]].count()
df_final = df_final.rename({"pair": "open_trades"}, axis=1)
return df_final

def evaluate_result_multi(results: pd.DataFrame, timeframe: str,
max_open_trades: IntOrInf) -> pd.DataFrame:
def evaluate_result_multi(
results: pd.DataFrame, timeframe: str, max_open_trades: IntOrInf
) -> pd.DataFrame:
"""
Find overlapping trades by expanding each trade once per period it was open
and then counting overlaps
@@ -394,7 +427,7 @@ def evaluate_result_multi(results: pd.DataFrame, timeframe: str,
:return: dataframe with open-counts per time-period in freq
"""
df_final = analyze_trade_parallelism(results, timeframe)
return df_final[df_final['open_trades'] > max_open_trades]
return df_final[df_final["open_trades"] > max_open_trades]

def trade_list_to_dataframe(trades: Union[List[Trade], List[LocalTrade]]) -> pd.DataFrame:
@@ -405,9 +438,9 @@ def trade_list_to_dataframe(trades: Union[List[Trade], List[LocalTrade]]) -> pd.
"""
df = pd.DataFrame.from_records([t.to_json(True) for t in trades], columns=BT_DATA_COLUMNS)
if len(df) > 0:
df['close_date'] = pd.to_datetime(df['close_date'], utc=True)
df['open_date'] = pd.to_datetime(df['open_date'], utc=True)
df['close_rate'] = df['close_rate'].astype('float64')
df["close_date"] = pd.to_datetime(df["close_date"], utc=True)
df["open_date"] = pd.to_datetime(df["open_date"], utc=True)
df["close_rate"] = df["close_rate"].astype("float64")
return df

@@ -429,8 +462,13 @@ def load_trades_from_db(db_url: str, strategy: Optional[str] = None) -> pd.DataF
return trades

def load_trades(source: str, db_url: str, exportfilename: Path,
no_trades: bool = False, strategy: Optional[str] = None) -> pd.DataFrame:
def load_trades(
source: str,
db_url: str,
exportfilename: Path,
no_trades: bool = False,
strategy: Optional[str] = None,
) -> pd.DataFrame:
"""
Based on configuration option 'trade_source':
* loads data from DB (using `db_url`)
@@ -451,8 +489,9 @@ def load_trades(source: str, db_url: str, exportfilename: Path,
return load_backtest_data(exportfilename, strategy)

def extract_trades_of_period(dataframe: pd.DataFrame, trades: pd.DataFrame,
date_index=False) -> pd.DataFrame:
def extract_trades_of_period(
dataframe: pd.DataFrame, trades: pd.DataFrame, date_index=False
) -> pd.DataFrame:
"""
Compare trades and backtested pair DataFrames to get trades performed on backtested period
:return: the DataFrame of a trades of period
@@ -461,8 +500,9 @@ def extract_trades_of_period(dataframe: pd.DataFrame, trades: pd.DataFrame,
trades_start = dataframe.index[0]
trades_stop = dataframe.index[-1]
else:
trades_start = dataframe.iloc[0]['date']
trades_stop = dataframe.iloc[-1]['date']
trades = trades.loc[(trades['open_date'] >= trades_start) &
(trades['close_date'] <= trades_stop)]
trades_start = dataframe.iloc[0]["date"]
trades_stop = dataframe.iloc[-1]["date"]
trades = trades.loc[
(trades["open_date"] >= trades_start) & (trades["close_date"] <= trades_stop)
]
return trades

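A short, hedged sketch of how the loaders above are typically combined (directory and strategy name are placeholders; both helpers resolve a directory via .last_result.json as shown in get_latest_backtest_filename):

from pathlib import Path

from freqtrade.data.btanalysis import load_backtest_data, load_backtest_stats

results_dir = Path("user_data/backtest_results")  # hypothetical results directory
stats = load_backtest_stats(results_dir)          # full statistics of the latest run
trades = load_backtest_data(results_dir, strategy="SampleStrategy")
if not trades.empty:
    # columns follow BT_DATA_COLUMNS defined above
    print(trades[["pair", "open_date", "close_date", "profit_ratio"]].head())
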
@@ -1,28 +1,38 @@
from freqtrade.data.converter.converter import (clean_ohlcv_dataframe, convert_ohlcv_format,
ohlcv_fill_up_missing_data, ohlcv_to_dataframe,
order_book_to_dataframe, reduce_dataframe_footprint,
trim_dataframe, trim_dataframes)
from freqtrade.data.converter.trade_converter import (convert_trades_format,
convert_trades_to_ohlcv, trades_convert_types,
trades_df_remove_duplicates,
trades_dict_to_list, trades_list_to_df,
trades_to_ohlcv)
from freqtrade.data.converter.converter import (
clean_ohlcv_dataframe,
convert_ohlcv_format,
ohlcv_fill_up_missing_data,
ohlcv_to_dataframe,
order_book_to_dataframe,
reduce_dataframe_footprint,
trim_dataframe,
trim_dataframes,
)
from freqtrade.data.converter.trade_converter import (
convert_trades_format,
convert_trades_to_ohlcv,
trades_convert_types,
trades_df_remove_duplicates,
trades_dict_to_list,
trades_list_to_df,
trades_to_ohlcv,
)

__all__ = [
'clean_ohlcv_dataframe',
'convert_ohlcv_format',
'ohlcv_fill_up_missing_data',
'ohlcv_to_dataframe',
'order_book_to_dataframe',
'reduce_dataframe_footprint',
'trim_dataframe',
'trim_dataframes',
'convert_trades_format',
'convert_trades_to_ohlcv',
'trades_convert_types',
'trades_df_remove_duplicates',
'trades_dict_to_list',
'trades_list_to_df',
'trades_to_ohlcv',
"clean_ohlcv_dataframe",
"convert_ohlcv_format",
"ohlcv_fill_up_missing_data",
"ohlcv_to_dataframe",
"order_book_to_dataframe",
"reduce_dataframe_footprint",
"trim_dataframe",
"trim_dataframes",
"convert_trades_format",
"convert_trades_to_ohlcv",
"trades_convert_types",
"trades_df_remove_duplicates",
"trades_dict_to_list",
"trades_list_to_df",
"trades_to_ohlcv",
]

@@ -1,6 +1,7 @@
"""
Functions to convert data from one format to another
"""

import logging
from typing import Dict

@@ -15,8 +16,14 @@ from freqtrade.enums import CandleType, TradingMode
logger = logging.getLogger(__name__)

def ohlcv_to_dataframe(ohlcv: list, timeframe: str, pair: str, *,
fill_missing: bool = True, drop_incomplete: bool = True) -> DataFrame:
def ohlcv_to_dataframe(
ohlcv: list,
timeframe: str,
pair: str,
*,
fill_missing: bool = True,
drop_incomplete: bool = True,
) -> DataFrame:
"""
Converts a list with candle (OHLCV) data (in format returned by ccxt.fetch_ohlcv)
to a Dataframe
@@ -32,20 +39,28 @@ def ohlcv_to_dataframe(ohlcv: list, timeframe: str, pair: str, *,
cols = DEFAULT_DATAFRAME_COLUMNS
df = DataFrame(ohlcv, columns=cols)

df['date'] = to_datetime(df['date'], unit='ms', utc=True)
df["date"] = to_datetime(df["date"], unit="ms", utc=True)

# Some exchanges return int values for Volume and even for OHLC.
# Convert them since TA-LIB indicators used in the strategy assume floats
# and fail with exception...
df = df.astype(dtype={'open': 'float', 'high': 'float', 'low': 'float', 'close': 'float',
'volume': 'float'})
return clean_ohlcv_dataframe(df, timeframe, pair,
fill_missing=fill_missing,
drop_incomplete=drop_incomplete)
df = df.astype(
dtype={
"open": "float",
"high": "float",
"low": "float",
"close": "float",
"volume": "float",
}
)
return clean_ohlcv_dataframe(
df, timeframe, pair, fill_missing=fill_missing, drop_incomplete=drop_incomplete
)

def clean_ohlcv_dataframe(data: DataFrame, timeframe: str, pair: str, *,
fill_missing: bool, drop_incomplete: bool) -> DataFrame:
def clean_ohlcv_dataframe(
data: DataFrame, timeframe: str, pair: str, *, fill_missing: bool, drop_incomplete: bool
) -> DataFrame:
"""
Cleanse a OHLCV dataframe by
* Grouping it by date (removes duplicate tics)
@@ -60,17 +75,19 @@ def clean_ohlcv_dataframe(data: DataFrame, timeframe: str, pair: str, *,
:return: DataFrame
"""
# group by index and aggregate results to eliminate duplicate ticks
data = data.groupby(by='date', as_index=False, sort=True).agg({
'open': 'first',
'high': 'max',
'low': 'min',
'close': 'last',
'volume': 'max',
})
data = data.groupby(by="date", as_index=False, sort=True).agg(
{
"open": "first",
"high": "max",
"low": "min",
"close": "last",
"volume": "max",
}
)
# eliminate partial candle
if drop_incomplete:
data.drop(data.tail(1).index, inplace=True)
logger.debug('Dropping last candle')
logger.debug("Dropping last candle")

if fill_missing:
return ohlcv_fill_up_missing_data(data, timeframe, pair)
@@ -81,37 +98,35 @@ def clean_ohlcv_dataframe(data: DataFrame, timeframe: str, pair: str, *,
def ohlcv_fill_up_missing_data(dataframe: DataFrame, timeframe: str, pair: str) -> DataFrame:
"""
Fills up missing data with 0 volume rows,
using the previous close as price for "open", "high" "low" and "close", volume is set to 0
using the previous close as price for "open", "high", "low" and "close", volume is set to 0

"""
from freqtrade.exchange import timeframe_to_resample_freq

ohlcv_dict = {
'open': 'first',
'high': 'max',
'low': 'min',
'close': 'last',
'volume': 'sum'
}
ohlcv_dict = {"open": "first", "high": "max", "low": "min", "close": "last", "volume": "sum"}
resample_interval = timeframe_to_resample_freq(timeframe)
# Resample to create "NAN" values
df = dataframe.resample(resample_interval, on='date').agg(ohlcv_dict)
df = dataframe.resample(resample_interval, on="date").agg(ohlcv_dict)

# Forwardfill close for missing columns
df['close'] = df['close'].ffill()
df["close"] = df["close"].ffill()
# Use close for "open, high, low"
df.loc[:, ['open', 'high', 'low']] = df[['open', 'high', 'low']].fillna(
value={'open': df['close'],
'high': df['close'],
'low': df['close'],
})
df.loc[:, ["open", "high", "low"]] = df[["open", "high", "low"]].fillna(
value={
"open": df["close"],
"high": df["close"],
"low": df["close"],
}
)
df.reset_index(inplace=True)
len_before = len(dataframe)
len_after = len(df)
pct_missing = (len_after - len_before) / len_before if len_before > 0 else 0
if len_before != len_after:
message = (f"Missing data fillup for {pair}, {timeframe}: "
f"before: {len_before} - after: {len_after} - {pct_missing:.2%}")
message = (
f"Missing data fillup for {pair}, {timeframe}: "
f"before: {len_before} - after: {len_after} - {pct_missing:.2%}"
)
if pct_missing > 0.01:
logger.info(message)
else:
@@ -120,8 +135,9 @@ def ohlcv_fill_up_missing_data(dataframe: DataFrame, timeframe: str, pair: str)
return df

def trim_dataframe(df: DataFrame, timerange, *, df_date_col: str = 'date',
startup_candles: int = 0) -> DataFrame:
def trim_dataframe(
df: DataFrame, timerange, *, df_date_col: str = "date", startup_candles: int = 0
) -> DataFrame:
"""
Trim dataframe based on given timerange
:param df: Dataframe to trim
@@ -134,15 +150,16 @@ def trim_dataframe(df: DataFrame, timerange, *, df_date_col: str = 'date',
# Trim candles instead of timeframe in case of given startup_candle count
df = df.iloc[startup_candles:, :]
else:
if timerange.starttype == 'date':
if timerange.starttype == "date":
df = df.loc[df[df_date_col] >= timerange.startdt, :]
if timerange.stoptype == 'date':
if timerange.stoptype == "date":
df = df.loc[df[df_date_col] <= timerange.stopdt, :]
return df

def trim_dataframes(preprocessed: Dict[str, DataFrame], timerange,
startup_candles: int) -> Dict[str, DataFrame]:
def trim_dataframes(
preprocessed: Dict[str, DataFrame], timerange, startup_candles: int
) -> Dict[str, DataFrame]:
"""
Trim startup period from analyzed dataframes
:param preprocessed: Dict of pair: dataframe
@@ -157,8 +174,9 @@ def trim_dataframes(preprocessed: Dict[str, DataFrame], timerange,
if not trimed_df.empty:
processed[pair] = trimed_df
else:
logger.warning(f'{pair} has no data left after adjusting for startup candles, '
f'skipping.')
logger.warning(
f"{pair} has no data left after adjusting for startup candles, skipping."
)
return processed

@@ -170,19 +188,28 @@ def order_book_to_dataframe(bids: list, asks: list) -> DataFrame:
b_sum b_size bids asks a_size a_sum
-------------------------------------------------------------------
"""
cols = ['bids', 'b_size']
cols = ["bids", "b_size"]

bids_frame = DataFrame(bids, columns=cols)
# add cumulative sum column
bids_frame['b_sum'] = bids_frame['b_size'].cumsum()
cols2 = ['asks', 'a_size']
bids_frame["b_sum"] = bids_frame["b_size"].cumsum()
cols2 = ["asks", "a_size"]
asks_frame = DataFrame(asks, columns=cols2)
# add cumulative sum column
asks_frame['a_sum'] = asks_frame['a_size'].cumsum()
asks_frame["a_sum"] = asks_frame["a_size"].cumsum()

frame = pd.concat([bids_frame['b_sum'], bids_frame['b_size'], bids_frame['bids'],
asks_frame['asks'], asks_frame['a_size'], asks_frame['a_sum']], axis=1,
keys=['b_sum', 'b_size', 'bids', 'asks', 'a_size', 'a_sum'])
frame = pd.concat(
[
bids_frame["b_sum"],
bids_frame["b_size"],
bids_frame["bids"],
asks_frame["asks"],
asks_frame["a_size"],
asks_frame["a_sum"],
],
axis=1,
keys=["b_sum", "b_size", "bids", "asks", "a_size", "a_sum"],
)
# logger.info('order book %s', frame )
return frame

@@ -201,47 +228,51 @@ def convert_ohlcv_format(
:param erase: Erase source data (does not apply if source and target format are identical)
"""
from freqtrade.data.history import get_datahandler
src = get_datahandler(config['datadir'], convert_from)
trg = get_datahandler(config['datadir'], convert_to)
timeframes = config.get('timeframes', [config.get('timeframe')])

src = get_datahandler(config["datadir"], convert_from)
trg = get_datahandler(config["datadir"], convert_to)
timeframes = config.get("timeframes", [config.get("timeframe")])
logger.info(f"Converting candle (OHLCV) for timeframe {timeframes}")

candle_types = [CandleType.from_string(ct) for ct in config.get('candle_types', [
c.value for c in CandleType])]
candle_types = [
CandleType.from_string(ct)
for ct in config.get("candle_types", [c.value for c in CandleType])
]
logger.info(candle_types)
paircombs = src.ohlcv_get_available_data(config['datadir'], TradingMode.SPOT)
paircombs.extend(src.ohlcv_get_available_data(config['datadir'], TradingMode.FUTURES))
paircombs = src.ohlcv_get_available_data(config["datadir"], TradingMode.SPOT)
paircombs.extend(src.ohlcv_get_available_data(config["datadir"], TradingMode.FUTURES))

if 'pairs' in config:
if "pairs" in config:
# Filter pairs
paircombs = [comb for comb in paircombs if comb[0] in config['pairs']]
paircombs = [comb for comb in paircombs if comb[0] in config["pairs"]]

if 'timeframes' in config:
paircombs = [comb for comb in paircombs if comb[1] in config['timeframes']]
if "timeframes" in config:
paircombs = [comb for comb in paircombs if comb[1] in config["timeframes"]]
paircombs = [comb for comb in paircombs if comb[2] in candle_types]

paircombs = sorted(paircombs, key=lambda x: (x[0], x[1], x[2].value))

formatted_paircombs = '\n'.join([f"{pair}, {timeframe}, {candle_type}"
for pair, timeframe, candle_type in paircombs])
formatted_paircombs = "\n".join(
[f"{pair}, {timeframe}, {candle_type}" for pair, timeframe, candle_type in paircombs]
)

logger.info(f"Converting candle (OHLCV) data for the following pair combinations:\n"
f"{formatted_paircombs}")
logger.info(
f"Converting candle (OHLCV) data for the following pair combinations:\n"
f"{formatted_paircombs}"
)
for pair, timeframe, candle_type in paircombs:
data = src.ohlcv_load(pair=pair, timeframe=timeframe,
timerange=None,
fill_missing=False,
drop_incomplete=False,
startup_candles=0,
candle_type=candle_type)
data = src.ohlcv_load(
pair=pair,
timeframe=timeframe,
timerange=None,
fill_missing=False,
drop_incomplete=False,
startup_candles=0,
candle_type=candle_type,
)
logger.info(f"Converting {len(data)} {timeframe} {candle_type} candles for {pair}")
if len(data) > 0:
trg.ohlcv_store(
pair=pair,
timeframe=timeframe,
data=data,
candle_type=candle_type
)
trg.ohlcv_store(pair=pair, timeframe=timeframe, data=data, candle_type=candle_type)
if erase and convert_from != convert_to:
logger.info(f"Deleting source data for {pair} / {timeframe}")
src.ohlcv_purge(pair=pair, timeframe=timeframe, candle_type=candle_type)
@@ -254,12 +285,11 @@ def reduce_dataframe_footprint(df: DataFrame) -> DataFrame:
:return: Dataframe converted to float/int 32s
"""

logger.debug(f"Memory usage of dataframe is "
f"{df.memory_usage().sum() / 1024**2:.2f} MB")
logger.debug(f"Memory usage of dataframe is {df.memory_usage().sum() / 1024**2:.2f} MB")

df_dtypes = df.dtypes
for column, dtype in df_dtypes.items():
if column in ['open', 'high', 'low', 'close', 'volume']:
if column in ["open", "high", "low", "close", "volume"]:
continue
if dtype == np.float64:
df_dtypes[column] = np.float32
@@ -267,7 +297,6 @@ def reduce_dataframe_footprint(df: DataFrame) -> DataFrame:
df_dtypes[column] = np.int32
df = df.astype(df_dtypes)

logger.debug(f"Memory usage after optimization is: "
f"{df.memory_usage().sum() / 1024**2:.2f} MB")
logger.debug(f"Memory usage after optimization is: {df.memory_usage().sum() / 1024**2:.2f} MB")

return df

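To illustrate the converter entry point changed above, a minimal sketch with made-up candle values (signature as shown in ohlcv_to_dataframe; the input list follows the shape returned by ccxt.fetch_ohlcv):

from freqtrade.data.converter import ohlcv_to_dataframe

raw = [
    # [timestamp_ms, open, high, low, close, volume] - illustrative numbers only
    [1704067200000, 42000.0, 42100.0, 41900.0, 42050.0, 12.3],
    [1704070800000, 42050.0, 42200.0, 42000.0, 42150.0, 8.7],
]
df = ohlcv_to_dataframe(raw, timeframe="1h", pair="BTC/USDT",
                        fill_missing=True, drop_incomplete=False)
print(df.dtypes)  # float OHLCV columns plus a tz-aware "date" column
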
@@ -1,6 +1,7 @@
"""
Functions to convert data from one format to another
"""

import logging
from pathlib import Path
from typing import Dict, List
@@ -9,8 +10,13 @@ import pandas as pd
from pandas import DataFrame, to_datetime

from freqtrade.configuration import TimeRange
from freqtrade.constants import (DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TRADES_DTYPES,
Config, TradeList)
from freqtrade.constants import (
DEFAULT_DATAFRAME_COLUMNS,
DEFAULT_TRADES_COLUMNS,
TRADES_DTYPES,
Config,
TradeList,
)
from freqtrade.enums import CandleType, TradingMode
from freqtrade.exceptions import OperationalException

@@ -25,7 +31,7 @@ def trades_df_remove_duplicates(trades: pd.DataFrame) -> pd.DataFrame:
:param trades: DataFrame with the columns constants.DEFAULT_TRADES_COLUMNS
:return: DataFrame with duplicates removed based on the 'timestamp' column
"""
return trades.drop_duplicates(subset=['timestamp', 'id'])
return trades.drop_duplicates(subset=["timestamp", "id"])

def trades_dict_to_list(trades: List[Dict]) -> TradeList:
@@ -42,7 +48,7 @@ def trades_convert_types(trades: DataFrame) -> DataFrame:
Convert Trades dtypes and add 'date' column
"""
trades = trades.astype(TRADES_DTYPES)
trades['date'] = to_datetime(trades['timestamp'], unit='ms', utc=True)
trades["date"] = to_datetime(trades["timestamp"], unit="ms", utc=True)
return trades

@@ -71,13 +77,14 @@ def trades_to_ohlcv(trades: DataFrame, timeframe: str) -> DataFrame:
:raises: ValueError if no trades are provided
"""
from freqtrade.exchange import timeframe_to_resample_freq

if trades.empty:
raise ValueError('Trade-list empty.')
df = trades.set_index('date', drop=True)
raise ValueError("Trade-list empty.")
df = trades.set_index("date", drop=True)
resample_interval = timeframe_to_resample_freq(timeframe)
df_new = df['price'].resample(resample_interval).ohlc()
df_new['volume'] = df['amount'].resample(resample_interval).sum()
df_new['date'] = df_new.index
df_new = df["price"].resample(resample_interval).ohlc()
df_new["volume"] = df["amount"].resample(resample_interval).sum()
df_new["date"] = df_new.index
# Drop 0 volume rows
df_new = df_new.dropna()
return df_new.loc[:, DEFAULT_DATAFRAME_COLUMNS]
@@ -97,24 +104,27 @@ def convert_trades_to_ohlcv(
Convert stored trades data to ohlcv data
"""
from freqtrade.data.history import get_datahandler

data_handler_trades = get_datahandler(datadir, data_format=data_format_trades)
data_handler_ohlcv = get_datahandler(datadir, data_format=data_format_ohlcv)

logger.info(f"About to convert pairs: '{', '.join(pairs)}', "
f"intervals: '{', '.join(timeframes)}' to {datadir}")
logger.info(
f"About to convert pairs: '{', '.join(pairs)}', "
f"intervals: '{', '.join(timeframes)}' to {datadir}"
)
trading_mode = TradingMode.FUTURES if candle_type != CandleType.SPOT else TradingMode.SPOT
for pair in pairs:
trades = data_handler_trades.trades_load(pair, trading_mode)
for timeframe in timeframes:
if erase:
if data_handler_ohlcv.ohlcv_purge(pair, timeframe, candle_type=candle_type):
logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
logger.info(f"Deleting existing data for pair {pair}, interval {timeframe}.")
try:
ohlcv = trades_to_ohlcv(trades, timeframe)
# Store ohlcv
data_handler_ohlcv.ohlcv_store(pair, timeframe, data=ohlcv, candle_type=candle_type)
except ValueError:
logger.warning(f'Could not convert {pair} to OHLCV.')
logger.warning(f"Could not convert {pair} to OHLCV.")

def convert_trades_format(config: Config, convert_from: str, convert_to: str, erase: bool):
@@ -125,25 +135,27 @@ def convert_trades_format(config: Config, convert_from: str, convert_to: str, er
:param convert_to: Target format
:param erase: Erase source data (does not apply if source and target format are identical)
"""
if convert_from == 'kraken_csv':
if config['exchange']['name'] != 'kraken':
if convert_from == "kraken_csv":
if config["exchange"]["name"] != "kraken":
raise OperationalException(
'Converting from csv is only supported for kraken.'
'Please refer to the documentation for details about this special mode.'
"Converting from csv is only supported for kraken."
"Please refer to the documentation for details about this special mode."
)
from freqtrade.data.converter.trade_converter_kraken import import_kraken_trades_from_csv

import_kraken_trades_from_csv(config, convert_to)
return

from freqtrade.data.history import get_datahandler
src = get_datahandler(config['datadir'], convert_from)
trg = get_datahandler(config['datadir'], convert_to)

if 'pairs' not in config:
config['pairs'] = src.trades_get_pairs(config['datadir'])
src = get_datahandler(config["datadir"], convert_from)
trg = get_datahandler(config["datadir"], convert_to)

if "pairs" not in config:
config["pairs"] = src.trades_get_pairs(config["datadir"])
logger.info(f"Converting trades for {config['pairs']}")
trading_mode: TradingMode = config.get('trading_mode', TradingMode.SPOT)
for pair in config['pairs']:
trading_mode: TradingMode = config.get("trading_mode", TradingMode.SPOT)
for pair in config["pairs"]:
data = src.trades_load(pair, trading_mode)
logger.info(f"Converting {len(data)} trades for {pair}")
trg.trades_store(pair, data, trading_mode)

@@ -4,8 +4,10 @@ from pathlib import Path
import pandas as pd

from freqtrade.constants import DATETIME_PRINT_FORMAT, DEFAULT_TRADES_COLUMNS, Config
from freqtrade.data.converter.trade_converter import (trades_convert_types,
trades_df_remove_duplicates)
from freqtrade.data.converter.trade_converter import (
trades_convert_types,
trades_df_remove_duplicates,
)
from freqtrade.data.history import get_datahandler
from freqtrade.enums import TradingMode
from freqtrade.exceptions import OperationalException
@@ -15,32 +17,33 @@ from freqtrade.resolvers import ExchangeResolver

logger = logging.getLogger(__name__)

KRAKEN_CSV_TRADE_COLUMNS = ['timestamp', 'price', 'amount']
KRAKEN_CSV_TRADE_COLUMNS = ["timestamp", "price", "amount"]

def import_kraken_trades_from_csv(config: Config, convert_to: str):
"""
Import kraken trades from csv
"""
if config['exchange']['name'] != 'kraken':
raise OperationalException('This function is only for the kraken exchange.')
if config["exchange"]["name"] != "kraken":
raise OperationalException("This function is only for the kraken exchange.")

datadir: Path = config['datadir']
datadir: Path = config["datadir"]
data_handler = get_datahandler(datadir, data_format=convert_to)

tradesdir: Path = config['datadir'] / 'trades_csv'
tradesdir: Path = config["datadir"] / "trades_csv"
exchange = ExchangeResolver.load_exchange(config, validate=False)
# iterate through directories in this directory
data_symbols = {p.stem for p in tradesdir.rglob('*.csv')}
data_symbols = {p.stem for p in tradesdir.rglob("*.csv")}

# create pair/filename mapping
markets = {
(m['symbol'], m['altname']) for m in exchange.markets.values()
if m.get('altname') in data_symbols
(m["symbol"], m["altname"])
for m in exchange.markets.values()
if m.get("altname") in data_symbols
}
logger.info(f"Found csv files for {', '.join(data_symbols)}.")

if pairs_raw := config.get('pairs'):
if pairs_raw := config.get("pairs"):
pairs = expand_pairlist(pairs_raw, [m[0] for m in markets])
markets = {m for m in markets if m[0] in pairs}
if not markets:
@@ -66,18 +69,20 @@ def import_kraken_trades_from_csv(config: Config, convert_to: str):
trades = pd.concat(dfs, ignore_index=True)
del dfs

trades.loc[:, 'timestamp'] = trades['timestamp'] * 1e3
trades.loc[:, 'cost'] = trades['price'] * trades['amount']
trades.loc[:, "timestamp"] = trades["timestamp"] * 1e3
trades.loc[:, "cost"] = trades["price"] * trades["amount"]
for col in DEFAULT_TRADES_COLUMNS:
if col not in trades.columns:
trades.loc[:, col] = ''
trades.loc[:, col] = ""
trades = trades[DEFAULT_TRADES_COLUMNS]
trades = trades_convert_types(trades)

trades_df = trades_df_remove_duplicates(trades)
del trades
logger.info(f"{pair}: {len(trades_df)} trades, from "
f"{trades_df['date'].min():{DATETIME_PRINT_FORMAT}} to "
f"{trades_df['date'].max():{DATETIME_PRINT_FORMAT}}")
logger.info(
f"{pair}: {len(trades_df)} trades, from "
f"{trades_df['date'].min():{DATETIME_PRINT_FORMAT}} to "
f"{trades_df['date'].max():{DATETIME_PRINT_FORMAT}}"
)

data_handler.trades_store(pair, trades_df, TradingMode.SPOT)

@@ -4,6 +4,7 @@ Responsible to provide data to the bot
including ticker and orderbook data, live and historical candle (OHLCV) data
Common Interface for bot and strategy to access data.
"""

import logging
from collections import deque
from datetime import datetime, timezone
@@ -12,8 +13,12 @@ from typing import Any, Dict, List, Optional, Tuple
from pandas import DataFrame, Timedelta, Timestamp, to_timedelta

from freqtrade.configuration import TimeRange
from freqtrade.constants import (FULL_DATAFRAME_THRESHOLD, Config, ListPairsWithTimeframes,
PairWithTimeframe)
from freqtrade.constants import (
FULL_DATAFRAME_THRESHOLD,
Config,
ListPairsWithTimeframes,
PairWithTimeframe,
)
from freqtrade.data.history import load_pair_history
from freqtrade.enums import CandleType, RPCMessageType, RunMode
from freqtrade.exceptions import ExchangeError, OperationalException
@@ -27,18 +32,17 @@ from freqtrade.util import PeriodicCache

logger = logging.getLogger(__name__)

NO_EXCHANGE_EXCEPTION = 'Exchange is not available to DataProvider.'
NO_EXCHANGE_EXCEPTION = "Exchange is not available to DataProvider."
MAX_DATAFRAME_CANDLES = 1000

class DataProvider:

def __init__(
self,
config: Config,
exchange: Optional[Exchange],
pairlists=None,
rpc: Optional[RPCManager] = None
rpc: Optional[RPCManager] = None,
) -> None:
self._config = config
self._exchange = exchange
@@ -49,18 +53,20 @@ class DataProvider:
self.__slice_date: Optional[datetime] = None

self.__cached_pairs_backtesting: Dict[PairWithTimeframe, DataFrame] = {}
self.__producer_pairs_df: Dict[str,
Dict[PairWithTimeframe, Tuple[DataFrame, datetime]]] = {}
self.__producer_pairs_df: Dict[
str, Dict[PairWithTimeframe, Tuple[DataFrame, datetime]]
] = {}
self.__producer_pairs: Dict[str, List[str]] = {}
self._msg_queue: deque = deque()

self._default_candle_type = self._config.get('candle_type_def', CandleType.SPOT)
self._default_timeframe = self._config.get('timeframe', '1h')
self._default_candle_type = self._config.get("candle_type_def", CandleType.SPOT)
self._default_timeframe = self._config.get("timeframe", "1h")

self.__msg_cache = PeriodicCache(
maxsize=1000, ttl=timeframe_to_seconds(self._default_timeframe))
maxsize=1000, ttl=timeframe_to_seconds(self._default_timeframe)
)

self.producers = self._config.get('external_message_consumer', {}).get('producers', [])
self.producers = self._config.get("external_message_consumer", {}).get("producers", [])
self.external_data_enabled = len(self.producers) > 0

def _set_dataframe_max_index(self, limit_index: int):
@@ -80,11 +86,7 @@ class DataProvider:
self.__slice_date = limit_date

def _set_cached_df(
self,
pair: str,
timeframe: str,
dataframe: DataFrame,
candle_type: CandleType
self, pair: str, timeframe: str, dataframe: DataFrame, candle_type: CandleType
) -> None:
"""
Store cached Dataframe.
@@ -96,8 +98,7 @@ class DataProvider:
:param candle_type: Any of the enum CandleType (must match trading mode!)
"""
pair_key = (pair, timeframe, candle_type)
self.__cached_pairs[pair_key] = (
dataframe, datetime.now(timezone.utc))
self.__cached_pairs[pair_key] = (dataframe, datetime.now(timezone.utc))

# For multiple producers we will want to merge the pairlists instead of overwriting
def _set_producer_pairs(self, pairlist: List[str], producer_name: str = "default"):
@@ -116,12 +117,7 @@ class DataProvider:
"""
return self.__producer_pairs.get(producer_name, []).copy()

def _emit_df(
self,
pair_key: PairWithTimeframe,
dataframe: DataFrame,
new_candle: bool
) -> None:
def _emit_df(self, pair_key: PairWithTimeframe, dataframe: DataFrame, new_candle: bool) -> None:
"""
Send this dataframe as an ANALYZED_DF message to RPC

@@ -131,19 +127,21 @@ class DataProvider:
"""
if self.__rpc:
msg: RPCAnalyzedDFMsg = {
'type': RPCMessageType.ANALYZED_DF,
'data': {
'key': pair_key,
'df': dataframe.tail(1),
'la': datetime.now(timezone.utc)
}
}
"type": RPCMessageType.ANALYZED_DF,
"data": {
"key": pair_key,
"df": dataframe.tail(1),
"la": datetime.now(timezone.utc),
},
}
self.__rpc.send_msg(msg)
if new_candle:
self.__rpc.send_msg({
'type': RPCMessageType.NEW_CANDLE,
'data': pair_key,
})
self.__rpc.send_msg(
{
"type": RPCMessageType.NEW_CANDLE,
"data": pair_key,
}
)

def _replace_external_df(
self,
@@ -152,7 +150,7 @@ class DataProvider:
last_analyzed: datetime,
timeframe: str,
candle_type: CandleType,
producer_name: str = "default"
producer_name: str = "default",
) -> None:
"""
Add the pair data to this class from an external source.
@@ -178,7 +176,7 @@ class DataProvider:
last_analyzed: datetime,
timeframe: str,
candle_type: CandleType,
producer_name: str = "default"
producer_name: str = "default",
) -> Tuple[bool, int]:
"""
Append a candle to the existing external dataframe. The incoming dataframe
@@ -204,12 +202,14 @@ class DataProvider:
last_analyzed=last_analyzed,
timeframe=timeframe,
candle_type=candle_type,
producer_name=producer_name
producer_name=producer_name,
)
return (True, 0)

if (producer_name not in self.__producer_pairs_df
or pair_key not in self.__producer_pairs_df[producer_name]):
if (
producer_name not in self.__producer_pairs_df
or pair_key not in self.__producer_pairs_df[producer_name]
):
# We don't have data from this producer yet,
# or we don't have data for this pair_key
# return False and 1000 for the full df
@@ -220,12 +220,12 @@ class DataProvider:
# CHECK FOR MISSING CANDLES
# Convert the timeframe to a timedelta for pandas
timeframe_delta: Timedelta = to_timedelta(timeframe)
local_last: Timestamp = existing_df.iloc[-1]['date']  # We want the last date from our copy
local_last: Timestamp = existing_df.iloc[-1]["date"]  # We want the last date from our copy
# We want the first date from the incoming
incoming_first: Timestamp = dataframe.iloc[0]['date']
incoming_first: Timestamp = dataframe.iloc[0]["date"]

# Remove existing candles that are newer than the incoming first candle
existing_df1 = existing_df[existing_df['date'] < incoming_first]
existing_df1 = existing_df[existing_df["date"] < incoming_first]

candle_difference = (incoming_first - local_last) / timeframe_delta

@@ -243,13 +243,13 @@ class DataProvider:

# Everything is good, we appended
self._replace_external_df(
pair,
appended_df,
last_analyzed=last_analyzed,
timeframe=timeframe,
candle_type=candle_type,
producer_name=producer_name
)
pair,
appended_df,
last_analyzed=last_analyzed,
timeframe=timeframe,
candle_type=candle_type,
producer_name=producer_name,
)
return (True, 0)

def get_producer_df(
@@ -257,7 +257,7 @@ class DataProvider:
pair: str,
timeframe: Optional[str] = None,
candle_type: Optional[CandleType] = None,
producer_name: str = "default"
producer_name: str = "default",
) -> Tuple[DataFrame, datetime]:
"""
Get the pair data from producers.
@@ -292,64 +292,64 @@ class DataProvider:
"""
self._pairlists = pairlists

def historic_ohlcv(
self,
pair: str,
timeframe: str,
candle_type: str = ''
) -> DataFrame:
def historic_ohlcv(self, pair: str, timeframe: str, candle_type: str = "") -> DataFrame:
"""
Get stored historical candle (OHLCV) data
:param pair: pair to get the data for
:param timeframe: timeframe to get data for
:param candle_type: '', mark, index, premiumIndex, or funding_rate
"""
_candle_type = CandleType.from_string(
candle_type) if candle_type != '' else self._config['candle_type_def']
_candle_type = (
CandleType.from_string(candle_type)
if candle_type != ""
else self._config["candle_type_def"]
)
saved_pair: PairWithTimeframe = (pair, str(timeframe), _candle_type)
if saved_pair not in self.__cached_pairs_backtesting:
timerange = TimeRange.parse_timerange(None if self._config.get(
'timerange') is None else str(self._config.get('timerange')))
timerange = TimeRange.parse_timerange(
None
if self._config.get("timerange") is None
else str(self._config.get("timerange"))
)

startup_candles = self.get_required_startup(str(timeframe))
tf_seconds = timeframe_to_seconds(str(timeframe))
timerange.subtract_start(tf_seconds * startup_candles)

logger.info(f"Loading data for {pair} {timeframe} "
f"from {timerange.start_fmt} to {timerange.stop_fmt}")
logger.info(
f"Loading data for {pair} {timeframe} "
f"from {timerange.start_fmt} to {timerange.stop_fmt}"
)

self.__cached_pairs_backtesting[saved_pair] = load_pair_history(
pair=pair,
timeframe=timeframe,
datadir=self._config['datadir'],
datadir=self._config["datadir"],
timerange=timerange,
data_format=self._config['dataformat_ohlcv'],
data_format=self._config["dataformat_ohlcv"],
candle_type=_candle_type,

)
return self.__cached_pairs_backtesting[saved_pair].copy()

def get_required_startup(self, timeframe: str) -> int:
freqai_config = self._config.get('freqai', {})
if not freqai_config.get('enabled', False):
return self._config.get('startup_candle_count', 0)
freqai_config = self._config.get("freqai", {})
if not freqai_config.get("enabled", False):
return self._config.get("startup_candle_count", 0)
else:
startup_candles = self._config.get('startup_candle_count', 0)
indicator_periods = freqai_config['feature_parameters']['indicator_periods_candles']
startup_candles = self._config.get("startup_candle_count", 0)
indicator_periods = freqai_config["feature_parameters"]["indicator_periods_candles"]
# make sure the startupcandles is at least the set maximum indicator periods
self._config['startup_candle_count'] = max(startup_candles, max(indicator_periods))
self._config["startup_candle_count"] = max(startup_candles, max(indicator_periods))
tf_seconds = timeframe_to_seconds(timeframe)
train_candles = freqai_config['train_period_days'] * 86400 / tf_seconds
total_candles = int(self._config['startup_candle_count'] + train_candles)
train_candles = freqai_config["train_period_days"] * 86400 / tf_seconds
total_candles = int(self._config["startup_candle_count"] + train_candles)
logger.info(
f'Increasing startup_candle_count for freqai on {timeframe} to {total_candles}')
f"Increasing startup_candle_count for freqai on {timeframe} to {total_candles}"
)
return total_candles

def get_pair_dataframe(
self,
pair: str,
timeframe: Optional[str] = None,
candle_type: str = ''
self, pair: str, timeframe: Optional[str] = None, candle_type: str = ""
) -> DataFrame:
"""
Return pair candle (OHLCV) data, either live or cached historical -- depending
@@ -366,13 +366,13 @@ class DataProvider:
data = self.ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type)
else:
# Get historical OHLCV data (cached on disk).
timeframe = timeframe or self._config['timeframe']
timeframe = timeframe or self._config["timeframe"]
data = self.historic_ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type)
# Cut date to timeframe-specific date.
# This is necessary to prevent lookahead bias in callbacks through informative pairs.
if self.__slice_date:
cutoff_date = timeframe_to_prev_date(timeframe, self.__slice_date)
data = data.loc[data['date'] < cutoff_date]
data = data.loc[data["date"] < cutoff_date]
if len(data) == 0:
logger.warning(f"No data found for ({pair}, {timeframe}, {candle_type}).")
return data
@@ -387,7 +387,7 @@ class DataProvider:
combination.
Returns empty dataframe and Epoch 0 (1970-01-01) if no dataframe was cached.
"""
pair_key = (pair, timeframe, self._config.get('candle_type_def', CandleType.SPOT))
pair_key = (pair, timeframe, self._config.get("candle_type_def", CandleType.SPOT))
if pair_key in self.__cached_pairs:
if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
df, date = self.__cached_pairs[pair_key]
@@ -395,7 +395,7 @@ class DataProvider:
df, date = self.__cached_pairs[pair_key]
if self.__slice_index is not None:
max_index = self.__slice_index
df = df.iloc[max(0, max_index - MAX_DATAFRAME_CANDLES):max_index]
df = df.iloc[max(0, max_index - MAX_DATAFRAME_CANDLES) : max_index]
return df, date
else:
return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
@@ -406,7 +406,7 @@ class DataProvider:
Get runmode of the bot
can be "live", "dry-run", "backtest", "edgecli", "hyperopt" or "other".
"""
return RunMode(self._config.get('runmode', RunMode.OTHER))
return RunMode(self._config.get("runmode", RunMode.OTHER))

def current_whitelist(self) -> List[str]:
"""
@@ -434,9 +434,11 @@ class DataProvider:

# Exchange functions

def refresh(self,
pairlist: ListPairsWithTimeframes,
helping_pairs: Optional[ListPairsWithTimeframes] = None) -> None:
def refresh(
self,
pairlist: ListPairsWithTimeframes,
helping_pairs: Optional[ListPairsWithTimeframes] = None,
) -> None:
"""
Refresh data, called with each cycle
"""
@@ -456,11 +458,7 @@ class DataProvider:
return list(self._exchange._klines.keys())

def ohlcv(
self,
pair: str,
timeframe: Optional[str] = None,
copy: bool = True,
candle_type: str = ''
self, pair: str, timeframe: Optional[str] = None, copy: bool = True, candle_type: str = ""
) -> DataFrame:
"""
Get candle (OHLCV) data for the given pair as DataFrame
@@ -474,11 +472,13 @@ class DataProvider:
if self._exchange is None:
raise OperationalException(NO_EXCHANGE_EXCEPTION)
if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
_candle_type = CandleType.from_string(
candle_type) if candle_type != '' else self._config['candle_type_def']
_candle_type = (
CandleType.from_string(candle_type)
if candle_type != ""
else self._config["candle_type_def"]
)
return self._exchange.klines(
(pair, timeframe or self._config['timeframe'], _candle_type),
copy=copy
(pair, timeframe or self._config["timeframe"], _candle_type), copy=copy
)
else:
return DataFrame()

@@ -8,8 +8,11 @@ from tabulate import tabulate

from freqtrade.configuration import TimeRange
from freqtrade.constants import Config
from freqtrade.data.btanalysis import (get_latest_backtest_filename, load_backtest_data,
load_backtest_stats)
from freqtrade.data.btanalysis import (
get_latest_backtest_filename,
load_backtest_data,
load_backtest_stats,
)
from freqtrade.exceptions import OperationalException


@@ -18,9 +21,10 @@ logger = logging.getLogger(__name__)

def _load_backtest_analysis_data(backtest_dir: Path, name: str):
if backtest_dir.is_dir():
scpf = Path(backtest_dir,
Path(get_latest_backtest_filename(backtest_dir)).stem + "_" + name + ".pkl"
)
scpf = Path(
backtest_dir,
Path(get_latest_backtest_filename(backtest_dir)).stem + "_" + name + ".pkl",
)
else:
scpf = Path(backtest_dir.parent / f"{backtest_dir.stem}_{name}.pkl")

@@ -53,7 +57,8 @@ def _process_candles_and_indicators(pairlist, strategy_name, trades, signal_cand
for pair in pairlist:
if pair in signal_candles[strategy_name]:
analysed_trades_dict[strategy_name][pair] = _analyze_candles_and_indicators(
pair, trades, signal_candles[strategy_name][pair])
pair, trades, signal_candles[strategy_name][pair]
)
except Exception as e:
print(f"Cannot process entry/exit reasons for {strategy_name}: ", e)

@@ -64,28 +69,28 @@ def _analyze_candles_and_indicators(pair, trades: pd.DataFrame, signal_candles:
buyf = signal_candles

if len(buyf) > 0:
buyf = buyf.set_index('date', drop=False)
trades_red = trades.loc[trades['pair'] == pair].copy()
buyf = buyf.set_index("date", drop=False)
trades_red = trades.loc[trades["pair"] == pair].copy()

trades_inds = pd.DataFrame()

if trades_red.shape[0] > 0 and buyf.shape[0] > 0:
for t, v in trades_red.open_date.items():
allinds = buyf.loc[(buyf['date'] < v)]
allinds = buyf.loc[(buyf["date"] < v)]
if allinds.shape[0] > 0:
tmp_inds = allinds.iloc[[-1]]

trades_red.loc[t, 'signal_date'] = tmp_inds['date'].values[0]
trades_red.loc[t, 'enter_reason'] = trades_red.loc[t, 'enter_tag']
tmp_inds.index.rename('signal_date', inplace=True)
trades_red.loc[t, "signal_date"] = tmp_inds["date"].values[0]
trades_red.loc[t, "enter_reason"] = trades_red.loc[t, "enter_tag"]
tmp_inds.index.rename("signal_date", inplace=True)
trades_inds = pd.concat([trades_inds, tmp_inds])

if 'signal_date' in trades_red:
trades_red['signal_date'] = pd.to_datetime(trades_red['signal_date'], utc=True)
trades_red.set_index('signal_date', inplace=True)
if "signal_date" in trades_red:
trades_red["signal_date"] = pd.to_datetime(trades_red["signal_date"], utc=True)
trades_red.set_index("signal_date", inplace=True)

try:
trades_red = pd.merge(trades_red, trades_inds, on='signal_date', how='outer')
trades_red = pd.merge(trades_red, trades_inds, on="signal_date", how="outer")
except Exception as e:
raise e
return trades_red
@@ -93,138 +98,166 @@ def _analyze_candles_and_indicators(pair, trades: pd.DataFrame, signal_candles:
return pd.DataFrame()


def _do_group_table_output(bigdf, glist, csv_path: Path, to_csv=False, ):
def _do_group_table_output(
bigdf,
glist,
csv_path: Path,
to_csv=False,
):
for g in glist:
# 0: summary wins/losses grouped by enter tag
if g == "0":
group_mask = ['enter_reason']
wins = bigdf.loc[bigdf['profit_abs'] >= 0] \
.groupby(group_mask) \
.agg({'profit_abs': ['sum']})
group_mask = ["enter_reason"]
wins = (
bigdf.loc[bigdf["profit_abs"] >= 0].groupby(group_mask).agg({"profit_abs": ["sum"]})
)

wins.columns = ['profit_abs_wins']
loss = bigdf.loc[bigdf['profit_abs'] < 0] \
.groupby(group_mask) \
.agg({'profit_abs': ['sum']})
loss.columns = ['profit_abs_loss']
wins.columns = ["profit_abs_wins"]
loss = (
bigdf.loc[bigdf["profit_abs"] < 0].groupby(group_mask).agg({"profit_abs": ["sum"]})
)
loss.columns = ["profit_abs_loss"]

new = bigdf.groupby(group_mask).agg({'profit_abs': [
'count',
lambda x: sum(x > 0),
lambda x: sum(x <= 0)]})
new = bigdf.groupby(group_mask).agg(
{"profit_abs": ["count", lambda x: sum(x > 0), lambda x: sum(x <= 0)]}
)
new = pd.concat([new, wins, loss], axis=1).fillna(0)

new['profit_tot'] = new['profit_abs_wins'] - abs(new['profit_abs_loss'])
new['wl_ratio_pct'] = (new.iloc[:, 1] / new.iloc[:, 0] * 100).fillna(0)
new['avg_win'] = (new['profit_abs_wins'] / new.iloc[:, 1]).fillna(0)
new['avg_loss'] = (new['profit_abs_loss'] / new.iloc[:, 2]).fillna(0)
new["profit_tot"] = new["profit_abs_wins"] - abs(new["profit_abs_loss"])
new["wl_ratio_pct"] = (new.iloc[:, 1] / new.iloc[:, 0] * 100).fillna(0)
new["avg_win"] = (new["profit_abs_wins"] / new.iloc[:, 1]).fillna(0)
new["avg_loss"] = (new["profit_abs_loss"] / new.iloc[:, 2]).fillna(0)

new['exp_ratio'] = (
(
(1 + (new['avg_win'] / abs(new['avg_loss']))) * (new['wl_ratio_pct'] / 100)
) - 1).fillna(0)
new["exp_ratio"] = (
((1 + (new["avg_win"] / abs(new["avg_loss"]))) * (new["wl_ratio_pct"] / 100)) - 1
).fillna(0)

new.columns = ['total_num_buys', 'wins', 'losses',
'profit_abs_wins', 'profit_abs_loss',
'profit_tot', 'wl_ratio_pct',
'avg_win', 'avg_loss', 'exp_ratio']
new.columns = [
"total_num_buys",
"wins",
"losses",
"profit_abs_wins",
"profit_abs_loss",
"profit_tot",
"wl_ratio_pct",
"avg_win",
"avg_loss",
"exp_ratio",
]

sortcols = ['total_num_buys']
sortcols = ["total_num_buys"]

_print_table(new, sortcols, show_index=True, name="Group 0:",
to_csv=to_csv, csv_path=csv_path)
_print_table(
new, sortcols, show_index=True, name="Group 0:", to_csv=to_csv, csv_path=csv_path
)

else:
agg_mask = {'profit_abs': ['count', 'sum', 'median', 'mean'],
'profit_ratio': ['median', 'mean', 'sum']}
agg_cols = ['num_buys', 'profit_abs_sum', 'profit_abs_median',
'profit_abs_mean', 'median_profit_pct', 'mean_profit_pct',
'total_profit_pct']
sortcols = ['profit_abs_sum', 'enter_reason']
agg_mask = {
"profit_abs": ["count", "sum", "median", "mean"],
"profit_ratio": ["median", "mean", "sum"],
}
agg_cols = [
"num_buys",
"profit_abs_sum",
"profit_abs_median",
"profit_abs_mean",
"median_profit_pct",
"mean_profit_pct",
"total_profit_pct",
]
sortcols = ["profit_abs_sum", "enter_reason"]

# 1: profit summaries grouped by enter_tag
if g == "1":
group_mask = ['enter_reason']
group_mask = ["enter_reason"]

# 2: profit summaries grouped by enter_tag and exit_tag
if g == "2":
group_mask = ['enter_reason', 'exit_reason']
group_mask = ["enter_reason", "exit_reason"]

# 3: profit summaries grouped by pair and enter_tag
if g == "3":
group_mask = ['pair', 'enter_reason']
group_mask = ["pair", "enter_reason"]

# 4: profit summaries grouped by pair, enter_ and exit_tag (this can get quite large)
if g == "4":
group_mask = ['pair', 'enter_reason', 'exit_reason']
group_mask = ["pair", "enter_reason", "exit_reason"]

# 5: profit summaries grouped by exit_tag
if g == "5":
group_mask = ['exit_reason']
sortcols = ['exit_reason']
group_mask = ["exit_reason"]
sortcols = ["exit_reason"]

if group_mask:
new = bigdf.groupby(group_mask).agg(agg_mask).reset_index()
new.columns = group_mask + agg_cols
new['median_profit_pct'] = new['median_profit_pct'] * 100
new['mean_profit_pct'] = new['mean_profit_pct'] * 100
new['total_profit_pct'] = new['total_profit_pct'] * 100
new["median_profit_pct"] = new["median_profit_pct"] * 100
new["mean_profit_pct"] = new["mean_profit_pct"] * 100
new["total_profit_pct"] = new["total_profit_pct"] * 100

_print_table(new, sortcols, name=f"Group {g}:",
to_csv=to_csv, csv_path=csv_path)
_print_table(new, sortcols, name=f"Group {g}:", to_csv=to_csv, csv_path=csv_path)
else:
logger.warning("Invalid group mask specified.")


def _do_rejected_signals_output(rejected_signals_df: pd.DataFrame,
to_csv: bool = False, csv_path=None) -> None:
cols = ['pair', 'date', 'enter_tag']
sortcols = ['date', 'pair', 'enter_tag']
_print_table(rejected_signals_df[cols],
sortcols,
show_index=False,
name="Rejected Signals:",
to_csv=to_csv,
csv_path=csv_path)
def _do_rejected_signals_output(
rejected_signals_df: pd.DataFrame, to_csv: bool = False, csv_path=None
) -> None:
cols = ["pair", "date", "enter_tag"]
sortcols = ["date", "pair", "enter_tag"]
_print_table(
rejected_signals_df[cols],
sortcols,
show_index=False,
name="Rejected Signals:",
to_csv=to_csv,
csv_path=csv_path,
)


def _select_rows_within_dates(df, timerange=None, df_date_col: str = 'date'):
def _select_rows_within_dates(df, timerange=None, df_date_col: str = "date"):
if timerange:
if timerange.starttype == 'date':
if timerange.starttype == "date":
df = df.loc[(df[df_date_col] >= timerange.startdt)]
if timerange.stoptype == 'date':
if timerange.stoptype == "date":
df = df.loc[(df[df_date_col] < timerange.stopdt)]
return df


def _select_rows_by_tags(df, enter_reason_list, exit_reason_list):
if enter_reason_list and "all" not in enter_reason_list:
df = df.loc[(df['enter_reason'].isin(enter_reason_list))]
df = df.loc[(df["enter_reason"].isin(enter_reason_list))]

if exit_reason_list and "all" not in exit_reason_list:
df = df.loc[(df['exit_reason'].isin(exit_reason_list))]
df = df.loc[(df["exit_reason"].isin(exit_reason_list))]
return df


def prepare_results(analysed_trades, stratname,
enter_reason_list, exit_reason_list,
timerange=None):
def prepare_results(
analysed_trades, stratname, enter_reason_list, exit_reason_list, timerange=None
):
res_df = pd.DataFrame()
for pair, trades in analysed_trades[stratname].items():
if (trades.shape[0] > 0):
trades.dropna(subset=['close_date'], inplace=True)
if trades.shape[0] > 0:
trades.dropna(subset=["close_date"], inplace=True)
res_df = pd.concat([res_df, trades], ignore_index=True)

res_df = _select_rows_within_dates(res_df, timerange)

if res_df is not None and res_df.shape[0] > 0 and ('enter_reason' in res_df.columns):
if res_df is not None and res_df.shape[0] > 0 and ("enter_reason" in res_df.columns):
res_df = _select_rows_by_tags(res_df, enter_reason_list, exit_reason_list)

return res_df


def print_results(res_df: pd.DataFrame, analysis_groups: List[str], indicator_list: List[str],
csv_path: Path, rejected_signals=None, to_csv=False):
def print_results(
res_df: pd.DataFrame,
analysis_groups: List[str],
indicator_list: List[str],
csv_path: Path,
rejected_signals=None,
to_csv=False,
):
if res_df.shape[0] > 0:
if analysis_groups:
_do_group_table_output(res_df, analysis_groups, to_csv=to_csv, csv_path=csv_path)
@@ -237,30 +270,31 @@ def print_results(res_df: pd.DataFrame, analysis_groups: List[str], indicator_li

# NB this can be large for big dataframes!
if "all" in indicator_list:
_print_table(res_df,
show_index=False,
name="Indicators:",
to_csv=to_csv,
csv_path=csv_path)
_print_table(
res_df, show_index=False, name="Indicators:", to_csv=to_csv, csv_path=csv_path
)
elif indicator_list is not None and indicator_list:
available_inds = []
for ind in indicator_list:
if ind in res_df:
available_inds.append(ind)
ilist = ["pair", "enter_reason", "exit_reason"] + available_inds
_print_table(res_df[ilist],
sortcols=['exit_reason'],
show_index=False,
name="Indicators:",
to_csv=to_csv,
csv_path=csv_path)
_print_table(
res_df[ilist],
sortcols=["exit_reason"],
show_index=False,
name="Indicators:",
to_csv=to_csv,
csv_path=csv_path,
)
else:
print("\\No trades to show")


def _print_table(df: pd.DataFrame, sortcols=None, *, show_index=False, name=None,
to_csv=False, csv_path: Path):
if (sortcols is not None):
def _print_table(
df: pd.DataFrame, sortcols=None, *, show_index=False, name=None, to_csv=False, csv_path: Path
):
if sortcols is not None:
data = df.sort_values(sortcols)
else:
data = df
@@ -273,60 +307,64 @@ def _print_table(df: pd.DataFrame, sortcols=None, *, show_index=False, name=None
if name is not None:
print(name)

print(
tabulate(
data,
headers='keys',
tablefmt='psql',
showindex=show_index
)
)
print(tabulate(data, headers="keys", tablefmt="psql", showindex=show_index))


def process_entry_exit_reasons(config: Config):
try:
analysis_groups = config.get('analysis_groups', [])
enter_reason_list = config.get('enter_reason_list', ["all"])
exit_reason_list = config.get('exit_reason_list', ["all"])
indicator_list = config.get('indicator_list', [])
do_rejected = config.get('analysis_rejected', False)
to_csv = config.get('analysis_to_csv', False)
csv_path = Path(config.get('analysis_csv_path', config['exportfilename']))
analysis_groups = config.get("analysis_groups", [])
enter_reason_list = config.get("enter_reason_list", ["all"])
exit_reason_list = config.get("exit_reason_list", ["all"])
indicator_list = config.get("indicator_list", [])
do_rejected = config.get("analysis_rejected", False)
to_csv = config.get("analysis_to_csv", False)
csv_path = Path(config.get("analysis_csv_path", config["exportfilename"]))
if to_csv and not csv_path.is_dir():
raise OperationalException(f"Specified directory {csv_path} does not exist.")

timerange = TimeRange.parse_timerange(None if config.get(
'timerange') is None else str(config.get('timerange')))
timerange = TimeRange.parse_timerange(
None if config.get("timerange") is None else str(config.get("timerange"))
)

backtest_stats = load_backtest_stats(config['exportfilename'])
backtest_stats = load_backtest_stats(config["exportfilename"])

for strategy_name, results in backtest_stats['strategy'].items():
trades = load_backtest_data(config['exportfilename'], strategy_name)
for strategy_name, results in backtest_stats["strategy"].items():
trades = load_backtest_data(config["exportfilename"], strategy_name)

if trades is not None and not trades.empty:
signal_candles = _load_signal_candles(config['exportfilename'])
signal_candles = _load_signal_candles(config["exportfilename"])

rej_df = None
if do_rejected:
rejected_signals_dict = _load_rejected_signals(config['exportfilename'])
rej_df = prepare_results(rejected_signals_dict, strategy_name,
enter_reason_list, exit_reason_list,
timerange=timerange)
rejected_signals_dict = _load_rejected_signals(config["exportfilename"])
rej_df = prepare_results(
rejected_signals_dict,
strategy_name,
enter_reason_list,
exit_reason_list,
timerange=timerange,
)

analysed_trades_dict = _process_candles_and_indicators(
config['exchange']['pair_whitelist'], strategy_name,
trades, signal_candles)
config["exchange"]["pair_whitelist"], strategy_name, trades, signal_candles
)

res_df = prepare_results(analysed_trades_dict, strategy_name,
enter_reason_list, exit_reason_list,
timerange=timerange)
res_df = prepare_results(
analysed_trades_dict,
strategy_name,
enter_reason_list,
exit_reason_list,
timerange=timerange,
)

print_results(res_df,
analysis_groups,
indicator_list,
rejected_signals=rej_df,
to_csv=to_csv,
csv_path=csv_path)
print_results(
res_df,
analysis_groups,
indicator_list,
rejected_signals=rej_df,
to_csv=to_csv,
csv_path=csv_path,
)

except ValueError as e:
raise OperationalException(e) from e

@@ -5,8 +5,17 @@ Includes:
* load data for a pair (or a list of pairs) from disk
* download data from exchange and store to disk
"""

# flake8: noqa: F401
from .datahandlers import get_datahandler
from .history_utils import (convert_trades_to_ohlcv, download_data_main, get_timerange, load_data,
load_pair_history, refresh_backtest_ohlcv_data,
refresh_backtest_trades_data, refresh_data, validate_backtest_data)
from .history_utils import (
convert_trades_to_ohlcv,
download_data_main,
get_timerange,
load_data,
load_pair_history,
refresh_backtest_ohlcv_data,
refresh_backtest_trades_data,
refresh_data,
validate_backtest_data,
)

@@ -14,11 +14,11 @@ logger = logging.getLogger(__name__)


class FeatherDataHandler(IDataHandler):

_columns = DEFAULT_DATAFRAME_COLUMNS

def ohlcv_store(
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType) -> None:
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType
) -> None:
"""
Store data in json format "values".
format looks as follows:
@@ -33,11 +33,12 @@ class FeatherDataHandler(IDataHandler):
self.create_dir_if_needed(filename)

data.reset_index(drop=True).loc[:, self._columns].to_feather(
filename, compression_level=9, compression='lz4')
filename, compression_level=9, compression="lz4"
)

def _ohlcv_load(self, pair: str, timeframe: str,
timerange: Optional[TimeRange], candle_type: CandleType
) -> DataFrame:
def _ohlcv_load(
self, pair: str, timeframe: str, timerange: Optional[TimeRange], candle_type: CandleType
) -> DataFrame:
"""
Internal method used to load data for one pair from disk.
Implements the loading and conversion to a Pandas dataframe.
@@ -50,28 +51,31 @@ class FeatherDataHandler(IDataHandler):
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: DataFrame with ohlcv data, or empty DataFrame
"""
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type)
filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type=candle_type)
if not filename.exists():
# Fallback mode for 1M files
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True)
self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True
)
if not filename.exists():
return DataFrame(columns=self._columns)

pairdata = read_feather(filename)
pairdata.columns = self._columns
pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
'low': 'float', 'close': 'float', 'volume': 'float'})
pairdata['date'] = to_datetime(pairdata['date'], unit='ms', utc=True)
pairdata = pairdata.astype(
dtype={
"open": "float",
"high": "float",
"low": "float",
"close": "float",
"volume": "float",
}
)
pairdata["date"] = to_datetime(pairdata["date"], unit="ms", utc=True)
return pairdata

def ohlcv_append(
self,
pair: str,
timeframe: str,
data: DataFrame,
candle_type: CandleType
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType
) -> None:
"""
Append data to existing data structures
@@ -92,7 +96,7 @@ class FeatherDataHandler(IDataHandler):
"""
filename = self._pair_trades_filename(self._datadir, pair, trading_mode)
self.create_dir_if_needed(filename)
data.reset_index(drop=True).to_feather(filename, compression_level=9, compression='lz4')
data.reset_index(drop=True).to_feather(filename, compression_level=9, compression="lz4")

def trades_append(self, pair: str, data: DataFrame):
"""
@@ -104,7 +108,7 @@ class FeatherDataHandler(IDataHandler):
raise NotImplementedError()

def _trades_load(
self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
) -> DataFrame:
"""
Load a pair from file, either .json.gz or .json

@@ -15,11 +15,11 @@ logger = logging.getLogger(__name__)


class HDF5DataHandler(IDataHandler):

_columns = DEFAULT_DATAFRAME_COLUMNS

def ohlcv_store(
self, pair: str, timeframe: str, data: pd.DataFrame, candle_type: CandleType) -> None:
self, pair: str, timeframe: str, data: pd.DataFrame, candle_type: CandleType
) -> None:
"""
Store data in hdf5 file.
:param pair: Pair - used to generate filename
@@ -35,13 +35,18 @@ class HDF5DataHandler(IDataHandler):
self.create_dir_if_needed(filename)

_data.loc[:, self._columns].to_hdf(
filename, key=key, mode='a', complevel=9, complib='blosc',
format='table', data_columns=['date']
filename,
key=key,
mode="a",
complevel=9,
complib="blosc",
format="table",
data_columns=["date"],
)

def _ohlcv_load(self, pair: str, timeframe: str,
timerange: Optional[TimeRange], candle_type: CandleType
) -> pd.DataFrame:
def _ohlcv_load(
self, pair: str, timeframe: str, timerange: Optional[TimeRange], candle_type: CandleType
) -> pd.DataFrame:
"""
Internal method used to load data for one pair from disk.
Implements the loading and conversion to a Pandas dataframe.
@@ -55,41 +60,40 @@ class HDF5DataHandler(IDataHandler):
:return: DataFrame with ohlcv data, or empty DataFrame
"""
key = self._pair_ohlcv_key(pair, timeframe)
filename = self._pair_data_filename(
self._datadir,
pair,
timeframe,
candle_type=candle_type
)
filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type=candle_type)

if not filename.exists():
# Fallback mode for 1M files
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True)
self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True
)
if not filename.exists():
return pd.DataFrame(columns=self._columns)
where = []
if timerange:
if timerange.starttype == 'date':
if timerange.starttype == "date":
where.append(f"date >= Timestamp({timerange.startts * 1e9})")
if timerange.stoptype == 'date':
if timerange.stoptype == "date":
where.append(f"date <= Timestamp({timerange.stopts * 1e9})")

pairdata = pd.read_hdf(filename, key=key, mode="r", where=where)

if list(pairdata.columns) != self._columns:
raise ValueError("Wrong dataframe format")
pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
'low': 'float', 'close': 'float', 'volume': 'float'})
pairdata = pairdata.astype(
dtype={
"open": "float",
"high": "float",
"low": "float",
"close": "float",
"volume": "float",
}
)
pairdata = pairdata.reset_index(drop=True)
return pairdata

def ohlcv_append(
self,
pair: str,
timeframe: str,
data: pd.DataFrame,
candle_type: CandleType
self, pair: str, timeframe: str, data: pd.DataFrame, candle_type: CandleType
) -> None:
"""
Append data to existing data structures
@@ -111,9 +115,13 @@ class HDF5DataHandler(IDataHandler):
key = self._pair_trades_key(pair)

data.to_hdf(
self._pair_trades_filename(self._datadir, pair, trading_mode), key=key,
mode='a', complevel=9, complib='blosc',
format='table', data_columns=['timestamp']
self._pair_trades_filename(self._datadir, pair, trading_mode),
key=key,
mode="a",
complevel=9,
complib="blosc",
format="table",
data_columns=["timestamp"],
)

def trades_append(self, pair: str, data: pd.DataFrame):
@@ -142,13 +150,13 @@ class HDF5DataHandler(IDataHandler):
return pd.DataFrame(columns=DEFAULT_TRADES_COLUMNS)
where = []
if timerange:
if timerange.starttype == 'date':
if timerange.starttype == "date":
where.append(f"timestamp >= {timerange.startts * 1e3}")
if timerange.stoptype == 'date':
if timerange.stoptype == "date":
where.append(f"timestamp < {timerange.stopts * 1e3}")

trades: pd.DataFrame = pd.read_hdf(filename, key=key, mode="r", where=where)
trades[['id', 'type']] = trades[['id', 'type']].replace({np.nan: None})
trades[["id", "type"]] = trades[["id", "type"]].replace({np.nan: None})
return trades

@classmethod
@@ -158,7 +166,7 @@ class HDF5DataHandler(IDataHandler):
@classmethod
def _pair_ohlcv_key(cls, pair: str, timeframe: str) -> str:
# Escape futures pairs to avoid warnings
pair_esc = pair.replace(':', '_')
pair_esc = pair.replace(":", "_")
return f"{pair_esc}/ohlcv/tf_{timeframe}"

@classmethod

@@ -3,6 +3,7 @@ Abstract datahandler interface.
It's subclasses handle and storing data from disk.

"""

import logging
import re
from abc import ABC, abstractmethod
@@ -16,8 +17,12 @@ from pandas import DataFrame
from freqtrade import misc
from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_TRADES_COLUMNS, ListPairsWithTimeframes
from freqtrade.data.converter import (clean_ohlcv_dataframe, trades_convert_types,
trades_df_remove_duplicates, trim_dataframe)
from freqtrade.data.converter import (
clean_ohlcv_dataframe,
trades_convert_types,
trades_df_remove_duplicates,
trim_dataframe,
)
from freqtrade.enums import CandleType, TradingMode
from freqtrade.exchange import timeframe_to_seconds

@@ -26,8 +31,7 @@ logger = logging.getLogger(__name__)


class IDataHandler(ABC):

_OHLCV_REGEX = r'^([a-zA-Z_\d-]+)\-(\d+[a-zA-Z]{1,2})\-?([a-zA-Z_]*)?(?=\.)'
_OHLCV_REGEX = r"^([a-zA-Z_\d-]+)\-(\d+[a-zA-Z]{1,2})\-?([a-zA-Z_]*)?(?=\.)"

def __init__(self, datadir: Path) -> None:
self._datadir = datadir
@@ -41,7 +45,8 @@ class IDataHandler(ABC):

@classmethod
def ohlcv_get_available_data(
cls, datadir: Path, trading_mode: TradingMode) -> ListPairsWithTimeframes:
cls, datadir: Path, trading_mode: TradingMode
) -> ListPairsWithTimeframes:
"""
Returns a list of all pairs with ohlcv data available in this datadir
:param datadir: Directory to search for ohlcv files
@@ -49,17 +54,20 @@ class IDataHandler(ABC):
:return: List of Tuples of (pair, timeframe, CandleType)
"""
if trading_mode == TradingMode.FUTURES:
datadir = datadir.joinpath('futures')
datadir = datadir.joinpath("futures")
_tmp = [
re.search(
cls._OHLCV_REGEX, p.name
) for p in datadir.glob(f"*.{cls._get_file_extension()}")]
re.search(cls._OHLCV_REGEX, p.name)
for p in datadir.glob(f"*.{cls._get_file_extension()}")
]
return [
(
cls.rebuild_pair_from_filename(match[1]),
cls.rebuild_timeframe_from_filename(match[2]),
CandleType.from_string(match[3])
) for match in _tmp if match and len(match.groups()) > 1]
CandleType.from_string(match[3]),
)
for match in _tmp
if match and len(match.groups()) > 1
]

@classmethod
def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> List[str]:
@@ -73,17 +81,20 @@ class IDataHandler(ABC):
"""
candle = ""
if candle_type != CandleType.SPOT:
datadir = datadir.joinpath('futures')
datadir = datadir.joinpath("futures")
candle = f"-{candle_type}"
ext = cls._get_file_extension()
_tmp = [re.search(r'^(\S+)(?=\-' + timeframe + candle + f'.{ext})', p.name)
for p in datadir.glob(f"*{timeframe}{candle}.{ext}")]
_tmp = [
re.search(r"^(\S+)(?=\-" + timeframe + candle + f".{ext})", p.name)
for p in datadir.glob(f"*{timeframe}{candle}.{ext}")
]
# Check if regex found something and only return these results
return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match]

@abstractmethod
def ohlcv_store(
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType) -> None:
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType
) -> None:
"""
Store ohlcv data.
:param pair: Pair - used to generate filename
@@ -93,8 +104,9 @@ class IDataHandler(ABC):
:return: None
"""

def ohlcv_data_min_max(self, pair: str, timeframe: str,
candle_type: CandleType) -> Tuple[datetime, datetime, int]:
def ohlcv_data_min_max(
self, pair: str, timeframe: str, candle_type: CandleType
) -> Tuple[datetime, datetime, int]:
"""
Returns the min and max timestamp for the given pair and timeframe.
:param pair: Pair to get min/max for
@@ -109,12 +121,12 @@ class IDataHandler(ABC):
datetime.fromtimestamp(0, tz=timezone.utc),
0,
)
return df.iloc[0]['date'].to_pydatetime(), df.iloc[-1]['date'].to_pydatetime(), len(df)
return df.iloc[0]["date"].to_pydatetime(), df.iloc[-1]["date"].to_pydatetime(), len(df)

@abstractmethod
def _ohlcv_load(self, pair: str, timeframe: str, timerange: Optional[TimeRange],
candle_type: CandleType
) -> DataFrame:
def _ohlcv_load(
self, pair: str, timeframe: str, timerange: Optional[TimeRange], candle_type: CandleType
) -> DataFrame:
"""
Internal method used to load data for one pair from disk.
Implements the loading and conversion to a Pandas dataframe.
@@ -144,11 +156,7 @@ class IDataHandler(ABC):

@abstractmethod
def ohlcv_append(
self,
pair: str,
timeframe: str,
data: DataFrame,
candle_type: CandleType
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType
) -> None:
"""
Append data to existing data structures
@@ -166,8 +174,10 @@ class IDataHandler(ABC):
:return: List of Pairs
"""
_ext = cls._get_file_extension()
_tmp = [re.search(r'^(\S+)(?=\-trades.' + _ext + ')', p.name)
for p in datadir.glob(f"*trades.{_ext}")]
_tmp = [
re.search(r"^(\S+)(?=\-trades." + _ext + ")", p.name)
for p in datadir.glob(f"*trades.{_ext}")
]
# Check if regex found something and only return these results to avoid exceptions.
return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match]

@@ -227,7 +237,7 @@ class IDataHandler(ABC):
return False

def trades_load(
self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
) -> DataFrame:
"""
Load a pair from file, either .json.gz or .json
@@ -260,7 +270,7 @@ class IDataHandler(ABC):
pair: str,
timeframe: str,
candle_type: CandleType,
no_timeframe_modify: bool = False
no_timeframe_modify: bool = False,
) -> Path:
pair_s = misc.pair_to_filename(pair)
candle = ""
@@ -268,10 +278,9 @@ class IDataHandler(ABC):
timeframe = cls.timeframe_to_file(timeframe)

if candle_type != CandleType.SPOT:
datadir = datadir.joinpath('futures')
datadir = datadir.joinpath("futures")
candle = f"-{candle_type}"
filename = datadir.joinpath(
f'{pair_s}-{timeframe}{candle}.{cls._get_file_extension()}')
filename = datadir.joinpath(f"{pair_s}-{timeframe}{candle}.{cls._get_file_extension()}")
return filename

@classmethod
@@ -279,14 +288,14 @@ class IDataHandler(ABC):
pair_s = misc.pair_to_filename(pair)
if trading_mode == TradingMode.FUTURES:
# Futures pair ...
datadir = datadir.joinpath('futures')
datadir = datadir.joinpath("futures")

filename = datadir.joinpath(f'{pair_s}-trades.{cls._get_file_extension()}')
filename = datadir.joinpath(f"{pair_s}-trades.{cls._get_file_extension()}")
return filename

@staticmethod
def timeframe_to_file(timeframe: str):
return timeframe.replace('M', 'Mo')
return timeframe.replace("M", "Mo")

@staticmethod
def rebuild_timeframe_from_filename(timeframe: str) -> str:
@@ -294,7 +303,7 @@ class IDataHandler(ABC):
converts timeframe from disk to file
Replaces mo with M (to avoid problems on case-insensitive filesystems)
"""
return re.sub('1mo', '1M', timeframe, flags=re.IGNORECASE)
return re.sub("1mo", "1M", timeframe, flags=re.IGNORECASE)

@staticmethod
def rebuild_pair_from_filename(pair: str) -> str:
@@ -302,18 +311,22 @@ class IDataHandler(ABC):
Rebuild pair name from filename
Assumes a asset name of max. 7 length to also support BTC-PERP and BTC-PERP:USD names.
"""
res = re.sub(r'^(([A-Za-z\d]{1,10})|^([A-Za-z\-]{1,6}))(_)', r'\g<1>/', pair, count=1)
res = re.sub('_', ':', res, count=1)
res = re.sub(r"^(([A-Za-z\d]{1,10})|^([A-Za-z\-]{1,6}))(_)", r"\g<1>/", pair, count=1)
res = re.sub("_", ":", res, count=1)
return res

def ohlcv_load(self, pair, timeframe: str,
candle_type: CandleType, *,
timerange: Optional[TimeRange] = None,
fill_missing: bool = True,
drop_incomplete: bool = False,
startup_candles: int = 0,
warn_no_data: bool = True,
) -> DataFrame:
def ohlcv_load(
self,
pair,
timeframe: str,
candle_type: CandleType,
*,
timerange: Optional[TimeRange] = None,
fill_missing: bool = True,
drop_incomplete: bool = False,
startup_candles: int = 0,
warn_no_data: bool = True,
) -> DataFrame:
"""
Load cached candle (OHLCV) data for the given pair.

@@ -333,15 +346,12 @@ class IDataHandler(ABC):
timerange_startup.subtract_start(timeframe_to_seconds(timeframe) * startup_candles)

pairdf = self._ohlcv_load(
pair,
timeframe,
timerange=timerange_startup,
candle_type=candle_type
pair, timeframe, timerange=timerange_startup, candle_type=candle_type
)
if self._check_empty_df(pairdf, pair, timeframe, candle_type, warn_no_data):
return pairdf
else:
enddate = pairdf.iloc[-1]['date']
enddate = pairdf.iloc[-1]["date"]

if timerange_startup:
self._validate_pairdata(pair, pairdf, timeframe, candle_type, timerange_startup)
@@ -350,17 +360,25 @@ class IDataHandler(ABC):
return pairdf

# incomplete candles should only be dropped if we didn't trim the end beforehand.
pairdf = clean_ohlcv_dataframe(pairdf, timeframe,
pair=pair,
fill_missing=fill_missing,
drop_incomplete=(drop_incomplete and
enddate == pairdf.iloc[-1]['date']))
pairdf = clean_ohlcv_dataframe(
pairdf,
timeframe,
pair=pair,
fill_missing=fill_missing,
drop_incomplete=(drop_incomplete and enddate == pairdf.iloc[-1]["date"]),
)
self._check_empty_df(pairdf, pair, timeframe, candle_type, warn_no_data)
return pairdf

def _check_empty_df(
self, pairdf: DataFrame, pair: str, timeframe: str, candle_type: CandleType,
warn_no_data: bool, warn_price: bool = False) -> bool:
self,
pairdf: DataFrame,
pair: str,
timeframe: str,
candle_type: CandleType,
warn_no_data: bool,
warn_price: bool = False,
) -> bool:
"""
Warn on empty dataframe
"""
@@ -373,39 +391,55 @@ class IDataHandler(ABC):
return True
elif warn_price:
candle_price_gap = 0
if (candle_type in (CandleType.SPOT, CandleType.FUTURES) and
not pairdf.empty
and 'close' in pairdf.columns and 'open' in pairdf.columns):
if (
candle_type in (CandleType.SPOT, CandleType.FUTURES)
and not pairdf.empty
and "close" in pairdf.columns
and "open" in pairdf.columns
):
# Detect gaps between prior close and open
gaps = ((pairdf['open'] - pairdf['close'].shift(1)) / pairdf['close'].shift(1))
gaps = (pairdf["open"] - pairdf["close"].shift(1)) / pairdf["close"].shift(1)
gaps = gaps.dropna()
if len(gaps):
candle_price_gap = max(abs(gaps))
if candle_price_gap > 0.1:
logger.info(f"Price jump in {pair}, {timeframe}, {candle_type} between two candles "
f"of {candle_price_gap:.2%} detected.")
logger.info(
f"Price jump in {pair}, {timeframe}, {candle_type} between two candles "
f"of {candle_price_gap:.2%} detected."
)

return False

def _validate_pairdata(self, pair, pairdata: DataFrame, timeframe: str,
candle_type: CandleType, timerange: TimeRange):
def _validate_pairdata(
self,
pair,
pairdata: DataFrame,
timeframe: str,
candle_type: CandleType,
timerange: TimeRange,
):
"""
Validates pairdata for missing data at start end end and logs warnings.
:param pairdata: Dataframe to validate
:param timerange: Timerange specified for start and end dates
"""

if timerange.starttype == 'date':
if pairdata.iloc[0]['date'] > timerange.startdt:
logger.warning(f"{pair}, {candle_type}, {timeframe}, "
f"data starts at {pairdata.iloc[0]['date']:%Y-%m-%d %H:%M:%S}")
if timerange.stoptype == 'date':
if pairdata.iloc[-1]['date'] < timerange.stopdt:
logger.warning(f"{pair}, {candle_type}, {timeframe}, "
f"data ends at {pairdata.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}")
if timerange.starttype == "date":
if pairdata.iloc[0]["date"] > timerange.startdt:
logger.warning(
f"{pair}, {candle_type}, {timeframe}, "
f"data starts at {pairdata.iloc[0]['date']:%Y-%m-%d %H:%M:%S}"
)
if timerange.stoptype == "date":
if pairdata.iloc[-1]["date"] < timerange.stopdt:
logger.warning(
f"{pair}, {candle_type}, {timeframe}, "
f"data ends at {pairdata.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}"
)

def rename_futures_data(
self, pair: str, new_pair: str, timeframe: str, candle_type: CandleType):
self, pair: str, new_pair: str, timeframe: str, candle_type: CandleType
):
"""
Temporary method to migrate data from old naming to new naming (BTC/USDT -> BTC/USDT:USDT)
Only used for binance to support the binance futures naming unification.
@@ -431,18 +465,19 @@ class IDataHandler(ABC):

if funding_rate_combs:
logger.warning(
f'Migrating {len(funding_rate_combs)} funding fees to correct timeframe.')
f"Migrating {len(funding_rate_combs)} funding fees to correct timeframe."
)

for pair, timeframe, candletype in funding_rate_combs:
old_name = self._pair_data_filename(self._datadir, pair, timeframe, candletype)
new_name = self._pair_data_filename(self._datadir, pair, ff_timeframe, candletype)

if not Path(old_name).exists():
logger.warning(f'{old_name} does not exist, skipping.')
logger.warning(f"{old_name} does not exist, skipping.")
continue

if Path(new_name).exists():
logger.warning(f'{new_name} already exists, Removing.')
logger.warning(f"{new_name} already exists, Removing.")
Path(new_name).unlink()

Path(old_name).rename(new_name)
@@ -457,27 +492,33 @@ def get_datahandlerclass(datatype: str) -> Type[IDataHandler]:
:return: Datahandler class
"""

if datatype == 'json':
if datatype == "json":
from .jsondatahandler import JsonDataHandler

return JsonDataHandler
elif datatype == 'jsongz':
elif datatype == "jsongz":
from .jsondatahandler import JsonGzDataHandler

return JsonGzDataHandler
elif datatype == 'hdf5':
elif datatype == "hdf5":
from .hdf5datahandler import HDF5DataHandler

return HDF5DataHandler
elif datatype == 'feather':
elif datatype == "feather":
from .featherdatahandler import FeatherDataHandler

return FeatherDataHandler
elif datatype == 'parquet':
elif datatype == "parquet":
from .parquetdatahandler import ParquetDataHandler

return ParquetDataHandler
else:
raise ValueError(f"No datahandler for datatype {datatype} available.")


def get_datahandler(datadir: Path, data_format: Optional[str] = None,
data_handler: Optional[IDataHandler] = None) -> IDataHandler:
def get_datahandler(
datadir: Path, data_format: Optional[str] = None, data_handler: Optional[IDataHandler] = None
) -> IDataHandler:
"""
:param datadir: Folder to save data
:param data_format: dataformat to use
@@ -485,6 +526,6 @@ def get_datahandler(datadir: Path, data_format: Optional[str] = None,
"""

if not data_handler:
HandlerClass = get_datahandlerclass(data_format or 'feather')
HandlerClass = get_datahandlerclass(data_format or "feather")
data_handler = HandlerClass(datadir)
return data_handler

@@ -17,12 +17,12 @@ logger = logging.getLogger(__name__)


class JsonDataHandler(IDataHandler):

_use_zip = False
_columns = DEFAULT_DATAFRAME_COLUMNS

def ohlcv_store(
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType) -> None:
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType
) -> None:
"""
Store data in json format "values".
format looks as follows:
@@ -37,16 +37,16 @@ class JsonDataHandler(IDataHandler):
self.create_dir_if_needed(filename)
_data = data.copy()
# Convert date to int
_data['date'] = _data['date'].astype(np.int64) // 1000 // 1000
_data["date"] = _data["date"].astype(np.int64) // 1000 // 1000

# Reset index, select only appropriate columns and save as json
_data.reset_index(drop=True).loc[:, self._columns].to_json(
filename, orient="values",
compression='gzip' if self._use_zip else None)
filename, orient="values", compression="gzip" if self._use_zip else None
)

def _ohlcv_load(self, pair: str, timeframe: str,
timerange: Optional[TimeRange], candle_type: CandleType
) -> DataFrame:
def _ohlcv_load(
self, pair: str, timeframe: str, timerange: Optional[TimeRange], candle_type: CandleType
) -> DataFrame:
"""
Internal method used to load data for one pair from disk.
Implements the loading and conversion to a Pandas dataframe.
@@ -59,31 +59,34 @@ class JsonDataHandler(IDataHandler):
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: DataFrame with ohlcv data, or empty DataFrame
"""
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type)
filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type=candle_type)
if not filename.exists():
# Fallback mode for 1M files
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True)
self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True
)
if not filename.exists():
return DataFrame(columns=self._columns)
try:
pairdata = read_json(filename, orient='values')
pairdata = read_json(filename, orient="values")
pairdata.columns = self._columns
except ValueError:
logger.error(f"Could not load data for {pair}.")
return DataFrame(columns=self._columns)
pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
'low': 'float', 'close': 'float', 'volume': 'float'})
pairdata['date'] = to_datetime(pairdata['date'], unit='ms', utc=True)
pairdata = pairdata.astype(
dtype={
"open": "float",
"high": "float",
"low": "float",
"close": "float",
"volume": "float",
}
)
pairdata["date"] = to_datetime(pairdata["date"], unit="ms", utc=True)
return pairdata

def ohlcv_append(
self,
pair: str,
timeframe: str,
data: DataFrame,
candle_type: CandleType
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType
) -> None:
"""
Append data to existing data structures
@@ -145,5 +148,4 @@ class JsonDataHandler(IDataHandler):


class JsonGzDataHandler(JsonDataHandler):

_use_zip = True

@@ -14,11 +14,11 @@ logger = logging.getLogger(__name__)


class ParquetDataHandler(IDataHandler):

_columns = DEFAULT_DATAFRAME_COLUMNS

def ohlcv_store(
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType) -> None:
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType
) -> None:
"""
Store data in json format "values".
format looks as follows:
@@ -34,9 +34,9 @@ class ParquetDataHandler(IDataHandler):

data.reset_index(drop=True).loc[:, self._columns].to_parquet(filename)

def _ohlcv_load(self, pair: str, timeframe: str,
timerange: Optional[TimeRange], candle_type: CandleType
) -> DataFrame:
def _ohlcv_load(
self, pair: str, timeframe: str, timerange: Optional[TimeRange], candle_type: CandleType
) -> DataFrame:
"""
Internal method used to load data for one pair from disk.
Implements the loading and conversion to a Pandas dataframe.
@@ -49,28 +49,31 @@ class ParquetDataHandler(IDataHandler):
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: DataFrame with ohlcv data, or empty DataFrame
"""
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type)
filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type=candle_type)
if not filename.exists():
# Fallback mode for 1M files
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True)
self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True
)
if not filename.exists():
return DataFrame(columns=self._columns)

pairdata = read_parquet(filename)
pairdata.columns = self._columns
pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
'low': 'float', 'close': 'float', 'volume': 'float'})
pairdata['date'] = to_datetime(pairdata['date'], unit='ms', utc=True)
pairdata = pairdata.astype(
dtype={
"open": "float",
"high": "float",
"low": "float",
"close": "float",
"volume": "float",
}
)
pairdata["date"] = to_datetime(pairdata["date"], unit="ms", utc=True)
return pairdata

def ohlcv_append(
self,
pair: str,
timeframe: str,
data: DataFrame,
candle_type: CandleType
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType
) -> None:
"""
Append data to existing data structures

@@ -7,11 +7,20 @@ from typing import Dict, List, Optional, Tuple
|
||||
from pandas import DataFrame, concat
|
||||
|
||||
from freqtrade.configuration import TimeRange
|
||||
from freqtrade.constants import (DATETIME_PRINT_FORMAT, DEFAULT_DATAFRAME_COLUMNS,
|
||||
DL_DATA_TIMEFRAMES, DOCS_LINK, Config)
|
||||
from freqtrade.data.converter import (clean_ohlcv_dataframe, convert_trades_to_ohlcv,
|
||||
ohlcv_to_dataframe, trades_df_remove_duplicates,
|
||||
trades_list_to_df)
|
||||
from freqtrade.constants import (
|
||||
DATETIME_PRINT_FORMAT,
|
||||
DEFAULT_DATAFRAME_COLUMNS,
|
||||
DL_DATA_TIMEFRAMES,
|
||||
DOCS_LINK,
|
||||
Config,
|
||||
)
|
||||
from freqtrade.data.converter import (
|
||||
clean_ohlcv_dataframe,
|
||||
convert_trades_to_ohlcv,
|
||||
ohlcv_to_dataframe,
|
||||
trades_df_remove_duplicates,
|
||||
trades_list_to_df,
|
||||
)
|
||||
from freqtrade.data.history.datahandlers import IDataHandler, get_datahandler
|
||||
from freqtrade.enums import CandleType, TradingMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
@@ -25,17 +34,19 @@ from freqtrade.util.migrations import migrate_data
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def load_pair_history(pair: str,
|
||||
timeframe: str,
|
||||
datadir: Path, *,
|
||||
timerange: Optional[TimeRange] = None,
|
||||
fill_up_missing: bool = True,
|
||||
drop_incomplete: bool = False,
|
||||
startup_candles: int = 0,
|
||||
data_format: Optional[str] = None,
|
||||
data_handler: Optional[IDataHandler] = None,
|
||||
candle_type: CandleType = CandleType.SPOT
|
||||
) -> DataFrame:
|
||||
def load_pair_history(
|
||||
pair: str,
|
||||
timeframe: str,
|
||||
datadir: Path,
|
||||
*,
|
||||
timerange: Optional[TimeRange] = None,
|
||||
fill_up_missing: bool = True,
|
||||
drop_incomplete: bool = False,
|
||||
startup_candles: int = 0,
|
||||
data_format: Optional[str] = None,
|
||||
data_handler: Optional[IDataHandler] = None,
|
||||
candle_type: CandleType = CandleType.SPOT,
|
||||
) -> DataFrame:
|
||||
"""
|
||||
Load cached ohlcv history for the given pair.
|
||||
|
||||
@@ -54,27 +65,30 @@ def load_pair_history(pair: str,
|
||||
"""
|
||||
data_handler = get_datahandler(datadir, data_format, data_handler)
|
||||
|
||||
return data_handler.ohlcv_load(pair=pair,
|
||||
timeframe=timeframe,
|
||||
timerange=timerange,
|
||||
fill_missing=fill_up_missing,
|
||||
drop_incomplete=drop_incomplete,
|
||||
startup_candles=startup_candles,
|
||||
candle_type=candle_type,
|
||||
)
|
||||
return data_handler.ohlcv_load(
|
||||
pair=pair,
|
||||
timeframe=timeframe,
|
||||
timerange=timerange,
|
||||
fill_missing=fill_up_missing,
|
||||
drop_incomplete=drop_incomplete,
|
||||
startup_candles=startup_candles,
|
||||
candle_type=candle_type,
|
||||
)
|
||||
|
||||
|
||||
def load_data(datadir: Path,
              timeframe: str,
              pairs: List[str], *,
              timerange: Optional[TimeRange] = None,
              fill_up_missing: bool = True,
              startup_candles: int = 0,
              fail_without_data: bool = False,
              data_format: str = 'feather',
              candle_type: CandleType = CandleType.SPOT,
              user_futures_funding_rate: Optional[int] = None,
              ) -> Dict[str, DataFrame]:
def load_data(
    datadir: Path,
    timeframe: str,
    pairs: List[str],
    *,
    timerange: Optional[TimeRange] = None,
    fill_up_missing: bool = True,
    startup_candles: int = 0,
    fail_without_data: bool = False,
    data_format: str = "feather",
    candle_type: CandleType = CandleType.SPOT,
    user_futures_funding_rate: Optional[int] = None,
) -> Dict[str, DataFrame]:
"""
|
||||
Load ohlcv history data for a list of pairs.
|
||||
|
||||
@@ -91,18 +105,21 @@ def load_data(datadir: Path,
|
||||
"""
|
||||
result: Dict[str, DataFrame] = {}
|
||||
if startup_candles > 0 and timerange:
|
||||
logger.info(f'Using indicator startup period: {startup_candles} ...')
|
||||
logger.info(f"Using indicator startup period: {startup_candles} ...")
|
||||
|
||||
data_handler = get_datahandler(datadir, data_format)
|
||||
|
||||
for pair in pairs:
|
||||
hist = load_pair_history(pair=pair, timeframe=timeframe,
|
||||
datadir=datadir, timerange=timerange,
|
||||
fill_up_missing=fill_up_missing,
|
||||
startup_candles=startup_candles,
|
||||
data_handler=data_handler,
|
||||
candle_type=candle_type,
|
||||
)
|
||||
hist = load_pair_history(
|
||||
pair=pair,
|
||||
timeframe=timeframe,
|
||||
datadir=datadir,
|
||||
timerange=timerange,
|
||||
fill_up_missing=fill_up_missing,
|
||||
startup_candles=startup_candles,
|
||||
data_handler=data_handler,
|
||||
candle_type=candle_type,
|
||||
)
|
||||
if not hist.empty:
|
||||
result[pair] = hist
|
||||
else:
|
||||
@@ -116,14 +133,16 @@ def load_data(datadir: Path,
|
||||
return result
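# Illustrative sketch (not part of the upstream diff): load_data() returns a dict keyed
# by pair, silently skipping pairs without cached history. Pair list and datadir are
# assumptions for the example; `Path` as imported in the earlier sketch.
data = load_data(
    datadir=Path("user_data/data/binance"),
    timeframe="1h",
    pairs=["BTC/USDT", "ETH/USDT"],
)
for pair, df in data.items():
    print(pair, df["date"].min(), df["date"].max())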
|
||||
|
||||
|
||||
def refresh_data(*, datadir: Path,
|
||||
timeframe: str,
|
||||
pairs: List[str],
|
||||
exchange: Exchange,
|
||||
data_format: Optional[str] = None,
|
||||
timerange: Optional[TimeRange] = None,
|
||||
candle_type: CandleType,
|
||||
) -> None:
|
||||
def refresh_data(
|
||||
*,
|
||||
datadir: Path,
|
||||
timeframe: str,
|
||||
pairs: List[str],
|
||||
exchange: Exchange,
|
||||
data_format: Optional[str] = None,
|
||||
timerange: Optional[TimeRange] = None,
|
||||
candle_type: CandleType,
|
||||
) -> None:
|
||||
"""
|
||||
Refresh ohlcv history data for a list of pairs.
|
||||
|
||||
@@ -137,11 +156,17 @@ def refresh_data(*, datadir: Path,
|
||||
"""
|
||||
data_handler = get_datahandler(datadir, data_format)
|
||||
for idx, pair in enumerate(pairs):
|
||||
process = f'{idx}/{len(pairs)}'
|
||||
_download_pair_history(pair=pair, process=process,
|
||||
timeframe=timeframe, datadir=datadir,
|
||||
timerange=timerange, exchange=exchange, data_handler=data_handler,
|
||||
candle_type=candle_type)
|
||||
process = f"{idx}/{len(pairs)}"
|
||||
_download_pair_history(
|
||||
pair=pair,
|
||||
process=process,
|
||||
timeframe=timeframe,
|
||||
datadir=datadir,
|
||||
timerange=timerange,
|
||||
exchange=exchange,
|
||||
data_handler=data_handler,
|
||||
candle_type=candle_type,
|
||||
)
|
||||
|
||||
|
||||
def _load_cached_data_for_updating(
|
||||
@@ -163,42 +188,49 @@ def _load_cached_data_for_updating(
|
||||
start = None
|
||||
end = None
|
||||
if timerange:
|
||||
if timerange.starttype == 'date':
|
||||
if timerange.starttype == "date":
|
||||
start = timerange.startdt
|
||||
if timerange.stoptype == 'date':
|
||||
if timerange.stoptype == "date":
|
||||
end = timerange.stopdt
|
||||
|
||||
# Intentionally don't pass timerange in - since we need to load the full dataset.
|
||||
data = data_handler.ohlcv_load(pair, timeframe=timeframe,
|
||||
timerange=None, fill_missing=False,
|
||||
drop_incomplete=True, warn_no_data=False,
|
||||
candle_type=candle_type)
|
||||
data = data_handler.ohlcv_load(
|
||||
pair,
|
||||
timeframe=timeframe,
|
||||
timerange=None,
|
||||
fill_missing=False,
|
||||
drop_incomplete=True,
|
||||
warn_no_data=False,
|
||||
candle_type=candle_type,
|
||||
)
|
||||
if not data.empty:
|
||||
if not prepend and start and start < data.iloc[0]['date']:
|
||||
if not prepend and start and start < data.iloc[0]["date"]:
|
||||
# Earlier data than existing data requested, redownload all
|
||||
data = DataFrame(columns=DEFAULT_DATAFRAME_COLUMNS)
|
||||
else:
|
||||
if prepend:
|
||||
end = data.iloc[0]['date']
|
||||
end = data.iloc[0]["date"]
|
||||
else:
|
||||
start = data.iloc[-1]['date']
|
||||
start = data.iloc[-1]["date"]
|
||||
start_ms = int(start.timestamp() * 1000) if start else None
|
||||
end_ms = int(end.timestamp() * 1000) if end else None
|
||||
return data, start_ms, end_ms
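# Illustrative sketch (not part of the upstream diff): the start/end window handled above
# comes from a TimeRange. parse_timerange() accepts "YYYYMMDD-YYYYMMDD" style strings;
# the dates below are arbitrary examples, and starttype/startdt are the attributes the
# code above reads.
from freqtrade.configuration import TimeRange

timerange = TimeRange.parse_timerange("20240101-20240201")
print(timerange.starttype, timerange.startdt, timerange.stoptype, timerange.stopdt)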
|
||||
|
||||
|
||||
def _download_pair_history(pair: str, *,
|
||||
datadir: Path,
|
||||
exchange: Exchange,
|
||||
timeframe: str = '5m',
|
||||
process: str = '',
|
||||
new_pairs_days: int = 30,
|
||||
data_handler: Optional[IDataHandler] = None,
|
||||
timerange: Optional[TimeRange] = None,
|
||||
candle_type: CandleType,
|
||||
erase: bool = False,
|
||||
prepend: bool = False,
|
||||
) -> bool:
|
||||
def _download_pair_history(
|
||||
pair: str,
|
||||
*,
|
||||
datadir: Path,
|
||||
exchange: Exchange,
|
||||
timeframe: str = "5m",
|
||||
process: str = "",
|
||||
new_pairs_days: int = 30,
|
||||
data_handler: Optional[IDataHandler] = None,
|
||||
timerange: Optional[TimeRange] = None,
|
||||
candle_type: CandleType,
|
||||
erase: bool = False,
|
||||
prepend: bool = False,
|
||||
) -> bool:
|
||||
"""
|
||||
Download latest candles from the exchange for the pair and timeframe passed in parameters
|
||||
The data is downloaded starting from the last correct data that
|
||||
@@ -217,54 +249,71 @@ def _download_pair_history(pair: str, *,
|
||||
try:
|
||||
if erase:
|
||||
if data_handler.ohlcv_purge(pair, timeframe, candle_type=candle_type):
|
||||
logger.info(f'Deleting existing data for pair {pair}, {timeframe}, {candle_type}.')
|
||||
logger.info(f"Deleting existing data for pair {pair}, {timeframe}, {candle_type}.")
|
||||
|
||||
data, since_ms, until_ms = _load_cached_data_for_updating(
|
||||
pair, timeframe, timerange,
|
||||
pair,
|
||||
timeframe,
|
||||
timerange,
|
||||
data_handler=data_handler,
|
||||
candle_type=candle_type,
|
||||
prepend=prepend)
|
||||
prepend=prepend,
|
||||
)
|
||||
|
||||
logger.info(f'({process}) - Download history data for "{pair}", {timeframe}, '
|
||||
f'{candle_type} and store in {datadir}. '
|
||||
f'From {format_ms_time(since_ms) if since_ms else "start"} to '
|
||||
f'{format_ms_time(until_ms) if until_ms else "now"}'
|
||||
)
|
||||
logger.info(
|
||||
f'({process}) - Download history data for "{pair}", {timeframe}, '
|
||||
f"{candle_type} and store in {datadir}. "
|
||||
f'From {format_ms_time(since_ms) if since_ms else "start"} to '
|
||||
f'{format_ms_time(until_ms) if until_ms else "now"}'
|
||||
)
|
||||
|
||||
logger.debug("Current Start: %s",
|
||||
f"{data.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}"
|
||||
if not data.empty else 'None')
|
||||
logger.debug("Current End: %s",
|
||||
f"{data.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}"
|
||||
if not data.empty else 'None')
|
||||
logger.debug(
|
||||
"Current Start: %s",
|
||||
f"{data.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}" if not data.empty else "None",
|
||||
)
|
||||
logger.debug(
|
||||
"Current End: %s",
|
||||
f"{data.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}" if not data.empty else "None",
|
||||
)
|
||||
|
||||
# Default since_ms to 30 days if nothing is given
|
||||
new_data = exchange.get_historic_ohlcv(pair=pair,
|
||||
timeframe=timeframe,
|
||||
since_ms=since_ms if since_ms else
|
||||
int((datetime.now() - timedelta(days=new_pairs_days)
|
||||
).timestamp()) * 1000,
|
||||
is_new_pair=data.empty,
|
||||
candle_type=candle_type,
|
||||
until_ms=until_ms if until_ms else None
|
||||
)
|
||||
new_data = exchange.get_historic_ohlcv(
|
||||
pair=pair,
|
||||
timeframe=timeframe,
|
||||
since_ms=(
|
||||
since_ms
|
||||
if since_ms
|
||||
else int((datetime.now() - timedelta(days=new_pairs_days)).timestamp()) * 1000
|
||||
),
|
||||
is_new_pair=data.empty,
|
||||
candle_type=candle_type,
|
||||
until_ms=until_ms if until_ms else None,
|
||||
)
|
||||
# TODO: Maybe move parsing to exchange class (?)
|
||||
new_dataframe = ohlcv_to_dataframe(new_data, timeframe, pair,
|
||||
fill_missing=False, drop_incomplete=True)
|
||||
new_dataframe = ohlcv_to_dataframe(
|
||||
new_data, timeframe, pair, fill_missing=False, drop_incomplete=True
|
||||
)
|
||||
if data.empty:
|
||||
data = new_dataframe
|
||||
else:
|
||||
# Run cleaning again to ensure there were no duplicate candles
|
||||
# Especially between existing and new data.
|
||||
data = clean_ohlcv_dataframe(concat([data, new_dataframe], axis=0), timeframe, pair,
|
||||
fill_missing=False, drop_incomplete=False)
|
||||
data = clean_ohlcv_dataframe(
|
||||
concat([data, new_dataframe], axis=0),
|
||||
timeframe,
|
||||
pair,
|
||||
fill_missing=False,
|
||||
drop_incomplete=False,
|
||||
)
|
||||
|
||||
logger.debug("New Start: %s",
|
||||
f"{data.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}"
|
||||
if not data.empty else 'None')
|
||||
logger.debug("New End: %s",
|
||||
f"{data.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}"
|
||||
if not data.empty else 'None')
|
||||
logger.debug(
|
||||
"New Start: %s",
|
||||
f"{data.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}" if not data.empty else "None",
|
||||
)
|
||||
logger.debug(
|
||||
"New End: %s",
|
||||
f"{data.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}" if not data.empty else "None",
|
||||
)
|
||||
|
||||
data_handler.ohlcv_store(pair, timeframe, data=data, candle_type=candle_type)
|
||||
return True
|
||||
@@ -276,13 +325,18 @@ def _download_pair_history(pair: str, *,
|
||||
return False
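# Summary sketch (not part of the upstream diff): the download flow above is incremental -
#   1. load the cached candles (deliberately without a timerange, so the full file is read),
#   2. resume from the last cached candle, or fall back to `new_pairs_days` ago for new
#      pairs, or re-download/prepend when the requested range lies outside the cache,
#   3. fetch via exchange.get_historic_ohlcv(), concat with the cache, clean duplicates, store.
# The since_ms fallback used above, expressed on its own:
from datetime import datetime, timedelta

new_pairs_days = 30  # same default as in the signature above
since_ms = int((datetime.now() - timedelta(days=new_pairs_days)).timestamp()) * 1000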
|
||||
|
||||
|
||||
def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes: List[str],
|
||||
datadir: Path, trading_mode: str,
|
||||
timerange: Optional[TimeRange] = None,
|
||||
new_pairs_days: int = 30, erase: bool = False,
|
||||
data_format: Optional[str] = None,
|
||||
prepend: bool = False,
|
||||
) -> List[str]:
|
||||
def refresh_backtest_ohlcv_data(
|
||||
exchange: Exchange,
|
||||
pairs: List[str],
|
||||
timeframes: List[str],
|
||||
datadir: Path,
|
||||
trading_mode: str,
|
||||
timerange: Optional[TimeRange] = None,
|
||||
new_pairs_days: int = 30,
|
||||
erase: bool = False,
|
||||
data_format: Optional[str] = None,
|
||||
prepend: bool = False,
|
||||
) -> List[str]:
|
||||
"""
|
||||
Refresh stored ohlcv data for backtesting and hyperopt operations.
|
||||
Used by freqtrade download-data subcommand.
|
||||
@@ -291,63 +345,77 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes
|
||||
pairs_not_available = []
|
||||
data_handler = get_datahandler(datadir, data_format)
|
||||
candle_type = CandleType.get_default(trading_mode)
|
||||
process = ''
|
||||
process = ""
|
||||
for idx, pair in enumerate(pairs, start=1):
|
||||
if pair not in exchange.markets:
|
||||
pairs_not_available.append(pair)
|
||||
logger.info(f"Skipping pair {pair}...")
|
||||
continue
|
||||
for timeframe in timeframes:
|
||||
|
||||
logger.debug(f'Downloading pair {pair}, {candle_type}, interval {timeframe}.')
|
||||
process = f'{idx}/{len(pairs)}'
|
||||
_download_pair_history(pair=pair, process=process,
|
||||
datadir=datadir, exchange=exchange,
|
||||
timerange=timerange, data_handler=data_handler,
|
||||
timeframe=str(timeframe), new_pairs_days=new_pairs_days,
|
||||
candle_type=candle_type,
|
||||
erase=erase, prepend=prepend)
|
||||
if trading_mode == 'futures':
|
||||
logger.debug(f"Downloading pair {pair}, {candle_type}, interval {timeframe}.")
|
||||
process = f"{idx}/{len(pairs)}"
|
||||
_download_pair_history(
|
||||
pair=pair,
|
||||
process=process,
|
||||
datadir=datadir,
|
||||
exchange=exchange,
|
||||
timerange=timerange,
|
||||
data_handler=data_handler,
|
||||
timeframe=str(timeframe),
|
||||
new_pairs_days=new_pairs_days,
|
||||
candle_type=candle_type,
|
||||
erase=erase,
|
||||
prepend=prepend,
|
||||
)
|
||||
if trading_mode == "futures":
|
||||
# Predefined candletype (and timeframe) depending on exchange
|
||||
# Downloads what is necessary to backtest based on futures data.
|
||||
tf_mark = exchange.get_option('mark_ohlcv_timeframe')
|
||||
tf_funding_rate = exchange.get_option('funding_fee_timeframe')
|
||||
tf_mark = exchange.get_option("mark_ohlcv_timeframe")
|
||||
tf_funding_rate = exchange.get_option("funding_fee_timeframe")
|
||||
|
||||
fr_candle_type = CandleType.from_string(exchange.get_option('mark_ohlcv_price'))
|
||||
fr_candle_type = CandleType.from_string(exchange.get_option("mark_ohlcv_price"))
|
||||
# All exchanges need FundingRate for futures trading.
|
||||
# The timeframe is aligned to the mark-price timeframe.
|
||||
combs = ((CandleType.FUNDING_RATE, tf_funding_rate), (fr_candle_type, tf_mark))
|
||||
for candle_type_f, tf in combs:
|
||||
logger.debug(f'Downloading pair {pair}, {candle_type_f}, interval {tf}.')
|
||||
_download_pair_history(pair=pair, process=process,
|
||||
datadir=datadir, exchange=exchange,
|
||||
timerange=timerange, data_handler=data_handler,
|
||||
timeframe=str(tf), new_pairs_days=new_pairs_days,
|
||||
candle_type=candle_type_f,
|
||||
erase=erase, prepend=prepend)
|
||||
logger.debug(f"Downloading pair {pair}, {candle_type_f}, interval {tf}.")
|
||||
_download_pair_history(
|
||||
pair=pair,
|
||||
process=process,
|
||||
datadir=datadir,
|
||||
exchange=exchange,
|
||||
timerange=timerange,
|
||||
data_handler=data_handler,
|
||||
timeframe=str(tf),
|
||||
new_pairs_days=new_pairs_days,
|
||||
candle_type=candle_type_f,
|
||||
erase=erase,
|
||||
prepend=prepend,
|
||||
)
|
||||
|
||||
return pairs_not_available
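# Illustrative sketch (not part of the upstream diff): in futures mode each pair gets extra
# downloads for funding-rate and mark-price candles, driven by the combs tuple shown above.
# A minimal stand-in with hard-coded timeframes (the real values come from
# exchange.get_option() and are exchange dependent):
from freqtrade.enums import CandleType

tf_funding_rate = "8h"  # assumption for the example
tf_mark = "1h"          # assumption for the example
combs = ((CandleType.FUNDING_RATE, tf_funding_rate), (CandleType.MARK, tf_mark))
for candle_type_f, tf in combs:
    print(f"would download {candle_type_f} candles at {tf}")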
|
||||
|
||||
|
||||
def _download_trades_history(exchange: Exchange,
|
||||
pair: str, *,
|
||||
new_pairs_days: int = 30,
|
||||
timerange: Optional[TimeRange] = None,
|
||||
data_handler: IDataHandler,
|
||||
trading_mode: TradingMode,
|
||||
) -> bool:
|
||||
def _download_trades_history(
|
||||
exchange: Exchange,
|
||||
pair: str,
|
||||
*,
|
||||
new_pairs_days: int = 30,
|
||||
timerange: Optional[TimeRange] = None,
|
||||
data_handler: IDataHandler,
|
||||
trading_mode: TradingMode,
|
||||
) -> bool:
|
||||
"""
|
||||
Download trade history from the exchange.
|
||||
Appends to previously downloaded trades data.
|
||||
"""
|
||||
try:
|
||||
|
||||
until = None
|
||||
since = 0
|
||||
if timerange:
|
||||
if timerange.starttype == 'date':
|
||||
if timerange.starttype == "date":
|
||||
since = timerange.startts * 1000
|
||||
if timerange.stoptype == 'date':
|
||||
if timerange.stoptype == "date":
|
||||
until = timerange.stopts * 1000
|
||||
|
||||
trades = data_handler.trades_load(pair, trading_mode)
|
||||
@@ -356,60 +424,76 @@ def _download_trades_history(exchange: Exchange,
|
||||
# DEFAULT_TRADES_COLUMNS: 0 -> timestamp
|
||||
# DEFAULT_TRADES_COLUMNS: 1 -> id
|
||||
|
||||
if not trades.empty and since > 0 and since < trades.iloc[0]['timestamp']:
|
||||
if not trades.empty and since > 0 and since < trades.iloc[0]["timestamp"]:
|
||||
# since is before the first trade
|
||||
logger.info(f"Start ({trades.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}) earlier than "
|
||||
f"available data. Redownloading trades for {pair}...")
|
||||
logger.info(
|
||||
f"Start ({trades.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}) earlier than "
|
||||
f"available data. Redownloading trades for {pair}..."
|
||||
)
|
||||
trades = trades_list_to_df([])
|
||||
|
||||
from_id = trades.iloc[-1]['id'] if not trades.empty else None
|
||||
if not trades.empty and since < trades.iloc[-1]['timestamp']:
|
||||
from_id = trades.iloc[-1]["id"] if not trades.empty else None
|
||||
if not trades.empty and since < trades.iloc[-1]["timestamp"]:
|
||||
# Reset since to the last available point
|
||||
# - 5 seconds (to ensure we're getting all trades)
|
||||
since = trades.iloc[-1]['timestamp'] - (5 * 1000)
|
||||
logger.info(f"Using last trade date -5s - Downloading trades for {pair} "
|
||||
f"since: {format_ms_time(since)}.")
|
||||
since = trades.iloc[-1]["timestamp"] - (5 * 1000)
|
||||
logger.info(
|
||||
f"Using last trade date -5s - Downloading trades for {pair} "
|
||||
f"since: {format_ms_time(since)}."
|
||||
)
|
||||
|
||||
if not since:
|
||||
since = dt_ts(dt_now() - timedelta(days=new_pairs_days))
|
||||
|
||||
logger.debug("Current Start: %s", 'None' if trades.empty else
|
||||
f"{trades.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}")
|
||||
logger.debug("Current End: %s", 'None' if trades.empty else
|
||||
f"{trades.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}")
|
||||
logger.debug(
|
||||
"Current Start: %s",
|
||||
"None" if trades.empty else f"{trades.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}",
|
||||
)
|
||||
logger.debug(
|
||||
"Current End: %s",
|
||||
"None" if trades.empty else f"{trades.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}",
|
||||
)
|
||||
logger.info(f"Current Amount of trades: {len(trades)}")
|
||||
|
||||
# Default since_ms to 30 days if nothing is given
|
||||
new_trades = exchange.get_historic_trades(pair=pair,
|
||||
since=since,
|
||||
until=until,
|
||||
from_id=from_id,
|
||||
)
|
||||
new_trades = exchange.get_historic_trades(
|
||||
pair=pair,
|
||||
since=since,
|
||||
until=until,
|
||||
from_id=from_id,
|
||||
)
|
||||
new_trades_df = trades_list_to_df(new_trades[1])
|
||||
trades = concat([trades, new_trades_df], axis=0)
|
||||
# Remove duplicates to make sure we're not storing data we don't need
|
||||
trades = trades_df_remove_duplicates(trades)
|
||||
data_handler.trades_store(pair, trades, trading_mode)
|
||||
|
||||
logger.debug("New Start: %s", 'None' if trades.empty else
|
||||
f"{trades.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}")
|
||||
logger.debug("New End: %s", 'None' if trades.empty else
|
||||
f"{trades.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}")
|
||||
logger.debug(
|
||||
"New Start: %s",
|
||||
"None" if trades.empty else f"{trades.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}",
|
||||
)
|
||||
logger.debug(
|
||||
"New End: %s",
|
||||
"None" if trades.empty else f"{trades.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}",
|
||||
)
|
||||
logger.info(f"New Amount of trades: {len(trades)}")
|
||||
return True
|
||||
|
||||
except Exception:
|
||||
logger.exception(
|
||||
f'Failed to download historic trades for pair: "{pair}". '
|
||||
)
|
||||
logger.exception(f'Failed to download historic trades for pair: "{pair}". ')
|
||||
return False
|
||||
|
||||
|
||||
def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir: Path,
|
||||
timerange: TimeRange, trading_mode: TradingMode,
|
||||
new_pairs_days: int = 30,
|
||||
erase: bool = False, data_format: str = 'feather',
|
||||
) -> List[str]:
|
||||
def refresh_backtest_trades_data(
|
||||
exchange: Exchange,
|
||||
pairs: List[str],
|
||||
datadir: Path,
|
||||
timerange: TimeRange,
|
||||
trading_mode: TradingMode,
|
||||
new_pairs_days: int = 30,
|
||||
erase: bool = False,
|
||||
data_format: str = "feather",
|
||||
) -> List[str]:
|
||||
"""
|
||||
Refresh stored trades data for backtesting and hyperopt operations.
|
||||
Used by freqtrade download-data subcommand.
|
||||
@@ -425,15 +509,17 @@ def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir:
|
||||
|
||||
if erase:
|
||||
if data_handler.trades_purge(pair, trading_mode):
|
||||
logger.info(f'Deleting existing data for pair {pair}.')
|
||||
logger.info(f"Deleting existing data for pair {pair}.")
|
||||
|
||||
logger.info(f'Downloading trades for pair {pair}.')
|
||||
_download_trades_history(exchange=exchange,
|
||||
pair=pair,
|
||||
new_pairs_days=new_pairs_days,
|
||||
timerange=timerange,
|
||||
data_handler=data_handler,
|
||||
trading_mode=trading_mode)
|
||||
logger.info(f"Downloading trades for pair {pair}.")
|
||||
_download_trades_history(
|
||||
exchange=exchange,
|
||||
pair=pair,
|
||||
new_pairs_days=new_pairs_days,
|
||||
timerange=timerange,
|
||||
data_handler=data_handler,
|
||||
trading_mode=trading_mode,
|
||||
)
|
||||
return pairs_not_available
|
||||
|
||||
|
||||
@@ -445,15 +531,18 @@ def get_timerange(data: Dict[str, DataFrame]) -> Tuple[datetime, datetime]:
|
||||
:return: tuple containing min_date, max_date
|
||||
"""
|
||||
    timeranges = [
        (frame['date'].min().to_pydatetime(), frame['date'].max().to_pydatetime())
        (frame["date"].min().to_pydatetime(), frame["date"].max().to_pydatetime())
        for frame in data.values()
    ]
    return (min(timeranges, key=operator.itemgetter(0))[0],
            max(timeranges, key=operator.itemgetter(1))[1])
    return (
        min(timeranges, key=operator.itemgetter(0))[0],
        max(timeranges, key=operator.itemgetter(1))[1],
    )
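# Illustrative sketch (not part of the upstream diff): get_timerange() reduces a dict of
# candle DataFrames (e.g. the `data` loaded in the earlier sketch) to the overall span.
min_date, max_date = get_timerange(data)
print(f"History spans {min_date} - {max_date}")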
|
||||
|
||||
|
||||
def validate_backtest_data(data: DataFrame, pair: str, min_date: datetime,
|
||||
max_date: datetime, timeframe_min: int) -> bool:
|
||||
def validate_backtest_data(
|
||||
data: DataFrame, pair: str, min_date: datetime, max_date: datetime, timeframe_min: int
|
||||
) -> bool:
|
||||
"""
|
||||
Validates preprocessed backtesting data for missing values and shows warnings about it that.
|
||||
|
||||
@@ -469,89 +558,111 @@ def validate_backtest_data(data: DataFrame, pair: str, min_date: datetime,
|
||||
dflen = len(data)
|
||||
if dflen < expected_frames:
|
||||
found_missing = True
|
||||
logger.warning("%s has missing frames: expected %s, got %s, that's %s missing values",
|
||||
pair, expected_frames, dflen, expected_frames - dflen)
|
||||
logger.warning(
|
||||
"%s has missing frames: expected %s, got %s, that's %s missing values",
|
||||
pair,
|
||||
expected_frames,
|
||||
dflen,
|
||||
expected_frames - dflen,
|
||||
)
|
||||
return found_missing
|
||||
|
||||
|
||||
def download_data_main(config: Config) -> None:
|
||||
|
||||
timerange = TimeRange()
|
||||
if 'days' in config:
|
||||
time_since = (datetime.now() - timedelta(days=config['days'])).strftime("%Y%m%d")
|
||||
timerange = TimeRange.parse_timerange(f'{time_since}-')
|
||||
if "days" in config:
|
||||
time_since = (datetime.now() - timedelta(days=config["days"])).strftime("%Y%m%d")
|
||||
timerange = TimeRange.parse_timerange(f"{time_since}-")
|
||||
|
||||
if 'timerange' in config:
|
||||
timerange = timerange.parse_timerange(config['timerange'])
|
||||
if "timerange" in config:
|
||||
timerange = timerange.parse_timerange(config["timerange"])
|
||||
|
||||
# Remove stake-currency to skip checks which are not relevant for datadownload
|
||||
config['stake_currency'] = ''
|
||||
config["stake_currency"] = ""
|
||||
|
||||
pairs_not_available: List[str] = []
|
||||
|
||||
# Init exchange
|
||||
from freqtrade.resolvers.exchange_resolver import ExchangeResolver
|
||||
|
||||
exchange = ExchangeResolver.load_exchange(config, validate=False)
|
||||
available_pairs = [
|
||||
p for p in exchange.get_markets(
|
||||
tradable_only=True, active_only=not config.get('include_inactive')
|
||||
).keys()
|
||||
p
|
||||
for p in exchange.get_markets(
|
||||
tradable_only=True, active_only=not config.get("include_inactive")
|
||||
).keys()
|
||||
]
|
||||
|
||||
expanded_pairs = dynamic_expand_pairlist(config, available_pairs)
|
||||
if 'timeframes' not in config:
|
||||
config['timeframes'] = DL_DATA_TIMEFRAMES
|
||||
if "timeframes" not in config:
|
||||
config["timeframes"] = DL_DATA_TIMEFRAMES
|
||||
|
||||
# Manual validations of relevant settings
|
||||
if not config['exchange'].get('skip_pair_validation', False):
|
||||
if not config["exchange"].get("skip_pair_validation", False):
|
||||
exchange.validate_pairs(expanded_pairs)
|
||||
logger.info(f"About to download pairs: {expanded_pairs}, "
|
||||
f"intervals: {config['timeframes']} to {config['datadir']}")
|
||||
logger.info(
|
||||
f"About to download pairs: {expanded_pairs}, "
|
||||
f"intervals: {config['timeframes']} to {config['datadir']}"
|
||||
)
|
||||
|
||||
if len(expanded_pairs) == 0:
|
||||
logger.warning(
|
||||
"No pairs available for download. "
|
||||
"Please make sure you're using the correct Pair naming for your selected trade mode. \n"
|
||||
f"More info: {DOCS_LINK}/bot-basics/#pair-naming")
|
||||
f"More info: {DOCS_LINK}/bot-basics/#pair-naming"
|
||||
)
|
||||
|
||||
for timeframe in config['timeframes']:
|
||||
for timeframe in config["timeframes"]:
|
||||
exchange.validate_timeframes(timeframe)
|
||||
|
||||
# Start downloading
|
||||
try:
|
||||
if config.get('download_trades'):
|
||||
if config.get("download_trades"):
|
||||
pairs_not_available = refresh_backtest_trades_data(
|
||||
exchange, pairs=expanded_pairs, datadir=config['datadir'],
|
||||
timerange=timerange, new_pairs_days=config['new_pairs_days'],
|
||||
erase=bool(config.get('erase')), data_format=config['dataformat_trades'],
|
||||
trading_mode=config.get('trading_mode', TradingMode.SPOT),
|
||||
)
|
||||
exchange,
|
||||
pairs=expanded_pairs,
|
||||
datadir=config["datadir"],
|
||||
timerange=timerange,
|
||||
new_pairs_days=config["new_pairs_days"],
|
||||
erase=bool(config.get("erase")),
|
||||
data_format=config["dataformat_trades"],
|
||||
trading_mode=config.get("trading_mode", TradingMode.SPOT),
|
||||
)
|
||||
|
||||
# Convert downloaded trade data to different timeframes
|
||||
convert_trades_to_ohlcv(
|
||||
pairs=expanded_pairs, timeframes=config['timeframes'],
|
||||
datadir=config['datadir'], timerange=timerange, erase=bool(config.get('erase')),
|
||||
data_format_ohlcv=config['dataformat_ohlcv'],
|
||||
data_format_trades=config['dataformat_trades'],
|
||||
candle_type=config.get('candle_type_def', CandleType.SPOT),
|
||||
pairs=expanded_pairs,
|
||||
timeframes=config["timeframes"],
|
||||
datadir=config["datadir"],
|
||||
timerange=timerange,
|
||||
erase=bool(config.get("erase")),
|
||||
data_format_ohlcv=config["dataformat_ohlcv"],
|
||||
data_format_trades=config["dataformat_trades"],
|
||||
candle_type=config.get("candle_type_def", CandleType.SPOT),
|
||||
)
|
||||
else:
|
||||
if not exchange.get_option('ohlcv_has_history', True):
|
||||
if not exchange.get_option("ohlcv_has_history", True):
|
||||
raise OperationalException(
|
||||
f"Historic klines not available for {exchange.name}. "
|
||||
"Please use `--dl-trades` instead for this exchange "
|
||||
"(will unfortunately take a long time)."
|
||||
)
|
||||
)
|
||||
migrate_data(config, exchange)
|
||||
pairs_not_available = refresh_backtest_ohlcv_data(
|
||||
exchange, pairs=expanded_pairs, timeframes=config['timeframes'],
|
||||
datadir=config['datadir'], timerange=timerange,
|
||||
new_pairs_days=config['new_pairs_days'],
|
||||
erase=bool(config.get('erase')), data_format=config['dataformat_ohlcv'],
|
||||
trading_mode=config.get('trading_mode', 'spot'),
|
||||
prepend=config.get('prepend_data', False)
|
||||
exchange,
|
||||
pairs=expanded_pairs,
|
||||
timeframes=config["timeframes"],
|
||||
datadir=config["datadir"],
|
||||
timerange=timerange,
|
||||
new_pairs_days=config["new_pairs_days"],
|
||||
erase=bool(config.get("erase")),
|
||||
data_format=config["dataformat_ohlcv"],
|
||||
trading_mode=config.get("trading_mode", "spot"),
|
||||
prepend=config.get("prepend_data", False),
|
||||
)
|
||||
finally:
|
||||
if pairs_not_available:
|
||||
logger.info(f"Pairs [{','.join(pairs_not_available)}] not available "
|
||||
f"on exchange {exchange.name}.")
|
||||
logger.info(
|
||||
f"Pairs [{','.join(pairs_not_available)}] not available "
|
||||
f"on exchange {exchange.name}."
|
||||
)
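# Illustrative sketch (not part of the upstream diff): download_data_main() backs the
# `freqtrade download-data` subcommand. A typical invocation (pairs, timeframes and day
# count are example values):
#
#   freqtrade download-data --exchange binance --pairs BTC/USDT ETH/USDT \
#       --timeframes 5m 1h --days 30
#
# `--dl-trades` selects the trade-download branch above; `--prepend` and `--erase` map to
# the prepend/erase flags passed through to the download helpers.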
|
||||
|
||||
@@ -31,7 +31,8 @@ def calculate_market_change(data: Dict[str, pd.DataFrame], column: str = "close"
|
||||
|
||||
|
||||
def combine_dataframes_by_column(
|
||||
data: Dict[str, pd.DataFrame], column: str = "close") -> pd.DataFrame:
|
||||
data: Dict[str, pd.DataFrame], column: str = "close"
|
||||
) -> pd.DataFrame:
|
||||
"""
|
||||
Combine multiple dataframes "column"
|
||||
:param data: Dict of Dataframes, dict key should be pair.
|
||||
@@ -41,14 +42,15 @@ def combine_dataframes_by_column(
|
||||
"""
|
||||
if not data:
|
||||
raise ValueError("No data provided.")
|
||||
df_comb = pd.concat([data[pair].set_index('date').rename(
|
||||
{column: pair}, axis=1)[pair] for pair in data], axis=1)
|
||||
df_comb = pd.concat(
|
||||
[data[pair].set_index("date").rename({column: pair}, axis=1)[pair] for pair in data], axis=1
|
||||
)
|
||||
return df_comb
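# Illustrative sketch (not part of the upstream diff): the combined frame is indexed by
# date with one column per pair, e.g. using the `data` dict from the earlier load_data sketch.
closes = combine_dataframes_by_column(data, column="close")
print(closes.columns.tolist())  # one column per pair, e.g. ['BTC/USDT', 'ETH/USDT']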
|
||||
|
||||
|
||||
def combined_dataframes_with_rel_mean(
|
||||
data: Dict[str, pd.DataFrame], fromdt: datetime, todt: datetime,
|
||||
column: str = "close") -> pd.DataFrame:
|
||||
data: Dict[str, pd.DataFrame], fromdt: datetime, todt: datetime, column: str = "close"
|
||||
) -> pd.DataFrame:
|
||||
"""
|
||||
Combine multiple dataframes "column"
|
||||
:param data: Dict of Dataframes, dict key should be pair.
|
||||
@@ -60,14 +62,15 @@ def combined_dataframes_with_rel_mean(
|
||||
df_comb = combine_dataframes_by_column(data, column)
|
||||
# Trim dataframes to the given timeframe
|
||||
df_comb = df_comb.iloc[(df_comb.index >= fromdt) & (df_comb.index < todt)]
|
||||
df_comb['count'] = df_comb.count(axis=1)
|
||||
df_comb['mean'] = df_comb.mean(axis=1)
|
||||
df_comb['rel_mean'] = df_comb['mean'].pct_change().fillna(0).cumsum()
|
||||
return df_comb[['mean', 'rel_mean', 'count']]
|
||||
df_comb["count"] = df_comb.count(axis=1)
|
||||
df_comb["mean"] = df_comb.mean(axis=1)
|
||||
df_comb["rel_mean"] = df_comb["mean"].pct_change().fillna(0).cumsum()
|
||||
return df_comb[["mean", "rel_mean", "count"]]
|
||||
|
||||
|
||||
def combine_dataframes_with_mean(
|
||||
data: Dict[str, pd.DataFrame], column: str = "close") -> pd.DataFrame:
|
||||
data: Dict[str, pd.DataFrame], column: str = "close"
|
||||
) -> pd.DataFrame:
|
||||
"""
|
||||
Combine multiple dataframes "column"
|
||||
:param data: Dict of Dataframes, dict key should be pair.
|
||||
@@ -78,13 +81,14 @@ def combine_dataframes_with_mean(
|
||||
"""
|
||||
df_comb = combine_dataframes_by_column(data, column)
|
||||
|
||||
df_comb['mean'] = df_comb.mean(axis=1)
|
||||
df_comb["mean"] = df_comb.mean(axis=1)
|
||||
|
||||
return df_comb
|
||||
|
||||
|
||||
def create_cum_profit(df: pd.DataFrame, trades: pd.DataFrame, col_name: str,
|
||||
timeframe: str) -> pd.DataFrame:
|
||||
def create_cum_profit(
|
||||
df: pd.DataFrame, trades: pd.DataFrame, col_name: str, timeframe: str
|
||||
) -> pd.DataFrame:
|
||||
"""
|
||||
Adds a column `col_name` with the cumulative profit for the given trades array.
|
||||
:param df: DataFrame with date index
|
||||
@@ -97,11 +101,11 @@ def create_cum_profit(df: pd.DataFrame, trades: pd.DataFrame, col_name: str,
|
||||
if len(trades) == 0:
|
||||
raise ValueError("Trade dataframe empty.")
|
||||
from freqtrade.exchange import timeframe_to_resample_freq
|
||||
|
||||
timeframe_freq = timeframe_to_resample_freq(timeframe)
|
||||
# Resample to timeframe to make sure trades match candles
|
||||
_trades_sum = trades.resample(timeframe_freq, on='close_date'
|
||||
)[['profit_abs']].sum()
|
||||
df.loc[:, col_name] = _trades_sum['profit_abs'].cumsum()
|
||||
_trades_sum = trades.resample(timeframe_freq, on="close_date")[["profit_abs"]].sum()
|
||||
df.loc[:, col_name] = _trades_sum["profit_abs"].cumsum()
|
||||
# Set first value to 0
|
||||
df.loc[df.iloc[0].name, col_name] = 0
|
||||
# FFill to get continuous
|
||||
@@ -109,29 +113,34 @@ def create_cum_profit(df: pd.DataFrame, trades: pd.DataFrame, col_name: str,
|
||||
return df
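# Illustrative sketch (not part of the upstream diff): create_cum_profit() expects a
# date-indexed candle DataFrame and a backtest trades DataFrame carrying close_date and
# profit_abs columns; `candles` and `trades_df` are assumed to exist for the example.
df_with_profit = create_cum_profit(
    df=candles.set_index("date"), trades=trades_df, col_name="cum_profit", timeframe="5m"
)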
|
||||
|
||||
|
||||
def _calc_drawdown_series(profit_results: pd.DataFrame, *, date_col: str, value_col: str,
|
||||
starting_balance: float) -> pd.DataFrame:
|
||||
def _calc_drawdown_series(
|
||||
profit_results: pd.DataFrame, *, date_col: str, value_col: str, starting_balance: float
|
||||
) -> pd.DataFrame:
|
||||
max_drawdown_df = pd.DataFrame()
|
||||
max_drawdown_df['cumulative'] = profit_results[value_col].cumsum()
|
||||
max_drawdown_df['high_value'] = max_drawdown_df['cumulative'].cummax()
|
||||
max_drawdown_df['drawdown'] = max_drawdown_df['cumulative'] - max_drawdown_df['high_value']
|
||||
max_drawdown_df['date'] = profit_results.loc[:, date_col]
|
||||
max_drawdown_df["cumulative"] = profit_results[value_col].cumsum()
|
||||
max_drawdown_df["high_value"] = max_drawdown_df["cumulative"].cummax()
|
||||
max_drawdown_df["drawdown"] = max_drawdown_df["cumulative"] - max_drawdown_df["high_value"]
|
||||
max_drawdown_df["date"] = profit_results.loc[:, date_col]
|
||||
if starting_balance:
|
||||
cumulative_balance = starting_balance + max_drawdown_df['cumulative']
|
||||
max_balance = starting_balance + max_drawdown_df['high_value']
|
||||
max_drawdown_df['drawdown_relative'] = ((max_balance - cumulative_balance) / max_balance)
|
||||
cumulative_balance = starting_balance + max_drawdown_df["cumulative"]
|
||||
max_balance = starting_balance + max_drawdown_df["high_value"]
|
||||
max_drawdown_df["drawdown_relative"] = (max_balance - cumulative_balance) / max_balance
|
||||
else:
|
||||
# NOTE: This is not completely accurate,
|
||||
# but might good enough if starting_balance is not available
|
||||
max_drawdown_df['drawdown_relative'] = (
|
||||
(max_drawdown_df['high_value'] - max_drawdown_df['cumulative'])
|
||||
/ max_drawdown_df['high_value'])
|
||||
max_drawdown_df["drawdown_relative"] = (
|
||||
max_drawdown_df["high_value"] - max_drawdown_df["cumulative"]
|
||||
) / max_drawdown_df["high_value"]
|
||||
return max_drawdown_df
|
||||
|
||||
|
||||
def calculate_underwater(trades: pd.DataFrame, *, date_col: str = 'close_date',
|
||||
value_col: str = 'profit_ratio', starting_balance: float = 0.0
|
||||
):
|
||||
def calculate_underwater(
|
||||
trades: pd.DataFrame,
|
||||
*,
|
||||
date_col: str = "close_date",
|
||||
value_col: str = "profit_ratio",
|
||||
starting_balance: float = 0.0,
|
||||
):
|
||||
"""
|
||||
Calculate max drawdown and the corresponding close dates
|
||||
:param trades: DataFrame containing trades (requires columns close_date and profit_ratio)
|
||||
@@ -145,18 +154,20 @@ def calculate_underwater(trades: pd.DataFrame, *, date_col: str = 'close_date',
|
||||
raise ValueError("Trade dataframe empty.")
|
||||
profit_results = trades.sort_values(date_col).reset_index(drop=True)
|
||||
max_drawdown_df = _calc_drawdown_series(
|
||||
profit_results,
|
||||
date_col=date_col,
|
||||
value_col=value_col,
|
||||
starting_balance=starting_balance)
|
||||
profit_results, date_col=date_col, value_col=value_col, starting_balance=starting_balance
|
||||
)
|
||||
|
||||
return max_drawdown_df
|
||||
|
||||
|
||||
def calculate_max_drawdown(trades: pd.DataFrame, *, date_col: str = 'close_date',
|
||||
value_col: str = 'profit_abs', starting_balance: float = 0,
|
||||
relative: bool = False
|
||||
) -> Tuple[float, pd.Timestamp, pd.Timestamp, float, float, float]:
|
||||
def calculate_max_drawdown(
|
||||
trades: pd.DataFrame,
|
||||
*,
|
||||
date_col: str = "close_date",
|
||||
value_col: str = "profit_abs",
|
||||
starting_balance: float = 0,
|
||||
relative: bool = False,
|
||||
) -> Tuple[float, pd.Timestamp, pd.Timestamp, float, float, float]:
|
||||
"""
|
||||
Calculate max drawdown and the corresponding close dates
|
||||
:param trades: DataFrame containing trades (requires columns close_date and profit_ratio)
|
||||
@@ -172,32 +183,31 @@ def calculate_max_drawdown(trades: pd.DataFrame, *, date_col: str = 'close_date'
|
||||
raise ValueError("Trade dataframe empty.")
|
||||
profit_results = trades.sort_values(date_col).reset_index(drop=True)
|
||||
max_drawdown_df = _calc_drawdown_series(
|
||||
profit_results,
|
||||
date_col=date_col,
|
||||
value_col=value_col,
|
||||
starting_balance=starting_balance
|
||||
profit_results, date_col=date_col, value_col=value_col, starting_balance=starting_balance
|
||||
)
|
||||
|
||||
    idxmin = (
        max_drawdown_df['drawdown_relative'].idxmax()
        if relative else max_drawdown_df['drawdown'].idxmin()
        max_drawdown_df["drawdown_relative"].idxmax()
        if relative
        else max_drawdown_df["drawdown"].idxmin()
    )
    if idxmin == 0:
        raise ValueError("No losing trade, therefore no drawdown.")
    high_date = profit_results.loc[max_drawdown_df.iloc[:idxmin]['high_value'].idxmax(), date_col]
    high_date = profit_results.loc[max_drawdown_df.iloc[:idxmin]["high_value"].idxmax(), date_col]
    low_date = profit_results.loc[idxmin, date_col]
    high_val = max_drawdown_df.loc[max_drawdown_df.iloc[:idxmin]
                                   ['high_value'].idxmax(), 'cumulative']
    low_val = max_drawdown_df.loc[idxmin, 'cumulative']
    max_drawdown_rel = max_drawdown_df.loc[idxmin, 'drawdown_relative']
    high_val = max_drawdown_df.loc[
        max_drawdown_df.iloc[:idxmin]["high_value"].idxmax(), "cumulative"
    ]
    low_val = max_drawdown_df.loc[idxmin, "cumulative"]
    max_drawdown_rel = max_drawdown_df.loc[idxmin, "drawdown_relative"]

    return (
        abs(max_drawdown_df.loc[idxmin, 'drawdown']),
        abs(max_drawdown_df.loc[idxmin, "drawdown"]),
        high_date,
        low_date,
        high_val,
        low_val,
        max_drawdown_rel
        max_drawdown_rel,
    )
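# Illustrative sketch (not part of the upstream diff): unpacking the 6-tuple returned above -
# absolute drawdown, high/low close dates, high/low cumulative values, relative drawdown.
# `trades_df` and the starting balance are assumptions for the example.
drawdown_abs, high_date, low_date, high_val, low_val, drawdown_rel = calculate_max_drawdown(
    trades_df, starting_balance=1000, relative=False
)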
|
||||
|
||||
|
||||
@@ -213,9 +223,9 @@ def calculate_csum(trades: pd.DataFrame, starting_balance: float = 0) -> Tuple[f
|
||||
raise ValueError("Trade dataframe empty.")
|
||||
|
||||
csum_df = pd.DataFrame()
|
||||
csum_df['sum'] = trades['profit_abs'].cumsum()
|
||||
csum_min = csum_df['sum'].min() + starting_balance
|
||||
csum_max = csum_df['sum'].max() + starting_balance
|
||||
csum_df["sum"] = trades["profit_abs"].cumsum()
|
||||
csum_min = csum_df["sum"].min() + starting_balance
|
||||
csum_max = csum_df["sum"].max() + starting_balance
|
||||
|
||||
return csum_min, csum_max
|
||||
|
||||
@@ -245,28 +255,29 @@ def calculate_expectancy(trades: pd.DataFrame) -> Tuple[float, float]:
|
||||
expectancy_ratio = 100
|
||||
|
||||
    if len(trades) > 0:
        winning_trades = trades.loc[trades['profit_abs'] > 0]
        losing_trades = trades.loc[trades['profit_abs'] < 0]
        profit_sum = winning_trades['profit_abs'].sum()
        loss_sum = abs(losing_trades['profit_abs'].sum())
        winning_trades = trades.loc[trades["profit_abs"] > 0]
        losing_trades = trades.loc[trades["profit_abs"] < 0]
        profit_sum = winning_trades["profit_abs"].sum()
        loss_sum = abs(losing_trades["profit_abs"].sum())
        nb_win_trades = len(winning_trades)
        nb_loss_trades = len(losing_trades)

        average_win = (profit_sum / nb_win_trades) if nb_win_trades > 0 else 0
        average_loss = (loss_sum / nb_loss_trades) if nb_loss_trades > 0 else 0
        winrate = (nb_win_trades / len(trades))
        loserate = (nb_loss_trades / len(trades))
        winrate = nb_win_trades / len(trades)
        loserate = nb_loss_trades / len(trades)

        expectancy = (winrate * average_win) - (loserate * average_loss)
        if (average_loss > 0):
        if average_loss > 0:
            risk_reward_ratio = average_win / average_loss
            expectancy_ratio = ((1 + risk_reward_ratio) * winrate) - 1

    return expectancy, expectancy_ratio
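# Worked example (not part of the upstream diff): with 6 winners averaging 20 and 4 losers
# averaging 15 over 10 trades, the formulas above give
#   winrate = 0.6, loserate = 0.4
#   expectancy       = 0.6 * 20 - 0.4 * 15      = 6.0 (currency units per trade)
#   risk_reward      = 20 / 15                  = 1.333...
#   expectancy_ratio = (1 + 1.333...) * 0.6 - 1 = 0.4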
|
||||
|
||||
|
||||
def calculate_sortino(trades: pd.DataFrame, min_date: datetime, max_date: datetime,
|
||||
starting_balance: float) -> float:
|
||||
def calculate_sortino(
|
||||
trades: pd.DataFrame, min_date: datetime, max_date: datetime, starting_balance: float
|
||||
) -> float:
|
||||
"""
|
||||
Calculate sortino
|
||||
:param trades: DataFrame containing trades (requires columns profit_abs)
|
||||
@@ -275,12 +286,12 @@ def calculate_sortino(trades: pd.DataFrame, min_date: datetime, max_date: dateti
|
||||
if (len(trades) == 0) or (min_date is None) or (max_date is None) or (min_date == max_date):
|
||||
return 0
|
||||
|
||||
total_profit = trades['profit_abs'] / starting_balance
|
||||
total_profit = trades["profit_abs"] / starting_balance
|
||||
days_period = max(1, (max_date - min_date).days)
|
||||
|
||||
expected_returns_mean = total_profit.sum() / days_period
|
||||
|
||||
down_stdev = np.std(trades.loc[trades['profit_abs'] < 0, 'profit_abs'] / starting_balance)
|
||||
down_stdev = np.std(trades.loc[trades["profit_abs"] < 0, "profit_abs"] / starting_balance)
|
||||
|
||||
if down_stdev != 0 and not np.isnan(down_stdev):
|
||||
sortino_ratio = expected_returns_mean / down_stdev * np.sqrt(365)
|
||||
@@ -292,8 +303,9 @@ def calculate_sortino(trades: pd.DataFrame, min_date: datetime, max_date: dateti
|
||||
return sortino_ratio
|
||||
|
||||
|
||||
def calculate_sharpe(trades: pd.DataFrame, min_date: datetime, max_date: datetime,
|
||||
starting_balance: float) -> float:
|
||||
def calculate_sharpe(
|
||||
trades: pd.DataFrame, min_date: datetime, max_date: datetime, starting_balance: float
|
||||
) -> float:
|
||||
"""
|
||||
Calculate sharpe
|
||||
:param trades: DataFrame containing trades (requires column profit_abs)
|
||||
@@ -302,7 +314,7 @@ def calculate_sharpe(trades: pd.DataFrame, min_date: datetime, max_date: datetim
|
||||
if (len(trades) == 0) or (min_date is None) or (max_date is None) or (min_date == max_date):
|
||||
return 0
|
||||
|
||||
total_profit = trades['profit_abs'] / starting_balance
|
||||
total_profit = trades["profit_abs"] / starting_balance
|
||||
days_period = max(1, (max_date - min_date).days)
|
||||
|
||||
expected_returns_mean = total_profit.sum() / days_period
|
||||
@@ -318,8 +330,9 @@ def calculate_sharpe(trades: pd.DataFrame, min_date: datetime, max_date: datetim
|
||||
return sharp_ratio
|
||||
|
||||
|
||||
def calculate_calmar(trades: pd.DataFrame, min_date: datetime, max_date: datetime,
|
||||
starting_balance: float) -> float:
|
||||
def calculate_calmar(
|
||||
trades: pd.DataFrame, min_date: datetime, max_date: datetime, starting_balance: float
|
||||
) -> float:
|
||||
"""
|
||||
Calculate calmar
|
||||
:param trades: DataFrame containing trades (requires columns close_date and profit_abs)
|
||||
@@ -328,7 +341,7 @@ def calculate_calmar(trades: pd.DataFrame, min_date: datetime, max_date: datetim
|
||||
if (len(trades) == 0) or (min_date is None) or (max_date is None) or (min_date == max_date):
|
||||
return 0
|
||||
|
||||
total_profit = trades['profit_abs'].sum() / starting_balance
|
||||
total_profit = trades["profit_abs"].sum() / starting_balance
|
||||
days_period = max(1, (max_date - min_date).days)
|
||||
|
||||
# adding slippage of 0.1% per trade
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
# pragma pylint: disable=W0603
|
||||
""" Edge positioning package """
|
||||
"""Edge positioning package"""
|
||||
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from copy import deepcopy
|
||||
@@ -46,48 +47,49 @@ class Edge:
|
||||
_cached_pairs: Dict[str, Any] = {} # Keeps a list of pairs
|
||||
|
||||
def __init__(self, config: Config, exchange, strategy) -> None:
|
||||
|
||||
self.config = config
|
||||
self.exchange = exchange
|
||||
self.strategy: IStrategy = strategy
|
||||
|
||||
self.edge_config = self.config.get('edge', {})
|
||||
self.edge_config = self.config.get("edge", {})
|
||||
self._cached_pairs: Dict[str, Any] = {} # Keeps a list of pairs
|
||||
self._final_pairs: list = []
|
||||
|
||||
# checking max_open_trades. it should be -1 as with Edge
|
||||
# the number of trades is determined by position size
|
||||
if self.config['max_open_trades'] != float('inf'):
|
||||
logger.critical('max_open_trades should be -1 in config !')
|
||||
if self.config["max_open_trades"] != float("inf"):
|
||||
logger.critical("max_open_trades should be -1 in config !")
|
||||
|
||||
if self.config['stake_amount'] != UNLIMITED_STAKE_AMOUNT:
|
||||
raise OperationalException('Edge works only with unlimited stake amount')
|
||||
if self.config["stake_amount"] != UNLIMITED_STAKE_AMOUNT:
|
||||
raise OperationalException("Edge works only with unlimited stake amount")
|
||||
|
||||
self._capital_ratio: float = self.config['tradable_balance_ratio']
|
||||
self._allowed_risk: float = self.edge_config.get('allowed_risk')
|
||||
self._since_number_of_days: int = self.edge_config.get('calculate_since_number_of_days', 14)
|
||||
self._capital_ratio: float = self.config["tradable_balance_ratio"]
|
||||
self._allowed_risk: float = self.edge_config.get("allowed_risk")
|
||||
self._since_number_of_days: int = self.edge_config.get("calculate_since_number_of_days", 14)
|
||||
self._last_updated: int = 0 # Timestamp of pairs last updated time
|
||||
self._refresh_pairs = True
|
||||
|
||||
self._stoploss_range_min = float(self.edge_config.get('stoploss_range_min', -0.01))
|
||||
self._stoploss_range_max = float(self.edge_config.get('stoploss_range_max', -0.05))
|
||||
self._stoploss_range_step = float(self.edge_config.get('stoploss_range_step', -0.001))
|
||||
self._stoploss_range_min = float(self.edge_config.get("stoploss_range_min", -0.01))
|
||||
self._stoploss_range_max = float(self.edge_config.get("stoploss_range_max", -0.05))
|
||||
self._stoploss_range_step = float(self.edge_config.get("stoploss_range_step", -0.001))
|
||||
|
||||
# calculating stoploss range
|
||||
self._stoploss_range = np.arange(
|
||||
self._stoploss_range_min,
|
||||
self._stoploss_range_max,
|
||||
self._stoploss_range_step
|
||||
self._stoploss_range_min, self._stoploss_range_max, self._stoploss_range_step
|
||||
)
|
||||
|
||||
self._timerange: TimeRange = TimeRange.parse_timerange(
|
||||
f"{(dt_now() - timedelta(days=self._since_number_of_days)).strftime('%Y%m%d')}-")
|
||||
if config.get('fee'):
|
||||
self.fee = config['fee']
|
||||
f"{(dt_now() - timedelta(days=self._since_number_of_days)).strftime('%Y%m%d')}-"
|
||||
)
|
||||
if config.get("fee"):
|
||||
self.fee = config["fee"]
|
||||
else:
|
||||
try:
|
||||
self.fee = self.exchange.get_fee(symbol=expand_pairlist(
|
||||
self.config['exchange']['pair_whitelist'], list(self.exchange.markets))[0])
|
||||
self.fee = self.exchange.get_fee(
|
||||
symbol=expand_pairlist(
|
||||
self.config["exchange"]["pair_whitelist"], list(self.exchange.markets)
|
||||
)[0]
|
||||
)
|
||||
except IndexError:
|
||||
self.fee = None
|
||||
|
||||
@@ -95,28 +97,30 @@ class Edge:
|
||||
if self.fee is None and pairs:
|
||||
self.fee = self.exchange.get_fee(pairs[0])
|
||||
|
||||
heartbeat = self.edge_config.get('process_throttle_secs')
|
||||
heartbeat = self.edge_config.get("process_throttle_secs")
|
||||
|
||||
if (self._last_updated > 0) and (
|
||||
self._last_updated + heartbeat > int(dt_now().timestamp())):
|
||||
self._last_updated + heartbeat > int(dt_now().timestamp())
|
||||
):
|
||||
return False
|
||||
|
||||
data: Dict[str, Any] = {}
|
||||
logger.info('Using stake_currency: %s ...', self.config['stake_currency'])
|
||||
logger.info('Using local backtesting data (using whitelist in given config) ...')
|
||||
logger.info("Using stake_currency: %s ...", self.config["stake_currency"])
|
||||
logger.info("Using local backtesting data (using whitelist in given config) ...")
|
||||
|
||||
if self._refresh_pairs:
|
||||
timerange_startup = deepcopy(self._timerange)
|
||||
timerange_startup.subtract_start(timeframe_to_seconds(
|
||||
self.strategy.timeframe) * self.strategy.startup_candle_count)
|
||||
timerange_startup.subtract_start(
|
||||
timeframe_to_seconds(self.strategy.timeframe) * self.strategy.startup_candle_count
|
||||
)
|
||||
refresh_data(
|
||||
datadir=self.config['datadir'],
|
||||
datadir=self.config["datadir"],
|
||||
pairs=pairs,
|
||||
exchange=self.exchange,
|
||||
timeframe=self.strategy.timeframe,
|
||||
timerange=timerange_startup,
|
||||
data_format=self.config['dataformat_ohlcv'],
|
||||
candle_type=self.config.get('candle_type_def', CandleType.SPOT),
|
||||
data_format=self.config["dataformat_ohlcv"],
|
||||
candle_type=self.config.get("candle_type_def", CandleType.SPOT),
|
||||
)
|
||||
# Download informative pairs too
|
||||
res = defaultdict(list)
|
||||
@@ -124,26 +128,27 @@ class Edge:
|
||||
res[timeframe].append(pair)
|
||||
for timeframe, inf_pairs in res.items():
|
||||
timerange_startup = deepcopy(self._timerange)
|
||||
timerange_startup.subtract_start(timeframe_to_seconds(
|
||||
timeframe) * self.strategy.startup_candle_count)
|
||||
timerange_startup.subtract_start(
|
||||
timeframe_to_seconds(timeframe) * self.strategy.startup_candle_count
|
||||
)
|
||||
refresh_data(
|
||||
datadir=self.config['datadir'],
|
||||
datadir=self.config["datadir"],
|
||||
pairs=inf_pairs,
|
||||
exchange=self.exchange,
|
||||
timeframe=timeframe,
|
||||
timerange=timerange_startup,
|
||||
data_format=self.config['dataformat_ohlcv'],
|
||||
candle_type=self.config.get('candle_type_def', CandleType.SPOT),
|
||||
data_format=self.config["dataformat_ohlcv"],
|
||||
candle_type=self.config.get("candle_type_def", CandleType.SPOT),
|
||||
)
|
||||
|
||||
data = load_data(
|
||||
datadir=self.config['datadir'],
|
||||
datadir=self.config["datadir"],
|
||||
pairs=pairs,
|
||||
timeframe=self.strategy.timeframe,
|
||||
timerange=self._timerange,
|
||||
startup_candles=self.strategy.startup_candle_count,
|
||||
data_format=self.config['dataformat_ohlcv'],
|
||||
candle_type=self.config.get('candle_type_def', CandleType.SPOT),
|
||||
data_format=self.config["dataformat_ohlcv"],
|
||||
candle_type=self.config.get("candle_type_def", CandleType.SPOT),
|
||||
)
|
||||
|
||||
if not data:
|
||||
@@ -152,27 +157,29 @@ class Edge:
|
||||
logger.critical("No data found. Edge is stopped ...")
|
||||
return False
|
||||
# Fake run-mode to Edge
|
||||
prior_rm = self.config['runmode']
|
||||
self.config['runmode'] = RunMode.EDGE
|
||||
prior_rm = self.config["runmode"]
|
||||
self.config["runmode"] = RunMode.EDGE
|
||||
preprocessed = self.strategy.advise_all_indicators(data)
|
||||
self.config['runmode'] = prior_rm
|
||||
self.config["runmode"] = prior_rm
|
||||
|
||||
# Print timeframe
|
||||
min_date, max_date = get_timerange(preprocessed)
|
||||
logger.info(f'Measuring data from {min_date.strftime(DATETIME_PRINT_FORMAT)} '
|
||||
f'up to {max_date.strftime(DATETIME_PRINT_FORMAT)} '
|
||||
f'({(max_date - min_date).days} days)..')
|
||||
logger.info(
|
||||
f"Measuring data from {min_date.strftime(DATETIME_PRINT_FORMAT)} "
|
||||
f"up to {max_date.strftime(DATETIME_PRINT_FORMAT)} "
|
||||
f"({(max_date - min_date).days} days).."
|
||||
)
|
||||
# TODO: Should edge support shorts? needs to be investigated further
|
||||
# * (add enter_short exit_short)
|
||||
headers = ['date', 'open', 'high', 'low', 'close', 'enter_long', 'exit_long']
|
||||
headers = ["date", "open", "high", "low", "close", "enter_long", "exit_long"]
|
||||
|
||||
trades: list = []
|
||||
for pair, pair_data in preprocessed.items():
|
||||
# Sorting dataframe by date and reset index
|
||||
pair_data = pair_data.sort_values(by=['date'])
|
||||
pair_data = pair_data.sort_values(by=["date"])
|
||||
pair_data = pair_data.reset_index(drop=True)
|
||||
|
||||
df_analyzed = self.strategy.ft_advise_signals(pair_data, {'pair': pair})[headers].copy()
|
||||
df_analyzed = self.strategy.ft_advise_signals(pair_data, {"pair": pair})[headers].copy()
|
||||
|
||||
trades += self._find_trades_for_stoploss_range(df_analyzed, pair, self._stoploss_range)
|
||||
|
||||
@@ -188,8 +195,9 @@ class Edge:
|
||||
|
||||
return True
|
||||
|
||||
def stake_amount(self, pair: str, free_capital: float,
|
||||
total_capital: float, capital_in_trade: float) -> float:
|
||||
def stake_amount(
|
||||
self, pair: str, free_capital: float, total_capital: float, capital_in_trade: float
|
||||
) -> float:
|
||||
stoploss = self.get_stoploss(pair)
|
||||
available_capital = (total_capital + capital_in_trade) * self._capital_ratio
|
||||
allowed_capital_at_risk = available_capital * self._allowed_risk
|
||||
@@ -198,14 +206,18 @@ class Edge:
|
||||
position_size = min(min(max_position_size, free_capital), available_capital)
|
||||
if pair in self._cached_pairs:
|
||||
logger.info(
|
||||
'winrate: %s, expectancy: %s, position size: %s, pair: %s,'
|
||||
' capital in trade: %s, free capital: %s, total capital: %s,'
|
||||
' stoploss: %s, available capital: %s.',
|
||||
"winrate: %s, expectancy: %s, position size: %s, pair: %s,"
|
||||
" capital in trade: %s, free capital: %s, total capital: %s,"
|
||||
" stoploss: %s, available capital: %s.",
|
||||
self._cached_pairs[pair].winrate,
|
||||
self._cached_pairs[pair].expectancy,
|
||||
position_size, pair,
|
||||
capital_in_trade, free_capital, total_capital,
|
||||
stoploss, available_capital
|
||||
position_size,
|
||||
pair,
|
||||
capital_in_trade,
|
||||
free_capital,
|
||||
total_capital,
|
||||
stoploss,
|
||||
available_capital,
|
||||
)
|
||||
return round(position_size, 15)
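# Worked example (not part of the upstream diff, and the max_position_size definition is
# outside the hunk shown): the sizing above is risk-capped. Assuming available_capital = 1000,
# allowed_risk = 0.01 and a 5% stoploss, the capital at risk is 10, so the position size is
# roughly 10 / 0.05 = 200 before being clamped by free capital and available capital.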
|
||||
|
||||
@@ -213,8 +225,10 @@ class Edge:
|
||||
if pair in self._cached_pairs:
|
||||
return self._cached_pairs[pair].stoploss
|
||||
else:
|
||||
logger.warning(f'Tried to access stoploss of non-existing pair {pair}, '
|
||||
'strategy stoploss is returned instead.')
|
||||
logger.warning(
|
||||
f"Tried to access stoploss of non-existing pair {pair}, "
|
||||
"strategy stoploss is returned instead."
|
||||
)
|
||||
return self.strategy.stoploss
|
||||
|
||||
def adjust(self, pairs: List[str]) -> list:
|
||||
@@ -224,8 +238,8 @@ class Edge:
|
||||
final = []
|
||||
for pair, info in self._cached_pairs.items():
|
||||
if (
|
||||
info.expectancy > float(self.edge_config.get('minimum_expectancy', 0.2))
|
||||
and info.winrate > float(self.edge_config.get('minimum_winrate', 0.60))
|
||||
info.expectancy > float(self.edge_config.get("minimum_expectancy", 0.2))
|
||||
and info.winrate > float(self.edge_config.get("minimum_winrate", 0.60))
|
||||
and pair in pairs
|
||||
):
|
||||
final.append(pair)
|
||||
@@ -234,14 +248,14 @@ class Edge:
|
||||
self._final_pairs = final
|
||||
if self._final_pairs:
|
||||
logger.info(
|
||||
'Minimum expectancy and minimum winrate are met only for %s,'
|
||||
' so other pairs are filtered out.',
|
||||
self._final_pairs
|
||||
"Minimum expectancy and minimum winrate are met only for %s,"
|
||||
" so other pairs are filtered out.",
|
||||
self._final_pairs,
|
||||
)
|
||||
else:
|
||||
logger.info(
|
||||
'Edge removed all pairs as no pair with minimum expectancy '
|
||||
'and minimum winrate was found !'
|
||||
"Edge removed all pairs as no pair with minimum expectancy "
|
||||
"and minimum winrate was found !"
|
||||
)
|
||||
|
||||
return self._final_pairs
|
||||
@@ -252,14 +266,17 @@ class Edge:
|
||||
"""
|
||||
final = []
|
||||
for pair, info in self._cached_pairs.items():
|
||||
if (info.expectancy > float(self.edge_config.get('minimum_expectancy', 0.2)) and
|
||||
info.winrate > float(self.edge_config.get('minimum_winrate', 0.60))):
|
||||
final.append({
|
||||
'Pair': pair,
|
||||
'Winrate': info.winrate,
|
||||
'Expectancy': info.expectancy,
|
||||
'Stoploss': info.stoploss,
|
||||
})
|
||||
if info.expectancy > float(
|
||||
self.edge_config.get("minimum_expectancy", 0.2)
|
||||
) and info.winrate > float(self.edge_config.get("minimum_winrate", 0.60)):
|
||||
final.append(
|
||||
{
|
||||
"Pair": pair,
|
||||
"Winrate": info.winrate,
|
||||
"Expectancy": info.expectancy,
|
||||
"Stoploss": info.stoploss,
|
||||
}
|
||||
)
|
||||
return final
|
||||
|
||||
def _fill_calculable_fields(self, result: DataFrame) -> DataFrame:
|
||||
@@ -279,28 +296,29 @@ class Edge:
|
||||
# All returned values are relative, they are defined as ratios.
|
||||
stake = 0.015
|
||||
|
||||
result['trade_duration'] = result['close_date'] - result['open_date']
|
||||
result["trade_duration"] = result["close_date"] - result["open_date"]
|
||||
|
||||
result['trade_duration'] = result['trade_duration'].map(
|
||||
lambda x: int(x.total_seconds() / 60))
|
||||
result["trade_duration"] = result["trade_duration"].map(
|
||||
lambda x: int(x.total_seconds() / 60)
|
||||
)
|
||||
|
||||
# Spends, Takes, Profit, Absolute Profit
|
||||
|
||||
# Buy Price
|
||||
result['buy_vol'] = stake / result['open_rate'] # How many target are we buying
|
||||
result['buy_fee'] = stake * self.fee
|
||||
result['buy_spend'] = stake + result['buy_fee'] # How much we're spending
|
||||
result["buy_vol"] = stake / result["open_rate"] # How many target are we buying
|
||||
result["buy_fee"] = stake * self.fee
|
||||
result["buy_spend"] = stake + result["buy_fee"] # How much we're spending
|
||||
|
||||
# Sell price
|
||||
result['sell_sum'] = result['buy_vol'] * result['close_rate']
|
||||
result['sell_fee'] = result['sell_sum'] * self.fee
|
||||
result['sell_take'] = result['sell_sum'] - result['sell_fee']
|
||||
result["sell_sum"] = result["buy_vol"] * result["close_rate"]
|
||||
result["sell_fee"] = result["sell_sum"] * self.fee
|
||||
result["sell_take"] = result["sell_sum"] - result["sell_fee"]
|
||||
|
||||
# profit_ratio
|
||||
result['profit_ratio'] = (result['sell_take'] - result['buy_spend']) / result['buy_spend']
|
||||
result["profit_ratio"] = (result["sell_take"] - result["buy_spend"]) / result["buy_spend"]
|
||||
|
||||
# Absolute profit
|
||||
result['profit_abs'] = result['sell_take'] - result['buy_spend']
|
||||
result["profit_abs"] = result["sell_take"] - result["buy_spend"]
|
||||
|
||||
return result
|
||||
|
||||
@@ -310,8 +328,8 @@ class Edge:
|
||||
The calculation will be done per pair and per strategy.
|
||||
"""
|
||||
# Removing pairs having less than min_trades_number
|
||||
min_trades_number = self.edge_config.get('min_trade_number', 10)
|
||||
results = results.groupby(['pair', 'stoploss']).filter(lambda x: len(x) > min_trades_number)
|
||||
min_trades_number = self.edge_config.get("min_trade_number", 10)
|
||||
results = results.groupby(["pair", "stoploss"]).filter(lambda x: len(x) > min_trades_number)
|
||||
###################################
|
||||
|
||||
# Removing outliers (Only Pumps) from the dataset
|
||||
@@ -319,13 +337,15 @@ class Edge:
|
||||
# Then every value more than (standard deviation + 2*average) is out (pump)
|
||||
#
|
||||
# Removing Pumps
|
||||
if self.edge_config.get('remove_pumps', False):
|
||||
results = results[results['profit_abs'] < 2 * results['profit_abs'].std()
|
||||
+ results['profit_abs'].mean()]
|
||||
if self.edge_config.get("remove_pumps", False):
|
||||
results = results[
|
||||
results["profit_abs"]
|
||||
< 2 * results["profit_abs"].std() + results["profit_abs"].mean()
|
||||
]
|
||||
##########################################################################
|
||||
|
||||
# Removing trades having a duration more than X minutes (set in config)
|
||||
max_trade_duration = self.edge_config.get('max_trade_duration_minute', 1440)
|
||||
max_trade_duration = self.edge_config.get("max_trade_duration_minute", 1440)
|
||||
results = results[results.trade_duration < max_trade_duration]
|
||||
#######################################################################
|
||||
|
||||
@@ -333,44 +353,54 @@ class Edge:
|
||||
return {}
|
||||
|
||||
groupby_aggregator = {
|
||||
'profit_abs': [
|
||||
('nb_trades', 'count'), # number of all trades
|
||||
('profit_sum', lambda x: x[x > 0].sum()), # cumulative profit of all winning trades
|
||||
('loss_sum', lambda x: abs(x[x < 0].sum())), # cumulative loss of all losing trades
|
||||
('nb_win_trades', lambda x: x[x > 0].count()) # number of winning trades
|
||||
"profit_abs": [
|
||||
("nb_trades", "count"), # number of all trades
|
||||
("profit_sum", lambda x: x[x > 0].sum()), # cumulative profit of all winning trades
|
||||
("loss_sum", lambda x: abs(x[x < 0].sum())), # cumulative loss of all losing trades
|
||||
("nb_win_trades", lambda x: x[x > 0].count()), # number of winning trades
|
||||
],
|
||||
'trade_duration': [('avg_trade_duration', 'mean')]
|
||||
"trade_duration": [("avg_trade_duration", "mean")],
|
||||
}
|
||||
|
||||
# Group by (pair and stoploss) by applying above aggregator
|
||||
df = results.groupby(['pair', 'stoploss'])[['profit_abs', 'trade_duration']].agg(
|
||||
groupby_aggregator).reset_index(col_level=1)
|
||||
df = (
|
||||
results.groupby(["pair", "stoploss"])[["profit_abs", "trade_duration"]]
|
||||
.agg(groupby_aggregator)
|
||||
.reset_index(col_level=1)
|
||||
)
|
||||
|
||||
# Dropping level 0 as we don't need it
|
||||
df.columns = df.columns.droplevel(0)
|
||||
|
||||
# Calculating number of losing trades, average win and average loss
|
||||
df['nb_loss_trades'] = df['nb_trades'] - df['nb_win_trades']
|
||||
df['average_win'] = np.where(df['nb_win_trades'] == 0, 0.0,
|
||||
df['profit_sum'] / df['nb_win_trades'])
|
||||
df['average_loss'] = np.where(df['nb_loss_trades'] == 0, 0.0,
|
||||
df['loss_sum'] / df['nb_loss_trades'])
|
||||
df["nb_loss_trades"] = df["nb_trades"] - df["nb_win_trades"]
|
||||
df["average_win"] = np.where(
|
||||
df["nb_win_trades"] == 0, 0.0, df["profit_sum"] / df["nb_win_trades"]
|
||||
)
|
||||
df["average_loss"] = np.where(
|
||||
df["nb_loss_trades"] == 0, 0.0, df["loss_sum"] / df["nb_loss_trades"]
|
||||
)
|
||||
|
||||
# Win rate = number of profitable trades / number of trades
|
||||
df['winrate'] = df['nb_win_trades'] / df['nb_trades']
|
||||
df["winrate"] = df["nb_win_trades"] / df["nb_trades"]
|
||||
|
||||
# risk_reward_ratio = average win / average loss
|
||||
df['risk_reward_ratio'] = df['average_win'] / df['average_loss']
|
||||
df["risk_reward_ratio"] = df["average_win"] / df["average_loss"]
|
||||
|
||||
# required_risk_reward = (1 / winrate) - 1
|
||||
df['required_risk_reward'] = (1 / df['winrate']) - 1
|
||||
df["required_risk_reward"] = (1 / df["winrate"]) - 1
|
||||
|
||||
# expectancy = (risk_reward_ratio * winrate) - (lossrate)
|
||||
df['expectancy'] = (df['risk_reward_ratio'] * df['winrate']) - (1 - df['winrate'])
|
||||
df["expectancy"] = (df["risk_reward_ratio"] * df["winrate"]) - (1 - df["winrate"])
|
||||
|
||||
# sort by expectancy and stoploss
|
||||
df = df.sort_values(by=['expectancy', 'stoploss'], ascending=False).groupby(
|
||||
'pair').first().sort_values(by=['expectancy'], ascending=False).reset_index()
|
||||
df = (
|
||||
df.sort_values(by=["expectancy", "stoploss"], ascending=False)
|
||||
.groupby("pair")
|
||||
.first()
|
||||
.sort_values(by=["expectancy"], ascending=False)
|
||||
.reset_index()
|
||||
)
|
||||
|
||||
final = {}
|
||||
for x in df.itertuples():
|
||||
@@ -381,17 +411,17 @@ class Edge:
|
||||
x.required_risk_reward,
|
||||
x.expectancy,
|
||||
x.nb_trades,
|
||||
x.avg_trade_duration
|
||||
x.avg_trade_duration,
|
||||
)
|
||||
|
||||
# Returning a list of pairs in order of "expectancy"
|
||||
return final
|
||||
|
||||
def _find_trades_for_stoploss_range(self, df, pair: str, stoploss_range) -> list:
|
||||
buy_column = df['enter_long'].values
|
||||
sell_column = df['exit_long'].values
|
||||
date_column = df['date'].values
|
||||
ohlc_columns = df[['open', 'high', 'low', 'close']].values
|
||||
buy_column = df["enter_long"].values
|
||||
sell_column = df["exit_long"].values
|
||||
date_column = df["date"].values
|
||||
ohlc_columns = df[["open", "high", "low", "close"]].values
|
||||
|
||||
result: list = []
|
||||
for stoploss in stoploss_range:
|
||||
@@ -401,8 +431,9 @@ class Edge:
|
||||
|
||||
return result
|
||||
|
||||
def _detect_next_stop_or_sell_point(self, buy_column, sell_column, date_column,
|
||||
ohlc_columns, stoploss, pair: str):
|
||||
def _detect_next_stop_or_sell_point(
|
||||
self, buy_column, sell_column, date_column, ohlc_columns, stoploss, pair: str
|
||||
):
|
||||
"""
|
||||
Iterate through ohlc_columns in order to find the next trade
|
||||
Next trade opens from the first buy signal noticed to
|
||||
@@ -429,27 +460,28 @@ class Edge:
|
||||
open_trade_index += 1
|
||||
|
||||
open_price = ohlc_columns[open_trade_index, 0]
|
||||
stop_price = (open_price * (stoploss + 1))
|
||||
stop_price = open_price * (stoploss + 1)
|
||||
|
||||
# Searching for the index where stoploss is hit
|
||||
stop_index = utf1st.find_1st(
|
||||
ohlc_columns[open_trade_index:, 2], stop_price, utf1st.cmp_smaller)
|
||||
ohlc_columns[open_trade_index:, 2], stop_price, utf1st.cmp_smaller
|
||||
)
|
||||
|
||||
# If we don't find it then we assume stop_index will be far in future (infinite number)
|
||||
if stop_index == -1:
|
||||
stop_index = float('inf')
|
||||
stop_index = float("inf")
|
||||
|
||||
# Searching for the index where sell is hit
|
||||
sell_index = utf1st.find_1st(sell_column[open_trade_index:], 1, utf1st.cmp_equal)
|
||||
|
||||
# If we don't find it then we assume sell_index will be far in future (infinite number)
|
||||
if sell_index == -1:
|
||||
sell_index = float('inf')
|
||||
sell_index = float("inf")
|
||||
|
||||
# Check if we don't find any stop or sell point (in that case trade remains open)
|
||||
# It is not interesting for Edge to consider it so we simply ignore the trade
|
||||
# And stop iterating there is no more entry
|
||||
if stop_index == sell_index == float('inf'):
|
||||
if stop_index == sell_index == float("inf"):
|
||||
break
|
||||
|
||||
if stop_index <= sell_index:
|
||||
@@ -467,17 +499,18 @@ class Edge:
|
||||
exit_type = ExitType.EXIT_SIGNAL
|
||||
exit_price = ohlc_columns[exit_index, 0]
|
||||
|
||||
trade = {'pair': pair,
|
||||
'stoploss': stoploss,
|
||||
'profit_ratio': '',
|
||||
'profit_abs': '',
|
||||
'open_date': date_column[open_trade_index],
|
||||
'close_date': date_column[exit_index],
|
||||
'trade_duration': '',
|
||||
'open_rate': round(open_price, 15),
|
||||
'close_rate': round(exit_price, 15),
|
||||
'exit_type': exit_type
|
||||
}
|
||||
trade = {
|
||||
"pair": pair,
|
||||
"stoploss": stoploss,
|
||||
"profit_ratio": "",
|
||||
"profit_abs": "",
|
||||
"open_date": date_column[open_trade_index],
|
||||
"close_date": date_column[exit_index],
|
||||
"trade_duration": "",
|
||||
"open_rate": round(open_price, 15),
|
||||
"close_rate": round(exit_price, 15),
|
||||
"exit_type": exit_type,
|
||||
}
|
||||
|
||||
result.append(trade)
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ class BacktestState(Enum):
|
||||
"""
|
||||
Bot application states
|
||||
"""
|
||||
|
||||
STARTUP = 1
|
||||
DATALOAD = 2
|
||||
ANALYZE = 3
|
||||
|
||||
@@ -3,6 +3,7 @@ from enum import Enum
|
||||
|
||||
class CandleType(str, Enum):
|
||||
"""Enum to distinguish candle types"""
|
||||
|
||||
SPOT = "spot"
|
||||
FUTURES = "futures"
|
||||
MARK = "mark"
|
||||
@@ -17,14 +18,14 @@ class CandleType(str, Enum):
|
||||
return f"{self.name.lower()}"
|
||||
|
||||
@staticmethod
|
||||
def from_string(value: str) -> 'CandleType':
|
||||
def from_string(value: str) -> "CandleType":
|
||||
if not value:
|
||||
# Default to spot
|
||||
return CandleType.SPOT
|
||||
return CandleType(value)
|
||||
|
||||
@staticmethod
|
||||
def get_default(trading_mode: str) -> 'CandleType':
|
||||
if trading_mode == 'futures':
|
||||
def get_default(trading_mode: str) -> "CandleType":
|
||||
if trading_mode == "futures":
|
||||
return CandleType.FUTURES
|
||||
return CandleType.SPOT
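(Editorial aside: a quick illustration of the two helpers above, following directly from the code shown.)

CandleType.from_string("")          # -> CandleType.SPOT (empty values default to spot)
CandleType.from_string("futures")   # -> CandleType.FUTURES
CandleType.get_default("futures")   # -> CandleType.FUTURES
CandleType.get_default("spot")      # -> CandleType.SPOT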
|
||||
|
||||
@@ -5,10 +5,11 @@ class ExitCheckTuple:
|
||||
"""
|
||||
NamedTuple for Exit type + reason
|
||||
"""
|
||||
exit_type: ExitType
|
||||
exit_reason: str = ''
|
||||
|
||||
def __init__(self, exit_type: ExitType, exit_reason: str = ''):
|
||||
exit_type: ExitType
|
||||
exit_reason: str = ""
|
||||
|
||||
def __init__(self, exit_type: ExitType, exit_reason: str = ""):
|
||||
self.exit_type = exit_type
|
||||
self.exit_reason = exit_reason or exit_type.value
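(Editorial aside: the reason falls back to the exit type's value when none is given, for example:)

ExitCheckTuple(ExitType.ROI).exit_reason                            # -> "roi"
ExitCheckTuple(ExitType.EXIT_SIGNAL, "rsi_overbought").exit_reason  # -> "rsi_overbought"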
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ class ExitType(Enum):
|
||||
"""
|
||||
Enum to distinguish between exit reasons
|
||||
"""
|
||||
|
||||
ROI = "roi"
|
||||
STOP_LOSS = "stop_loss"
|
||||
STOPLOSS_ON_EXCHANGE = "stoploss_on_exchange"
|
||||
|
||||
@@ -2,7 +2,8 @@ from enum import Enum
|
||||
|
||||
|
||||
class HyperoptState(Enum):
|
||||
""" Hyperopt states """
|
||||
"""Hyperopt states"""
|
||||
|
||||
STARTUP = 1
|
||||
DATALOAD = 2
|
||||
INDICATORS = 3
|
||||
|
||||
@@ -7,6 +7,7 @@ class MarginMode(str, Enum):
|
||||
cross margin/futures margin_mode and
|
||||
isolated margin/futures margin_mode
|
||||
"""
|
||||
|
||||
CROSS = "cross"
|
||||
ISOLATED = "isolated"
|
||||
NONE = ''
|
||||
NONE = ""
|
||||
|
||||
@@ -5,6 +5,7 @@ class MarketDirection(Enum):
|
||||
"""
|
||||
Enum for various market directions.
|
||||
"""
|
||||
|
||||
LONG = "long"
|
||||
SHORT = "short"
|
||||
EVEN = "even"
|
||||
|
||||
@@ -2,5 +2,5 @@ from enum import Enum
|
||||
|
||||
|
||||
class OrderTypeValues(str, Enum):
|
||||
limit = 'limit'
|
||||
market = 'market'
|
||||
limit = "limit"
|
||||
market = "market"
|
||||
|
||||
@@ -3,6 +3,7 @@ from enum import Enum
|
||||
|
||||
class PriceType(str, Enum):
|
||||
"""Enum to distinguish possible trigger prices for stoplosses"""
|
||||
|
||||
LAST = "last"
|
||||
MARK = "mark"
|
||||
INDEX = "index"
|
||||
|
||||
@@ -2,27 +2,27 @@ from enum import Enum
|
||||
|
||||
|
||||
class RPCMessageType(str, Enum):
|
||||
STATUS = 'status'
|
||||
WARNING = 'warning'
|
||||
EXCEPTION = 'exception'
|
||||
STARTUP = 'startup'
|
||||
STATUS = "status"
|
||||
WARNING = "warning"
|
||||
EXCEPTION = "exception"
|
||||
STARTUP = "startup"
|
||||
|
||||
ENTRY = 'entry'
|
||||
ENTRY_FILL = 'entry_fill'
|
||||
ENTRY_CANCEL = 'entry_cancel'
|
||||
ENTRY = "entry"
|
||||
ENTRY_FILL = "entry_fill"
|
||||
ENTRY_CANCEL = "entry_cancel"
|
||||
|
||||
EXIT = 'exit'
|
||||
EXIT_FILL = 'exit_fill'
|
||||
EXIT_CANCEL = 'exit_cancel'
|
||||
EXIT = "exit"
|
||||
EXIT_FILL = "exit_fill"
|
||||
EXIT_CANCEL = "exit_cancel"
|
||||
|
||||
PROTECTION_TRIGGER = 'protection_trigger'
|
||||
PROTECTION_TRIGGER_GLOBAL = 'protection_trigger_global'
|
||||
PROTECTION_TRIGGER = "protection_trigger"
|
||||
PROTECTION_TRIGGER_GLOBAL = "protection_trigger_global"
|
||||
|
||||
STRATEGY_MSG = 'strategy_msg'
|
||||
STRATEGY_MSG = "strategy_msg"
|
||||
|
||||
WHITELIST = 'whitelist'
|
||||
ANALYZED_DF = 'analyzed_df'
|
||||
NEW_CANDLE = 'new_candle'
|
||||
WHITELIST = "whitelist"
|
||||
ANALYZED_DF = "analyzed_df"
|
||||
NEW_CANDLE = "new_candle"
|
||||
|
||||
def __repr__(self):
|
||||
return self.value
|
||||
@@ -33,10 +33,10 @@ class RPCMessageType(str, Enum):
|
||||
|
||||
# Enum for parsing requests from ws consumers
|
||||
class RPCRequestType(str, Enum):
|
||||
SUBSCRIBE = 'subscribe'
|
||||
SUBSCRIBE = "subscribe"
|
||||
|
||||
WHITELIST = 'whitelist'
|
||||
ANALYZED_DF = 'analyzed_df'
|
||||
WHITELIST = "whitelist"
|
||||
ANALYZED_DF = "analyzed_df"
|
||||
|
||||
def __str__(self):
|
||||
return self.value
|
||||
|
||||
@@ -6,6 +6,7 @@ class RunMode(Enum):
|
||||
Bot running mode (backtest, hyperopt, ...)
|
||||
can be "live", "dry-run", "backtest", "edge", "hyperopt".
|
||||
"""
|
||||
|
||||
LIVE = "live"
|
||||
DRY_RUN = "dry_run"
|
||||
BACKTEST = "backtest"
|
||||
|
||||
@@ -5,6 +5,7 @@ class SignalType(Enum):
|
||||
"""
|
||||
Enum to distinguish between enter and exit signals
|
||||
"""
|
||||
|
||||
ENTER_LONG = "enter_long"
|
||||
EXIT_LONG = "exit_long"
|
||||
ENTER_SHORT = "enter_short"
|
||||
@@ -18,6 +19,7 @@ class SignalTagType(Enum):
|
||||
"""
|
||||
Enum for signal columns
|
||||
"""
|
||||
|
||||
ENTER_TAG = "enter_tag"
|
||||
EXIT_TAG = "exit_tag"
|
||||
|
||||
@@ -26,8 +28,8 @@ class SignalTagType(Enum):
|
||||
|
||||
|
||||
class SignalDirection(str, Enum):
|
||||
LONG = 'long'
|
||||
SHORT = 'short'
|
||||
LONG = "long"
|
||||
SHORT = "short"
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.name.lower()}"
|
||||
|
||||
@@ -5,6 +5,7 @@ class State(Enum):
|
||||
"""
|
||||
Bot application states
|
||||
"""
|
||||
|
||||
RUNNING = 1
|
||||
STOPPED = 2
|
||||
RELOAD_CONFIG = 3
|
||||
|
||||
@@ -6,6 +6,7 @@ class TradingMode(str, Enum):
|
||||
Enum to distinguish between
|
||||
spot, margin, futures or any other trading method
|
||||
"""
|
||||
|
||||
SPOT = "spot"
|
||||
MARGIN = "margin"
|
||||
FUTURES = "futures"
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
# isort: off
|
||||
from freqtrade.exchange.common import remove_exchange_credentials, MAP_EXCHANGE_CHILDCLASS
|
||||
from freqtrade.exchange.exchange import Exchange
|
||||
|
||||
# isort: on
|
||||
from freqtrade.exchange.binance import Binance
|
||||
from freqtrade.exchange.bingx import Bingx
|
||||
@@ -10,18 +11,30 @@ from freqtrade.exchange.bitpanda import Bitpanda
|
||||
from freqtrade.exchange.bitvavo import Bitvavo
|
||||
from freqtrade.exchange.bybit import Bybit
|
||||
from freqtrade.exchange.coinbasepro import Coinbasepro
|
||||
from freqtrade.exchange.exchange_utils import (ROUND_DOWN, ROUND_UP, amount_to_contract_precision,
|
||||
amount_to_contracts, amount_to_precision,
|
||||
available_exchanges, ccxt_exchanges,
|
||||
contracts_to_amount, date_minus_candles,
|
||||
is_exchange_known_ccxt, list_available_exchanges,
|
||||
market_is_active, price_to_precision,
|
||||
validate_exchange)
|
||||
from freqtrade.exchange.exchange_utils_timeframe import (timeframe_to_minutes, timeframe_to_msecs,
|
||||
timeframe_to_next_date,
|
||||
timeframe_to_prev_date,
|
||||
timeframe_to_resample_freq,
|
||||
timeframe_to_seconds)
|
||||
from freqtrade.exchange.exchange_utils import (
|
||||
ROUND_DOWN,
|
||||
ROUND_UP,
|
||||
amount_to_contract_precision,
|
||||
amount_to_contracts,
|
||||
amount_to_precision,
|
||||
available_exchanges,
|
||||
ccxt_exchanges,
|
||||
contracts_to_amount,
|
||||
date_minus_candles,
|
||||
is_exchange_known_ccxt,
|
||||
list_available_exchanges,
|
||||
market_is_active,
|
||||
price_to_precision,
|
||||
validate_exchange,
|
||||
)
|
||||
from freqtrade.exchange.exchange_utils_timeframe import (
|
||||
timeframe_to_minutes,
|
||||
timeframe_to_msecs,
|
||||
timeframe_to_next_date,
|
||||
timeframe_to_prev_date,
|
||||
timeframe_to_resample_freq,
|
||||
timeframe_to_seconds,
|
||||
)
|
||||
from freqtrade.exchange.gate import Gate
|
||||
from freqtrade.exchange.hitbtc import Hitbtc
|
||||
from freqtrade.exchange.htx import Htx
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
""" Binance exchange subclass """
|
||||
"""Binance exchange subclass"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
@@ -18,7 +19,6 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Binance(Exchange):
|
||||
|
||||
_ft_has: Dict = {
|
||||
"stoploss_on_exchange": True,
|
||||
"stop_price_param": "stopPrice",
|
||||
@@ -36,7 +36,7 @@ class Binance(Exchange):
|
||||
"tickers_have_price": False,
|
||||
"floor_leverage": True,
|
||||
"stop_price_type_field": "workingType",
|
||||
"order_props_in_contracts": ['amount', 'cost', 'filled', 'remaining'],
|
||||
"order_props_in_contracts": ["amount", "cost", "filled", "remaining"],
|
||||
"stop_price_type_value_mapping": {
|
||||
PriceType.LAST: "CONTRACT_PRICE",
|
||||
PriceType.MARK: "MARK_PRICE",
|
||||
@@ -67,36 +67,44 @@ class Binance(Exchange):
|
||||
Must be overridden in child methods if required.
|
||||
"""
|
||||
try:
|
||||
if self.trading_mode == TradingMode.FUTURES and not self._config['dry_run']:
|
||||
if self.trading_mode == TradingMode.FUTURES and not self._config["dry_run"]:
|
||||
position_side = self._api.fapiPrivateGetPositionSideDual()
|
||||
self._log_exchange_response('position_side_setting', position_side)
|
||||
self._log_exchange_response("position_side_setting", position_side)
|
||||
assets_margin = self._api.fapiPrivateGetMultiAssetsMargin()
|
||||
self._log_exchange_response('multi_asset_margin', assets_margin)
|
||||
self._log_exchange_response("multi_asset_margin", assets_margin)
|
||||
msg = ""
|
||||
if position_side.get('dualSidePosition') is True:
|
||||
if position_side.get("dualSidePosition") is True:
|
||||
msg += (
|
||||
"\nHedge Mode is not supported by freqtrade. "
|
||||
"Please change 'Position Mode' on your binance futures account.")
|
||||
if assets_margin.get('multiAssetsMargin') is True:
|
||||
msg += ("\nMulti-Asset Mode is not supported by freqtrade. "
|
||||
"Please change 'Asset Mode' on your binance futures account.")
|
||||
"Please change 'Position Mode' on your binance futures account."
|
||||
)
|
||||
if assets_margin.get("multiAssetsMargin") is True:
|
||||
msg += (
|
||||
"\nMulti-Asset Mode is not supported by freqtrade. "
|
||||
"Please change 'Asset Mode' on your binance futures account."
|
||||
)
|
||||
if msg:
|
||||
raise OperationalException(msg)
|
||||
except ccxt.DDoSProtection as e:
|
||||
raise DDosProtection(e) from e
|
||||
except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
|
||||
raise TemporaryError(
|
||||
f'Error in additional_exchange_init due to {e.__class__.__name__}. Message: {e}'
|
||||
) from e
|
||||
f"Error in additional_exchange_init due to {e.__class__.__name__}. Message: {e}"
|
||||
) from e
|
||||
|
||||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
async def _async_get_historic_ohlcv(self, pair: str, timeframe: str,
|
||||
since_ms: int, candle_type: CandleType,
|
||||
is_new_pair: bool = False, raise_: bool = False,
|
||||
until_ms: Optional[int] = None
|
||||
) -> OHLCVResponse:
|
||||
async def _async_get_historic_ohlcv(
|
||||
self,
|
||||
pair: str,
|
||||
timeframe: str,
|
||||
since_ms: int,
|
||||
candle_type: CandleType,
|
||||
is_new_pair: bool = False,
|
||||
raise_: bool = False,
|
||||
until_ms: Optional[int] = None,
|
||||
) -> OHLCVResponse:
|
||||
"""
|
||||
Overwrite to introduce "fast new pair" functionality by detecting the pair's listing date
|
||||
Does not work for other exchanges, which don't return the earliest data when called with "0"
|
||||
@@ -109,7 +117,8 @@ class Binance(Exchange):
|
||||
since_ms = x[3][0][0]
|
||||
logger.info(
|
||||
f"Candle-data for {pair} available starting with "
|
||||
f"{datetime.fromtimestamp(since_ms // 1000, tz=timezone.utc).isoformat()}.")
|
||||
f"{datetime.fromtimestamp(since_ms // 1000, tz=timezone.utc).isoformat()}."
|
||||
)
|
||||
|
||||
return await super()._async_get_historic_ohlcv(
|
||||
pair=pair,
|
||||
@@ -135,7 +144,7 @@ class Binance(Exchange):
|
||||
def dry_run_liquidation_price(
|
||||
self,
|
||||
pair: str,
|
||||
open_rate: float, # Entry price of position
|
||||
open_rate: float, # Entry price of position
|
||||
is_short: bool,
|
||||
amount: float,
|
||||
stake_amount: float,
|
||||
@@ -177,7 +186,7 @@ class Binance(Exchange):
|
||||
# maintenance_amt: (CUM) Maintenance Amount of position
|
||||
mm_ratio, maintenance_amt = self.get_maintenance_ratio_and_amt(pair, stake_amount)
|
||||
|
||||
if (maintenance_amt is None):
|
||||
if maintenance_amt is None:
|
||||
raise OperationalException(
|
||||
"Parameter maintenance_amt is required by Binance.liquidation_price"
|
||||
f"for {self.trading_mode.value}"
|
||||
@@ -185,24 +194,18 @@ class Binance(Exchange):
|
||||
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
return (
|
||||
(
|
||||
(wallet_balance + cross_vars + maintenance_amt) -
|
||||
(side_1 * amount * open_rate)
|
||||
) / (
|
||||
(amount * mm_ratio) - (side_1 * amount)
|
||||
)
|
||||
)
|
||||
(wallet_balance + cross_vars + maintenance_amt) - (side_1 * amount * open_rate)
|
||||
) / ((amount * mm_ratio) - (side_1 * amount))
|
||||
else:
|
||||
raise OperationalException(
|
||||
"Freqtrade only supports isolated futures for leverage trading")
|
||||
"Freqtrade only supports isolated futures for leverage trading"
|
||||
)
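(Editorial aside: to make the isolated-futures liquidation formula above easier to follow, here is a sketch with made-up numbers, a 10x long with a 1% maintenance margin ratio, and the maintenance amount and cross terms assumed to be 0 for this example.)

wallet_balance = 100.0    # isolated margin backing the position
cross_vars = 0.0          # only relevant for cross margin
maintenance_amt = 0.0     # CUM maintenance amount for this tier (assumed)
mm_ratio = 0.01           # maintenance margin ratio (assumed)
open_rate = 20.0
amount = 50.0             # base-currency position size (10x leverage on the 100 stake)
side_1 = 1                # 1 for a long position, -1 for a short

liq_price = (
    (wallet_balance + cross_vars + maintenance_amt) - (side_1 * amount * open_rate)
) / ((amount * mm_ratio) - (side_1 * amount))
# liq_price is roughly 18.18, i.e. the long is liquidated about 9% below entry.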
|
||||
|
||||
@retrier
|
||||
def load_leverage_tiers(self) -> Dict[str, List[Dict]]:
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
if self._config['dry_run']:
|
||||
leverage_tiers_path = (
|
||||
Path(__file__).parent / 'binance_leverage_tiers.json'
|
||||
)
|
||||
if self._config["dry_run"]:
|
||||
leverage_tiers_path = Path(__file__).parent / "binance_leverage_tiers.json"
|
||||
with leverage_tiers_path.open() as json_file:
|
||||
return json_load(json_file)
|
||||
else:
|
||||
@@ -211,8 +214,10 @@ class Binance(Exchange):
|
||||
except ccxt.DDoSProtection as e:
|
||||
raise DDosProtection(e) from e
|
||||
except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
|
||||
raise TemporaryError(f'Could not fetch leverage amounts due to'
|
||||
f'{e.__class__.__name__}. Message: {e}') from e
|
||||
raise TemporaryError(
|
||||
f"Could not fetch leverage amounts due to"
|
||||
f"{e.__class__.__name__}. Message: {e}"
|
||||
) from e
|
||||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
else:
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
""" Bingx exchange subclass """
|
||||
"""Bingx exchange subclass"""
|
||||
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
""" Bitmart exchange subclass """
|
||||
"""Bitmart exchange subclass"""
|
||||
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
""" Bitpanda exchange subclass """
|
||||
"""Bitpanda exchange subclass"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from typing import Dict, List, Optional
|
||||
@@ -15,8 +16,9 @@ class Bitpanda(Exchange):
|
||||
with this exchange.
|
||||
"""
|
||||
|
||||
def get_trades_for_order(self, order_id: str, pair: str, since: datetime,
|
||||
params: Optional[Dict] = None) -> List:
|
||||
def get_trades_for_order(
|
||||
self, order_id: str, pair: str, since: datetime, params: Optional[Dict] = None
|
||||
) -> List:
|
||||
"""
|
||||
Fetch Orders using the "fetch_my_trades" endpoint and filter them by order-id.
|
||||
The "since" argument passed in is coming from the database and is in UTC,
|
||||
@@ -33,5 +35,5 @@ class Bitpanda(Exchange):
|
||||
:param pair: Pair the order is for
|
||||
:param since: datetime object of the order creation time. Assumes object is in UTC.
|
||||
"""
|
||||
params = {'to': int(datetime.now(timezone.utc).timestamp() * 1000)}
|
||||
params = {"to": int(datetime.now(timezone.utc).timestamp() * 1000)}
|
||||
return super().get_trades_for_order(order_id, pair, since, params)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Kucoin exchange subclass."""
|
||||
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
""" Bybit exchange subclass """
|
||||
"""Bybit exchange subclass"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
@@ -25,6 +26,7 @@ class Bybit(Exchange):
|
||||
officially supported by the Freqtrade development team. So some features
|
||||
may still not work as expected.
|
||||
"""
|
||||
|
||||
unified_account = False
|
||||
|
||||
_ft_has: Dict = {
|
||||
@@ -60,20 +62,14 @@ class Bybit(Exchange):
|
||||
# ccxt defaults to swap mode.
|
||||
config = {}
|
||||
if self.trading_mode == TradingMode.SPOT:
|
||||
config.update({
|
||||
"options": {
|
||||
"defaultType": "spot"
|
||||
}
|
||||
})
|
||||
config.update({"options": {"defaultType": "spot"}})
|
||||
config.update(super()._ccxt_config)
|
||||
return config
|
||||
|
||||
def market_is_future(self, market: Dict[str, Any]) -> bool:
|
||||
main = super().market_is_future(market)
|
||||
# For ByBit, we'll only support USDT markets for now.
|
||||
return (
|
||||
main and market['settle'] == 'USDT'
|
||||
)
|
||||
return main and market["settle"] == "USDT"
|
||||
|
||||
@retrier
|
||||
def additional_exchange_init(self) -> None:
|
||||
@@ -83,17 +79,19 @@ class Bybit(Exchange):
|
||||
Must be overridden in child methods if required.
|
||||
"""
|
||||
try:
|
||||
if not self._config['dry_run']:
|
||||
if not self._config["dry_run"]:
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
position_mode = self._api.set_position_mode(False)
|
||||
self._log_exchange_response('set_position_mode', position_mode)
|
||||
self._log_exchange_response("set_position_mode", position_mode)
|
||||
is_unified = self._api.is_unified_enabled()
|
||||
# Returns a tuple of bools, first for margin, second for Account
|
||||
if is_unified and len(is_unified) > 1 and is_unified[1]:
|
||||
self.unified_account = True
|
||||
logger.info("Bybit: Unified account.")
|
||||
raise OperationalException("Bybit: Unified account is not supported. "
|
||||
"Please use a standard (sub)account.")
|
||||
raise OperationalException(
|
||||
"Bybit: Unified account is not supported. "
|
||||
"Please use a standard (sub)account."
|
||||
)
|
||||
else:
|
||||
self.unified_account = False
|
||||
logger.info("Bybit: Standard account.")
|
||||
@@ -101,14 +99,14 @@ class Bybit(Exchange):
|
||||
raise DDosProtection(e) from e
|
||||
except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
|
||||
raise TemporaryError(
|
||||
f'Error in additional_exchange_init due to {e.__class__.__name__}. Message: {e}'
|
||||
) from e
|
||||
f"Error in additional_exchange_init due to {e.__class__.__name__}. Message: {e}"
|
||||
) from e
|
||||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
def ohlcv_candle_limit(
|
||||
self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None) -> int:
|
||||
|
||||
self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None
|
||||
) -> int:
|
||||
if candle_type in (CandleType.FUNDING_RATE):
|
||||
return 200
|
||||
|
||||
@@ -116,7 +114,7 @@ class Bybit(Exchange):
|
||||
|
||||
def _lev_prep(self, pair: str, leverage: float, side: BuySell, accept_fail: bool = False):
|
||||
if self.trading_mode != TradingMode.SPOT:
|
||||
params = {'leverage': leverage}
|
||||
params = {"leverage": leverage}
|
||||
self.set_margin_mode(pair, self.margin_mode, accept_fail=True, params=params)
|
||||
self._set_leverage(leverage, pair, accept_fail=True)
|
||||
|
||||
@@ -126,7 +124,7 @@ class Bybit(Exchange):
|
||||
ordertype: str,
|
||||
leverage: float,
|
||||
reduceOnly: bool,
|
||||
time_in_force: str = 'GTC',
|
||||
time_in_force: str = "GTC",
|
||||
) -> Dict:
|
||||
params = super()._get_params(
|
||||
side=side,
|
||||
@@ -136,13 +134,13 @@ class Bybit(Exchange):
|
||||
time_in_force=time_in_force,
|
||||
)
|
||||
if self.trading_mode == TradingMode.FUTURES and self.margin_mode:
|
||||
params['position_idx'] = 0
|
||||
params["position_idx"] = 0
|
||||
return params
|
||||
|
||||
def dry_run_liquidation_price(
|
||||
self,
|
||||
pair: str,
|
||||
open_rate: float, # Entry price of position
|
||||
open_rate: float, # Entry price of position
|
||||
is_short: bool,
|
||||
amount: float,
|
||||
stake_amount: float,
|
||||
@@ -185,10 +183,8 @@ class Bybit(Exchange):
|
||||
mm_ratio, _ = self.get_maintenance_ratio_and_amt(pair, stake_amount)
|
||||
|
||||
if self.trading_mode == TradingMode.FUTURES and self.margin_mode == MarginMode.ISOLATED:
|
||||
|
||||
if market['inverse']:
|
||||
raise OperationalException(
|
||||
"Freqtrade does not yet support inverse contracts")
|
||||
if market["inverse"]:
|
||||
raise OperationalException("Freqtrade does not yet support inverse contracts")
|
||||
initial_margin_rate = 1 / leverage
|
||||
|
||||
# See docstring - ignores extra margin!
|
||||
@@ -199,10 +195,12 @@ class Bybit(Exchange):
|
||||
|
||||
else:
|
||||
raise OperationalException(
|
||||
"Freqtrade only supports isolated futures for leverage trading")
|
||||
"Freqtrade only supports isolated futures for leverage trading"
|
||||
)
|
||||
|
||||
def get_funding_fees(
|
||||
self, pair: str, amount: float, is_short: bool, open_date: datetime) -> float:
|
||||
self, pair: str, amount: float, is_short: bool, open_date: datetime
|
||||
) -> float:
|
||||
"""
|
||||
Fetch funding fees, either from the exchange (live) or calculates them
|
||||
based on funding rate/mark price history
|
||||
@@ -216,8 +214,7 @@ class Bybit(Exchange):
|
||||
# Bybit does not provide "applied" funding fees per position.
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
try:
|
||||
return self._fetch_and_calculate_funding_fees(
|
||||
pair, amount, is_short, open_date)
|
||||
return self._fetch_and_calculate_funding_fees(pair, amount, is_short, open_date)
|
||||
except ExchangeError:
|
||||
logger.warning(f"Could not update funding fees for {pair}.")
|
||||
return 0.0
|
||||
@@ -234,7 +231,7 @@ class Bybit(Exchange):
|
||||
|
||||
while since < dt_now():
|
||||
until = since + timedelta(days=7, minutes=-1)
|
||||
orders += super().fetch_orders(pair, since, params={'until': dt_ts(until)})
|
||||
orders += super().fetch_orders(pair, since, params={"until": dt_ts(until)})
|
||||
since = until
|
||||
|
||||
return orders
|
||||
@@ -242,12 +239,12 @@ class Bybit(Exchange):
|
||||
def fetch_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
|
||||
order = super().fetch_order(order_id, pair, params)
|
||||
if (
|
||||
order.get('status') == 'canceled'
|
||||
and order.get('filled') == 0.0
|
||||
and order.get('remaining') == 0.0
|
||||
order.get("status") == "canceled"
|
||||
and order.get("filled") == 0.0
|
||||
and order.get("remaining") == 0.0
|
||||
):
|
||||
# Canceled orders will have "remaining=0" on bybit.
|
||||
order['remaining'] = None
|
||||
order["remaining"] = None
|
||||
return order
|
||||
|
||||
@retrier
|
||||
@@ -258,7 +255,7 @@ class Bybit(Exchange):
|
||||
"""
|
||||
|
||||
# Load cached tiers
|
||||
tiers_cached = self.load_cached_leverage_tiers(self._config['stake_currency'])
|
||||
tiers_cached = self.load_cached_leverage_tiers(self._config["stake_currency"])
|
||||
if tiers_cached:
|
||||
tiers = tiers_cached
|
||||
return tiers
|
||||
@@ -268,12 +265,12 @@ class Bybit(Exchange):
|
||||
symbols = self._api.market_symbols([])
|
||||
|
||||
def parse_resp(response):
|
||||
result = self._api.safe_dict(response, 'result', {})
|
||||
data = self._api.safe_list(result, 'list', [])
|
||||
return self._api.parse_leverage_tiers(data, symbols, 'symbol')
|
||||
result = self._api.safe_dict(response, "result", {})
|
||||
data = self._api.safe_list(result, "list", [])
|
||||
return self._api.parse_leverage_tiers(data, symbols, "symbol")
|
||||
|
||||
params = {
|
||||
'category': 'linear',
|
||||
"category": "linear",
|
||||
}
|
||||
tiers = {}
|
||||
# 20 pairs ... should be sufficient assuming 30 pairs per page
|
||||
@@ -282,11 +279,9 @@ class Bybit(Exchange):
|
||||
# Fetch from private endpoint
|
||||
response = self._api.publicGetV5MarketRiskLimit(params)
|
||||
tiers = tiers | parse_resp(response)
|
||||
if (cursor := response['result']['nextPageCursor']) == '':
|
||||
if (cursor := response["result"]["nextPageCursor"]) == "":
|
||||
break
|
||||
params.update({
|
||||
"cursor": cursor
|
||||
})
|
||||
params.update({"cursor": cursor})
|
||||
|
||||
self.cache_leverage_tiers(tiers, self._config['stake_currency'])
|
||||
self.cache_leverage_tiers(tiers, self._config["stake_currency"])
|
||||
return tiers
|
||||
|
||||
@@ -21,45 +21,52 @@ def check_exchange(config: Config, check_for_bad: bool = True) -> bool:
|
||||
and thus is not known for the Freqtrade at all.
|
||||
"""
|
||||
|
||||
if (config['runmode'] in [RunMode.PLOT, RunMode.UTIL_NO_EXCHANGE, RunMode.OTHER]
|
||||
and not config.get('exchange', {}).get('name')):
|
||||
if config["runmode"] in [
|
||||
RunMode.PLOT,
|
||||
RunMode.UTIL_NO_EXCHANGE,
|
||||
RunMode.OTHER,
|
||||
] and not config.get("exchange", {}).get("name"):
|
||||
# Skip checking exchange in plot mode, since it requires no exchange
|
||||
return True
|
||||
logger.info("Checking exchange...")
|
||||
|
||||
exchange = config.get('exchange', {}).get('name', '').lower()
|
||||
exchange = config.get("exchange", {}).get("name", "").lower()
|
||||
if not exchange:
|
||||
raise OperationalException(
|
||||
f'This command requires a configured exchange. You should either use '
|
||||
f'`--exchange <exchange_name>` or specify a configuration file via `--config`.\n'
|
||||
f'The following exchanges are available for Freqtrade: '
|
||||
f"This command requires a configured exchange. You should either use "
|
||||
f"`--exchange <exchange_name>` or specify a configuration file via `--config`.\n"
|
||||
f"The following exchanges are available for Freqtrade: "
|
||||
f'{", ".join(available_exchanges())}'
|
||||
)
|
||||
|
||||
if not is_exchange_known_ccxt(exchange):
|
||||
raise OperationalException(
|
||||
f'Exchange "{exchange}" is not known to the ccxt library '
|
||||
f'and therefore not available for the bot.\n'
|
||||
f'The following exchanges are available for Freqtrade: '
|
||||
f"and therefore not available for the bot.\n"
|
||||
f"The following exchanges are available for Freqtrade: "
|
||||
f'{", ".join(available_exchanges())}'
|
||||
)
|
||||
|
||||
valid, reason = validate_exchange(exchange)
|
||||
if not valid:
|
||||
if check_for_bad:
|
||||
raise OperationalException(f'Exchange "{exchange}" will not work with Freqtrade. '
|
||||
f'Reason: {reason}')
|
||||
raise OperationalException(
|
||||
f'Exchange "{exchange}" will not work with Freqtrade. ' f"Reason: {reason}"
|
||||
)
|
||||
else:
|
||||
logger.warning(f'Exchange "{exchange}" will not work with Freqtrade. Reason: {reason}')
|
||||
|
||||
if MAP_EXCHANGE_CHILDCLASS.get(exchange, exchange) in SUPPORTED_EXCHANGES:
|
||||
logger.info(f'Exchange "{exchange}" is officially supported '
|
||||
f'by the Freqtrade development team.')
|
||||
logger.info(
|
||||
f'Exchange "{exchange}" is officially supported ' f"by the Freqtrade development team."
|
||||
)
|
||||
else:
|
||||
logger.warning(f'Exchange "{exchange}" is known to the ccxt library, '
|
||||
f'available for the bot, but not officially supported '
|
||||
f'by the Freqtrade development team. '
|
||||
f'It may work flawlessly (please report back) or have serious issues. '
|
||||
f'Use it at your own discretion.')
|
||||
logger.warning(
|
||||
f'Exchange "{exchange}" is known to the ccxt library, '
|
||||
f"available for the bot, but not officially supported "
|
||||
f"by the Freqtrade development team. "
|
||||
f"It may work flawlessly (please report back) or have serious issues. "
|
||||
f"Use it at your own discretion."
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
""" CoinbasePro exchange subclass """
|
||||
"""CoinbasePro exchange subclass"""
|
||||
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
|
||||
@@ -43,46 +43,48 @@ BAD_EXCHANGES = {
|
||||
}
|
||||
|
||||
MAP_EXCHANGE_CHILDCLASS = {
|
||||
'binanceus': 'binance',
|
||||
'binanceje': 'binance',
|
||||
'binanceusdm': 'binance',
|
||||
'okex': 'okx',
|
||||
'gateio': 'gate',
|
||||
'huboi': 'htx',
|
||||
"binanceus": "binance",
|
||||
"binanceje": "binance",
|
||||
"binanceusdm": "binance",
|
||||
"okex": "okx",
|
||||
"gateio": "gate",
|
||||
"huboi": "htx",
|
||||
}
|
||||
|
||||
SUPPORTED_EXCHANGES = [
|
||||
'binance',
|
||||
'bitmart',
|
||||
'gate',
|
||||
'htx',
|
||||
'kraken',
|
||||
'okx',
|
||||
"binance",
|
||||
"bitmart",
|
||||
"gate",
|
||||
"htx",
|
||||
"kraken",
|
||||
"okx",
|
||||
]
|
||||
|
||||
# either the main, or replacement methods (array) is required
|
||||
EXCHANGE_HAS_REQUIRED: Dict[str, List[str]] = {
|
||||
# Required / private
|
||||
'fetchOrder': ['fetchOpenOrder', 'fetchClosedOrder'],
|
||||
'cancelOrder': [],
|
||||
'createOrder': [],
|
||||
'fetchBalance': [],
|
||||
|
||||
"fetchOrder": ["fetchOpenOrder", "fetchClosedOrder"],
|
||||
"cancelOrder": [],
|
||||
"createOrder": [],
|
||||
"fetchBalance": [],
|
||||
# Public endpoints
|
||||
'fetchOHLCV': [],
|
||||
"fetchOHLCV": [],
|
||||
}
|
||||
|
||||
EXCHANGE_HAS_OPTIONAL = [
|
||||
# Private
|
||||
'fetchMyTrades', # Trades for order - fee detection
|
||||
'createLimitOrder', 'createMarketOrder', # Either OR for orders
|
||||
"fetchMyTrades", # Trades for order - fee detection
|
||||
"createLimitOrder",
|
||||
"createMarketOrder", # Either OR for orders
|
||||
# 'setLeverage', # Margin/Futures trading
|
||||
# 'setMarginMode', # Margin/Futures trading
|
||||
# 'fetchFundingHistory', # Futures trading
|
||||
# Public
|
||||
'fetchOrderBook', 'fetchL2OrderBook', 'fetchTicker', # OR for pricing
|
||||
'fetchTickers', # For volumepairlist?
|
||||
'fetchTrades', # Downloading trades data
|
||||
"fetchOrderBook",
|
||||
"fetchL2OrderBook",
|
||||
"fetchTicker", # OR for pricing
|
||||
"fetchTickers", # For volumepairlist?
|
||||
"fetchTrades", # Downloading trades data
|
||||
# 'fetchFundingRateHistory', # Futures trading
|
||||
# 'fetchPositions', # Futures trading
|
||||
# 'fetchLeverageTiers', # Futures initialization
|
||||
@@ -99,11 +101,11 @@ def remove_exchange_credentials(exchange_config: ExchangeConfig, dry_run: bool)
|
||||
Modifies the input dict!
|
||||
"""
|
||||
if dry_run:
|
||||
exchange_config['key'] = ''
|
||||
exchange_config['apiKey'] = ''
|
||||
exchange_config['secret'] = ''
|
||||
exchange_config['password'] = ''
|
||||
exchange_config['uid'] = ''
|
||||
exchange_config["key"] = ""
|
||||
exchange_config["apiKey"] = ""
|
||||
exchange_config["secret"] = ""
|
||||
exchange_config["password"] = ""
|
||||
exchange_config["uid"] = ""
|
||||
|
||||
|
||||
def calculate_backoff(retrycount, max_retries):
|
||||
@@ -115,25 +117,27 @@ def calculate_backoff(retrycount, max_retries):
|
||||
|
||||
def retrier_async(f):
|
||||
async def wrapper(*args, **kwargs):
|
||||
count = kwargs.pop('count', API_RETRY_COUNT)
|
||||
count = kwargs.pop("count", API_RETRY_COUNT)
|
||||
kucoin = args[0].name == "KuCoin" # Check if the exchange is KuCoin.
|
||||
try:
|
||||
return await f(*args, **kwargs)
|
||||
except TemporaryError as ex:
|
||||
msg = f'{f.__name__}() returned exception: "{ex}". '
|
||||
if count > 0:
|
||||
msg += f'Retrying still for {count} times.'
|
||||
msg += f"Retrying still for {count} times."
|
||||
count -= 1
|
||||
kwargs['count'] = count
|
||||
kwargs["count"] = count
|
||||
if isinstance(ex, DDosProtection):
|
||||
if kucoin and "429000" in str(ex):
|
||||
# Temporary fix for 429000 error on kucoin
|
||||
# see https://github.com/freqtrade/freqtrade/issues/5700 for details.
|
||||
_get_logging_mixin().log_once(
|
||||
f"Kucoin 429 error, avoid triggering DDosProtection backoff delay. "
|
||||
f"{count} tries left before giving up", logmethod=logger.warning)
|
||||
f"{count} tries left before giving up",
|
||||
logmethod=logger.warning,
|
||||
)
|
||||
# Reset msg to avoid logging too many times.
|
||||
msg = ''
|
||||
msg = ""
|
||||
else:
|
||||
backoff_delay = calculate_backoff(count + 1, API_RETRY_COUNT)
|
||||
logger.info(f"Applying DDosProtection backoff delay: {backoff_delay}")
|
||||
@@ -142,38 +146,37 @@ def retrier_async(f):
|
||||
logger.warning(msg)
|
||||
return await wrapper(*args, **kwargs)
|
||||
else:
|
||||
logger.warning(msg + 'Giving up.')
|
||||
logger.warning(msg + "Giving up.")
|
||||
raise ex
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
F = TypeVar('F', bound=Callable[..., Any])
|
||||
F = TypeVar("F", bound=Callable[..., Any])
|
||||
|
||||
|
||||
# Type shenanigans
|
||||
@overload
|
||||
def retrier(_func: F) -> F:
|
||||
...
|
||||
def retrier(_func: F) -> F: ...
|
||||
|
||||
|
||||
@overload
|
||||
def retrier(*, retries=API_RETRY_COUNT) -> Callable[[F], F]:
|
||||
...
|
||||
def retrier(*, retries=API_RETRY_COUNT) -> Callable[[F], F]: ...
|
||||
|
||||
|
||||
def retrier(_func: Optional[F] = None, *, retries=API_RETRY_COUNT):
|
||||
def decorator(f: F) -> F:
|
||||
@wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
count = kwargs.pop('count', retries)
|
||||
count = kwargs.pop("count", retries)
|
||||
try:
|
||||
return f(*args, **kwargs)
|
||||
except (TemporaryError, RetryableOrderError) as ex:
|
||||
msg = f'{f.__name__}() returned exception: "{ex}". '
|
||||
if count > 0:
|
||||
logger.warning(msg + f'Retrying still for {count} times.')
|
||||
logger.warning(msg + f"Retrying still for {count} times.")
|
||||
count -= 1
|
||||
kwargs.update({'count': count})
|
||||
kwargs.update({"count": count})
|
||||
if isinstance(ex, (DDosProtection, RetryableOrderError)):
|
||||
# increasing backoff
|
||||
backoff_delay = calculate_backoff(count + 1, retries)
|
||||
@@ -181,9 +184,11 @@ def retrier(_func: Optional[F] = None, *, retries=API_RETRY_COUNT):
|
||||
time.sleep(backoff_delay)
|
||||
return wrapper(*args, **kwargs)
|
||||
else:
|
||||
logger.warning(msg + 'Giving up.')
|
||||
logger.warning(msg + "Giving up.")
|
||||
raise ex
|
||||
|
||||
return cast(F, wrapper)
|
||||
|
||||
# Support both @retrier and @retrier(retries=2) syntax
|
||||
if _func is None:
|
||||
return decorator
|
||||
|
||||
File diff suppressed because it is too large

@@ -1,16 +1,29 @@
|
||||
"""
|
||||
Exchange support utils
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from math import ceil, floor
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
import ccxt
|
||||
from ccxt import (DECIMAL_PLACES, ROUND, ROUND_DOWN, ROUND_UP, SIGNIFICANT_DIGITS, TICK_SIZE,
|
||||
TRUNCATE, decimal_to_precision)
|
||||
from ccxt import (
|
||||
DECIMAL_PLACES,
|
||||
ROUND,
|
||||
ROUND_DOWN,
|
||||
ROUND_UP,
|
||||
SIGNIFICANT_DIGITS,
|
||||
TICK_SIZE,
|
||||
TRUNCATE,
|
||||
decimal_to_precision,
|
||||
)
|
||||
|
||||
from freqtrade.exchange.common import (BAD_EXCHANGES, EXCHANGE_HAS_OPTIONAL, EXCHANGE_HAS_REQUIRED,
|
||||
SUPPORTED_EXCHANGES)
|
||||
from freqtrade.exchange.common import (
|
||||
BAD_EXCHANGES,
|
||||
EXCHANGE_HAS_OPTIONAL,
|
||||
EXCHANGE_HAS_REQUIRED,
|
||||
SUPPORTED_EXCHANGES,
|
||||
)
|
||||
from freqtrade.exchange.exchange_utils_timeframe import timeframe_to_minutes, timeframe_to_prev_date
|
||||
from freqtrade.types import ValidExchangesType
|
||||
from freqtrade.util import FtPrecise
|
||||
@@ -20,7 +33,8 @@ CcxtModuleType = Any
|
||||
|
||||
|
||||
def is_exchange_known_ccxt(
|
||||
exchange_name: str, ccxt_module: Optional[CcxtModuleType] = None) -> bool:
|
||||
exchange_name: str, ccxt_module: Optional[CcxtModuleType] = None
|
||||
) -> bool:
|
||||
return exchange_name in ccxt_exchanges(ccxt_module)
|
||||
|
||||
|
||||
@@ -46,13 +60,13 @@ def validate_exchange(exchange: str) -> Tuple[bool, str]:
|
||||
"""
|
||||
ex_mod = getattr(ccxt, exchange.lower())()
|
||||
result = True
|
||||
reason = ''
|
||||
reason = ""
|
||||
if not ex_mod or not ex_mod.has:
|
||||
return False, ''
|
||||
return False, ""
|
||||
missing = [
|
||||
k for k, v in EXCHANGE_HAS_REQUIRED.items()
|
||||
if ex_mod.has.get(k) is not True
|
||||
and not (all(ex_mod.has.get(x) for x in v))
|
||||
k
|
||||
for k, v in EXCHANGE_HAS_REQUIRED.items()
|
||||
if ex_mod.has.get(k) is not True and not (all(ex_mod.has.get(x) for x in v))
|
||||
]
|
||||
if missing:
|
||||
result = False
|
||||
@@ -62,7 +76,7 @@ def validate_exchange(exchange: str) -> Tuple[bool, str]:
|
||||
|
||||
if exchange.lower() in BAD_EXCHANGES:
|
||||
result = False
|
||||
reason = BAD_EXCHANGES.get(exchange.lower(), '')
|
||||
reason = BAD_EXCHANGES.get(exchange.lower(), "")
|
||||
|
||||
if missing_opt:
|
||||
reason += f"{'. ' if reason else ''}missing opt: {', '.join(missing_opt)}. "
|
||||
@@ -71,23 +85,26 @@ def validate_exchange(exchange: str) -> Tuple[bool, str]:
|
||||
|
||||
|
||||
def _build_exchange_list_entry(
|
||||
exchange_name: str, exchangeClasses: Dict[str, Any]) -> ValidExchangesType:
|
||||
exchange_name: str, exchangeClasses: Dict[str, Any]
|
||||
) -> ValidExchangesType:
|
||||
valid, comment = validate_exchange(exchange_name)
|
||||
result: ValidExchangesType = {
|
||||
'name': exchange_name,
|
||||
'valid': valid,
|
||||
'supported': exchange_name.lower() in SUPPORTED_EXCHANGES,
|
||||
'comment': comment,
|
||||
'trade_modes': [{'trading_mode': 'spot', 'margin_mode': ''}],
|
||||
"name": exchange_name,
|
||||
"valid": valid,
|
||||
"supported": exchange_name.lower() in SUPPORTED_EXCHANGES,
|
||||
"comment": comment,
|
||||
"trade_modes": [{"trading_mode": "spot", "margin_mode": ""}],
|
||||
}
|
||||
if resolved := exchangeClasses.get(exchange_name.lower()):
|
||||
supported_modes = [{'trading_mode': 'spot', 'margin_mode': ''}] + [
|
||||
{'trading_mode': tm.value, 'margin_mode': mm.value}
|
||||
for tm, mm in resolved['class']._supported_trading_mode_margin_pairs
|
||||
supported_modes = [{"trading_mode": "spot", "margin_mode": ""}] + [
|
||||
{"trading_mode": tm.value, "margin_mode": mm.value}
|
||||
for tm, mm in resolved["class"]._supported_trading_mode_margin_pairs
|
||||
]
|
||||
result.update({
|
||||
'trade_modes': supported_modes,
|
||||
})
|
||||
result.update(
|
||||
{
|
||||
"trade_modes": supported_modes,
|
||||
}
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
@@ -99,7 +116,7 @@ def list_available_exchanges(all_exchanges: bool) -> List[ValidExchangesType]:
|
||||
exchanges = ccxt_exchanges() if all_exchanges else available_exchanges()
|
||||
from freqtrade.resolvers.exchange_resolver import ExchangeResolver
|
||||
|
||||
subclassed = {e['name'].lower(): e for e in ExchangeResolver.search_all_objects({}, False)}
|
||||
subclassed = {e["name"].lower(): e for e in ExchangeResolver.search_all_objects({}, False)}
|
||||
|
||||
exchanges_valid: List[ValidExchangesType] = [
|
||||
_build_exchange_list_entry(e, subclassed) for e in exchanges
|
||||
@@ -109,7 +126,8 @@ def list_available_exchanges(all_exchanges: bool) -> List[ValidExchangesType]:
|
||||
|
||||
|
||||
def date_minus_candles(
|
||||
timeframe: str, candle_count: int, date: Optional[datetime] = None) -> datetime:
|
||||
timeframe: str, candle_count: int, date: Optional[datetime] = None
|
||||
) -> datetime:
|
||||
"""
|
||||
subtract X candles from a date.
|
||||
:param timeframe: timeframe in string format (e.g. "5m")
|
||||
@@ -133,7 +151,7 @@ def market_is_active(market: Dict) -> bool:
|
||||
# true then it's true. If it's undefined, then it's most likely true, but not 100% )"
|
||||
# See https://github.com/ccxt/ccxt/issues/4874,
|
||||
# https://github.com/ccxt/ccxt/issues/4075#issuecomment-434760520
|
||||
return market.get('active', True) is not False
|
||||
return market.get("active", True) is not False
|
||||
|
||||
|
||||
def amount_to_contracts(amount: float, contract_size: Optional[float]) -> float:
|
||||
@@ -163,8 +181,9 @@ def contracts_to_amount(num_contracts: float, contract_size: Optional[float]) ->
|
||||
return num_contracts
|
||||
|
||||
|
||||
def amount_to_precision(amount: float, amount_precision: Optional[float],
|
||||
precisionMode: Optional[int]) -> float:
|
||||
def amount_to_precision(
|
||||
amount: float, amount_precision: Optional[float], precisionMode: Optional[int]
|
||||
) -> float:
|
||||
"""
|
||||
Returns the amount to buy or sell to a precision the Exchange accepts
|
||||
Re-implementation of ccxt internal methods - ensuring we can test the result is correct
|
||||
@@ -179,17 +198,24 @@ def amount_to_precision(amount: float, amount_precision: Optional[float],
|
||||
if amount_precision is not None and precisionMode is not None:
|
||||
precision = int(amount_precision) if precisionMode != TICK_SIZE else amount_precision
|
||||
# precision must be an int for non-ticksize inputs.
|
||||
amount = float(decimal_to_precision(amount, rounding_mode=TRUNCATE,
|
||||
precision=precision,
|
||||
counting_mode=precisionMode,
|
||||
))
|
||||
amount = float(
|
||||
decimal_to_precision(
|
||||
amount,
|
||||
rounding_mode=TRUNCATE,
|
||||
precision=precision,
|
||||
counting_mode=precisionMode,
|
||||
)
|
||||
)
|
||||
|
||||
return amount
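(Editorial aside: rough expectations for the helper above, illustrative only; TICK_SIZE and DECIMAL_PLACES are the ccxt precision-mode constants already imported in this module.)

amount_to_precision(1.2388, 0.05, TICK_SIZE)     # -> 1.2   (truncated to the 0.05 tick)
amount_to_precision(1.2388, 3, DECIMAL_PLACES)   # -> 1.238 (extra digits cut, never rounded up)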
|
||||
|
||||
|
||||
def amount_to_contract_precision(
|
||||
amount, amount_precision: Optional[float], precisionMode: Optional[int],
|
||||
contract_size: Optional[float]) -> float:
|
||||
amount,
|
||||
amount_precision: Optional[float],
|
||||
precisionMode: Optional[int],
|
||||
contract_size: Optional[float],
|
||||
) -> float:
|
||||
"""
|
||||
Returns the amount to buy or sell to a precision the Exchange accepts
|
||||
including calculation to and from contracts.
|
||||
@@ -222,23 +248,25 @@ def __price_to_precision_significant_digits(
|
||||
from decimal import ROUND_DOWN as dec_ROUND_DOWN
|
||||
from decimal import ROUND_UP as dec_ROUND_UP
|
||||
from decimal import Decimal
|
||||
|
||||
dec = Decimal(str(price))
|
||||
string = f'{dec:f}'
|
||||
string = f"{dec:f}"
|
||||
precision = round(price_precision)
|
||||
|
||||
q = precision - dec.adjusted() - 1
|
||||
sigfig = Decimal('10') ** -q
|
||||
sigfig = Decimal("10") ** -q
|
||||
if q < 0:
|
||||
string_to_precision = string[:precision]
|
||||
# string_to_precision is '' when we have zero precision
|
||||
below = sigfig * Decimal(string_to_precision if string_to_precision else '0')
|
||||
below = sigfig * Decimal(string_to_precision if string_to_precision else "0")
|
||||
above = below + sigfig
|
||||
res = above if rounding_mode == ROUND_UP else below
|
||||
precise = f'{res:f}'
|
||||
precise = f"{res:f}"
|
||||
else:
|
||||
precise = '{:f}'.format(dec.quantize(
|
||||
sigfig,
|
||||
rounding=dec_ROUND_DOWN if rounding_mode == ROUND_DOWN else dec_ROUND_UP)
|
||||
precise = "{:f}".format(
|
||||
dec.quantize(
|
||||
sigfig, rounding=dec_ROUND_DOWN if rounding_mode == ROUND_DOWN else dec_ROUND_UP
|
||||
)
|
||||
)
|
||||
return float(precise)
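(Editorial aside: a worked instance of the quantize branch above, assuming price_precision counts significant digits, as the helper's name suggests.)

from decimal import ROUND_DOWN as dec_ROUND_DOWN
from decimal import ROUND_UP as dec_ROUND_UP
from decimal import Decimal

dec = Decimal("0.012345")
precision = 3                                  # three significant digits
q = precision - dec.adjusted() - 1             # 3 - (-2) - 1 = 4
sigfig = Decimal("10") ** -q                   # 0.0001
dec.quantize(sigfig, rounding=dec_ROUND_DOWN)  # -> Decimal("0.0123")
dec.quantize(sigfig, rounding=dec_ROUND_UP)    # -> Decimal("0.0124")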
|
||||
|
||||
@@ -268,10 +296,14 @@ def price_to_precision(
|
||||
if price_precision is not None and precisionMode is not None:
|
||||
if rounding_mode not in (ROUND_UP, ROUND_DOWN):
|
||||
# Use CCXT code where possible.
|
||||
return float(decimal_to_precision(price, rounding_mode=rounding_mode,
|
||||
precision=price_precision,
|
||||
counting_mode=precisionMode
|
||||
))
|
||||
return float(
|
||||
decimal_to_precision(
|
||||
price,
|
||||
rounding_mode=rounding_mode,
|
||||
precision=price_precision,
|
||||
counting_mode=precisionMode,
|
||||
)
|
||||
)
|
||||
|
||||
if precisionMode == TICK_SIZE:
|
||||
precision = FtPrecise(price_precision)
|
||||
@@ -285,7 +317,6 @@ def price_to_precision(
|
||||
return round(float(str(res)), 14)
|
||||
return price
|
||||
elif precisionMode == DECIMAL_PLACES:
|
||||
|
||||
ndigits = round(price_precision)
|
||||
ticks = price * (10**ndigits)
|
||||
if rounding_mode == ROUND_UP:
|
||||
|
||||
@@ -36,16 +36,16 @@ def timeframe_to_resample_freq(timeframe: str) -> str:
|
||||
form ('1m', '5m', '1h', '1d', '1w', etc.) to the resample frequency
|
||||
used by pandas ('1T', '5T', '1H', '1D', '1W', etc.)
|
||||
"""
|
||||
if timeframe == '1y':
|
||||
return '1YS'
|
||||
if timeframe == "1y":
|
||||
return "1YS"
|
||||
timeframe_seconds = timeframe_to_seconds(timeframe)
|
||||
timeframe_minutes = timeframe_seconds // 60
|
||||
resample_interval = f'{timeframe_seconds}s'
|
||||
resample_interval = f"{timeframe_seconds}s"
|
||||
if 10000 < timeframe_minutes < 43200:
|
||||
resample_interval = '1W-MON'
|
||||
resample_interval = "1W-MON"
|
||||
elif timeframe_minutes >= 43200 and timeframe_minutes < 525600:
|
||||
# Monthly candles need special treatment to stick to the 1st of the month
|
||||
resample_interval = f'{timeframe}S'
|
||||
resample_interval = f"{timeframe}S"
|
||||
elif timeframe_minutes > 43200:
|
||||
resample_interval = timeframe
|
||||
return resample_interval
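(Editorial aside: assuming ccxt's usual timeframe parsing, the mapping above yields, for example:)

timeframe_to_resample_freq("5m")   # -> "300s"
timeframe_to_resample_freq("1d")   # -> "86400s"
timeframe_to_resample_freq("1w")   # -> "1W-MON" (weekly candles anchored to Monday)
timeframe_to_resample_freq("1M")   # -> "1MS"    (monthly candles anchored to the 1st)
timeframe_to_resample_freq("1y")   # -> "1YS"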
|
||||
@@ -62,8 +62,7 @@ def timeframe_to_prev_date(timeframe: str, date: Optional[datetime] = None) -> d
|
||||
if not date:
|
||||
date = datetime.now(timezone.utc)
|
||||
|
||||
new_timestamp = ccxt.Exchange.round_timeframe(
|
||||
timeframe, dt_ts(date), ROUND_DOWN) // 1000
|
||||
new_timestamp = ccxt.Exchange.round_timeframe(timeframe, dt_ts(date), ROUND_DOWN) // 1000
|
||||
return dt_from_ts(new_timestamp)
|
||||
|
||||
|
||||
@@ -76,6 +75,5 @@ def timeframe_to_next_date(timeframe: str, date: Optional[datetime] = None) -> d
|
||||
"""
|
||||
if not date:
|
||||
date = datetime.now(timezone.utc)
|
||||
new_timestamp = ccxt.Exchange.round_timeframe(
|
||||
timeframe, dt_ts(date), ROUND_UP) // 1000
|
||||
new_timestamp = ccxt.Exchange.round_timeframe(timeframe, dt_ts(date), ROUND_UP) // 1000
|
||||
return dt_from_ts(new_timestamp)
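(Editorial aside: taken together, the two helpers floor and ceil a timestamp to candle boundaries; for instance, with UTC datetimes assumed:)

from datetime import datetime, timezone

d = datetime(2024, 5, 1, 12, 3, tzinfo=timezone.utc)
timeframe_to_prev_date("5m", d)   # -> 2024-05-01 12:00:00+00:00 (current candle open)
timeframe_to_next_date("5m", d)   # -> 2024-05-01 12:05:00+00:00 (next candle open)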
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
""" Gate.io exchange subclass """
|
||||
"""Gate.io exchange subclass"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
@@ -24,7 +25,7 @@ class Gate(Exchange):
|
||||
|
||||
_ft_has: Dict = {
|
||||
"ohlcv_candle_limit": 1000,
|
||||
"order_time_in_force": ['GTC', 'IOC'],
|
||||
"order_time_in_force": ["GTC", "IOC"],
|
||||
"stoploss_on_exchange": True,
|
||||
"stoploss_order_types": {"limit": "limit"},
|
||||
"stop_price_param": "stopPrice",
|
||||
@@ -51,13 +52,13 @@ class Gate(Exchange):
|
||||
]
|
||||
|
||||
def _get_params(
|
||||
self,
|
||||
side: BuySell,
|
||||
ordertype: str,
|
||||
leverage: float,
|
||||
reduceOnly: bool,
|
||||
time_in_force: str = 'GTC',
|
||||
) -> Dict:
|
||||
self,
|
||||
side: BuySell,
|
||||
ordertype: str,
|
||||
leverage: float,
|
||||
reduceOnly: bool,
|
||||
time_in_force: str = "GTC",
|
||||
) -> Dict:
|
||||
params = super()._get_params(
|
||||
side=side,
|
||||
ordertype=ordertype,
|
||||
@@ -65,13 +66,14 @@ class Gate(Exchange):
|
||||
reduceOnly=reduceOnly,
|
||||
time_in_force=time_in_force,
|
||||
)
|
||||
if ordertype == 'market' and self.trading_mode == TradingMode.FUTURES:
|
||||
params['type'] = 'market'
|
||||
params.update({'timeInForce': 'IOC'})
|
||||
if ordertype == "market" and self.trading_mode == TradingMode.FUTURES:
|
||||
params["type"] = "market"
|
||||
params.update({"timeInForce": "IOC"})
|
||||
return params
|
||||
|
||||
def get_trades_for_order(self, order_id: str, pair: str, since: datetime,
|
||||
params: Optional[Dict] = None) -> List:
|
||||
def get_trades_for_order(
|
||||
self, order_id: str, pair: str, since: datetime, params: Optional[Dict] = None
|
||||
) -> List:
|
||||
trades = super().get_trades_for_order(order_id, pair, since, params)
|
||||
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
@@ -84,45 +86,38 @@ class Gate(Exchange):
|
||||
pair_fees = self._trading_fees.get(pair, {})
|
||||
if pair_fees:
|
||||
for idx, trade in enumerate(trades):
|
||||
fee = trade.get('fee', {})
|
||||
if fee and fee.get('cost') is None:
|
||||
takerOrMaker = trade.get('takerOrMaker', 'taker')
|
||||
fee = trade.get("fee", {})
|
||||
if fee and fee.get("cost") is None:
|
||||
takerOrMaker = trade.get("takerOrMaker", "taker")
|
||||
if pair_fees.get(takerOrMaker) is not None:
|
||||
trades[idx]['fee'] = {
|
||||
'currency': self.get_pair_quote_currency(pair),
|
||||
'cost': trade['cost'] * pair_fees[takerOrMaker],
|
||||
'rate': pair_fees[takerOrMaker],
|
||||
trades[idx]["fee"] = {
|
||||
"currency": self.get_pair_quote_currency(pair),
|
||||
"cost": trade["cost"] * pair_fees[takerOrMaker],
|
||||
"rate": pair_fees[takerOrMaker],
|
||||
}
|
||||
return trades
|
||||
|
||||
def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
|
||||
return safe_value_fallback2(order, order, 'id_stop', 'id')
|
||||
return safe_value_fallback2(order, order, "id_stop", "id")
|
||||
|
||||
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
|
||||
order = self.fetch_order(
|
||||
order_id=order_id,
|
||||
pair=pair,
|
||||
params={'stop': True}
|
||||
)
|
||||
if order.get('status', 'open') == 'closed':
|
||||
order = self.fetch_order(order_id=order_id, pair=pair, params={"stop": True})
|
||||
if order.get("status", "open") == "closed":
|
||||
# Places a real order - which we need to fetch explicitly.
|
||||
val = 'trade_id' if self.trading_mode == TradingMode.FUTURES else 'fired_order_id'
|
||||
val = "trade_id" if self.trading_mode == TradingMode.FUTURES else "fired_order_id"
|
||||
|
||||
if new_orderid := order.get('info', {}).get(val):
|
||||
if new_orderid := order.get("info", {}).get(val):
|
||||
order1 = self.fetch_order(order_id=new_orderid, pair=pair, params=params)
|
||||
order1['id_stop'] = order1['id']
|
||||
order1['id'] = order_id
|
||||
order1['type'] = 'stoploss'
|
||||
order1['stopPrice'] = order.get('stopPrice')
|
||||
order1['status_stop'] = 'triggered'
|
||||
order1["id_stop"] = order1["id"]
|
||||
order1["id"] = order_id
|
||||
order1["type"] = "stoploss"
|
||||
order1["stopPrice"] = order.get("stopPrice")
|
||||
order1["status_stop"] = "triggered"
|
||||
|
||||
return order1
|
||||
return order
|
||||
|
||||
def cancel_stoploss_order(
|
||||
self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
|
||||
return self.cancel_order(
|
||||
order_id=order_id,
|
||||
pair=pair,
|
||||
params={'stop': True}
|
||||
)
|
||||
self, order_id: str, pair: str, params: Optional[Dict] = None
|
||||
) -> Dict:
|
||||
return self.cancel_order(order_id=order_id, pair=pair, params={"stop": True})
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
""" HTX exchange subclass """
|
||||
"""HTX exchange subclass"""
|
||||
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
@@ -26,10 +27,11 @@ class Htx(Exchange):
|
||||
}
|
||||
|
||||
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
|
||||
|
||||
params = self._params.copy()
|
||||
params.update({
|
||||
"stopPrice": stop_price,
|
||||
"operator": "lte",
|
||||
})
|
||||
params.update(
|
||||
{
|
||||
"stopPrice": stop_price,
|
||||
"operator": "lte",
|
||||
}
|
||||
)
|
||||
return params
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
""" Idex exchange subclass """
|
||||
"""Idex exchange subclass"""
|
||||
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
""" Kraken exchange subclass """
|
||||
"""Kraken exchange subclass"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
@@ -18,7 +19,6 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Kraken(Exchange):
|
||||
|
||||
_params: Dict = {"trading_agreement": "agree"}
|
||||
_ft_has: Dict = {
|
||||
"stoploss_on_exchange": True,
|
||||
@@ -47,18 +47,17 @@ class Kraken(Exchange):
|
||||
"""
|
||||
parent_check = super().market_is_tradable(market)
|
||||
|
||||
return (parent_check and
|
||||
market.get('darkpool', False) is False)
|
||||
return parent_check and market.get("darkpool", False) is False
|
||||
|
||||
def get_tickers(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Tickers:
|
||||
# Only fetch tickers for current stake currency
|
||||
# Otherwise the request for kraken becomes too large.
|
||||
symbols = list(self.get_markets(quote_currencies=[self._config['stake_currency']]))
|
||||
symbols = list(self.get_markets(quote_currencies=[self._config["stake_currency"]]))
|
||||
return super().get_tickers(symbols=symbols, cached=cached)
|
||||
|
||||
@retrier
|
||||
def get_balances(self) -> dict:
|
||||
if self._config['dry_run']:
|
||||
if self._config["dry_run"]:
|
||||
return {}
|
||||
|
||||
try:
|
||||
@@ -70,23 +69,28 @@ class Kraken(Exchange):
|
||||
balances.pop("used", None)
|
||||
|
||||
orders = self._api.fetch_open_orders()
|
||||
order_list = [(x["symbol"].split("/")[0 if x["side"] == "sell" else 1],
|
||||
x["remaining"] if x["side"] == "sell" else x["remaining"] * x["price"],
|
||||
# Don't remove the below comment, this can be important for debugging
|
||||
# x["side"], x["amount"],
|
||||
) for x in orders]
|
||||
order_list = [
|
||||
(
|
||||
x["symbol"].split("/")[0 if x["side"] == "sell" else 1],
|
||||
x["remaining"] if x["side"] == "sell" else x["remaining"] * x["price"],
|
||||
# Don't remove the below comment, this can be important for debugging
|
||||
# x["side"], x["amount"],
|
||||
)
|
||||
for x in orders
|
||||
]
|
||||
for bal in balances:
|
||||
if not isinstance(balances[bal], dict):
|
||||
continue
|
||||
balances[bal]['used'] = sum(order[1] for order in order_list if order[0] == bal)
|
||||
balances[bal]['free'] = balances[bal]['total'] - balances[bal]['used']
|
||||
balances[bal]["used"] = sum(order[1] for order in order_list if order[0] == bal)
|
||||
balances[bal]["free"] = balances[bal]["total"] - balances[bal]["used"]
|
||||
|
||||
return balances
|
||||
except ccxt.DDoSProtection as e:
|
||||
raise DDosProtection(e) from e
|
||||
except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
|
||||
raise TemporaryError(
|
||||
f'Could not get balance due to {e.__class__.__name__}. Message: {e}') from e
|
||||
f"Could not get balance due to {e.__class__.__name__}. Message: {e}"
|
||||
) from e
|
||||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
@@ -108,7 +112,7 @@ class Kraken(Exchange):
|
||||
ordertype: str,
|
||||
leverage: float,
|
||||
reduceOnly: bool,
|
||||
time_in_force: str = 'GTC'
|
||||
time_in_force: str = "GTC",
|
||||
) -> Dict:
|
||||
params = super()._get_params(
|
||||
side=side,
|
||||
@@ -118,10 +122,10 @@ class Kraken(Exchange):
|
||||
time_in_force=time_in_force,
|
||||
)
|
||||
if leverage > 1.0:
|
||||
params['leverage'] = round(leverage)
|
||||
if time_in_force == 'PO':
|
||||
params.pop('timeInForce', None)
|
||||
params['postOnly'] = True
|
||||
params["leverage"] = round(leverage)
|
||||
if time_in_force == "PO":
|
||||
params.pop("timeInForce", None)
|
||||
params["postOnly"] = True
|
||||
return params
|
||||
|
||||
def calculate_funding_fees(
|
||||
@@ -131,7 +135,7 @@ class Kraken(Exchange):
|
||||
is_short: bool,
|
||||
open_date: datetime,
|
||||
close_date: datetime,
|
||||
time_in_ratio: Optional[float] = None
|
||||
time_in_ratio: Optional[float] = None,
|
||||
) -> float:
|
||||
"""
|
||||
# ! This method will always error when run by Freqtrade because time_in_ratio is never
|
||||
@@ -149,12 +153,13 @@ class Kraken(Exchange):
|
||||
"""
|
||||
if not time_in_ratio:
|
||||
raise OperationalException(
|
||||
f"time_in_ratio is required for {self.name}._get_funding_fee")
|
||||
f"time_in_ratio is required for {self.name}._get_funding_fee"
|
||||
)
|
||||
fees: float = 0
|
||||
|
||||
if not df.empty:
|
||||
df = df[(df['date'] >= open_date) & (df['date'] <= close_date)]
|
||||
fees = sum(df['open_fund'] * df['open_mark'] * amount * time_in_ratio)
|
||||
df = df[(df["date"] >= open_date) & (df["date"] <= close_date)]
|
||||
fees = sum(df["open_fund"] * df["open_mark"] * amount * time_in_ratio)
|
||||
|
||||
return fees if is_short else -fees
|
||||
|
||||
@@ -164,14 +169,11 @@ class Kraken(Exchange):
|
||||
Applies only to fetch_trade_history by id.
|
||||
"""
|
||||
if len(trades) > 0:
|
||||
if (
|
||||
isinstance(trades[-1].get('info'), list)
|
||||
and len(trades[-1].get('info', [])) > 7
|
||||
):
|
||||
if isinstance(trades[-1].get("info"), list) and len(trades[-1].get("info", [])) > 7:
|
||||
# Trade response's "last" value.
|
||||
return trades[-1].get('info', [])[-1]
|
||||
return trades[-1].get("info", [])[-1]
|
||||
# Fall back to timestamp if info is somehow empty.
|
||||
return trades[-1].get('timestamp')
|
||||
return trades[-1].get("timestamp")
|
||||
return None
|
||||
|
||||
def _valid_trade_pagination_id(self, pair: str, from_id: str) -> bool:
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Kucoin exchange subclass."""
|
||||
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
@@ -26,32 +27,27 @@ class Kucoin(Exchange):
|
||||
"stoploss_order_types": {"limit": "limit", "market": "market"},
|
||||
"l2_limit_range": [20, 100],
|
||||
"l2_limit_range_required": False,
|
||||
"order_time_in_force": ['GTC', 'FOK', 'IOC'],
|
||||
"order_time_in_force": ["GTC", "FOK", "IOC"],
|
||||
"ohlcv_candle_limit": 1500,
|
||||
}
|
||||
|
||||
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
|
||||
|
||||
params = self._params.copy()
|
||||
params.update({
|
||||
'stopPrice': stop_price,
|
||||
'stop': 'loss'
|
||||
})
|
||||
params.update({"stopPrice": stop_price, "stop": "loss"})
|
||||
return params
|
||||
|
||||
def create_order(
|
||||
self,
|
||||
*,
|
||||
pair: str,
|
||||
ordertype: str,
|
||||
side: BuySell,
|
||||
amount: float,
|
||||
rate: float,
|
||||
leverage: float,
|
||||
reduceOnly: bool = False,
|
||||
time_in_force: str = 'GTC',
|
||||
) -> Dict:
|
||||
|
||||
self,
|
||||
*,
|
||||
pair: str,
|
||||
ordertype: str,
|
||||
side: BuySell,
|
||||
amount: float,
|
||||
rate: float,
|
||||
leverage: float,
|
||||
reduceOnly: bool = False,
|
||||
time_in_force: str = "GTC",
|
||||
) -> Dict:
|
||||
res = super().create_order(
|
||||
pair=pair,
|
||||
ordertype=ordertype,
|
||||
@@ -66,7 +62,7 @@ class Kucoin(Exchange):
|
||||
# ccxt returns status = 'closed' at the moment - which is information ccxt invented.
|
||||
# Since we rely on status heavily, we must set it to 'open' here.
|
||||
# ref: https://github.com/ccxt/ccxt/pull/16674, (https://github.com/ccxt/ccxt/pull/16553)
|
||||
if not self._config['dry_run']:
|
||||
res['type'] = ordertype
|
||||
res['status'] = 'open'
|
||||
if not self._config["dry_run"]:
|
||||
res["type"] = ordertype
|
||||
res["status"] = "open"
|
||||
return res
|
||||
|
||||
@@ -6,8 +6,12 @@ import ccxt
|
||||
|
||||
from freqtrade.constants import BuySell
|
||||
from freqtrade.enums import CandleType, MarginMode, PriceType, TradingMode
|
||||
from freqtrade.exceptions import (DDosProtection, OperationalException, RetryableOrderError,
|
||||
TemporaryError)
|
||||
from freqtrade.exceptions import (
|
||||
DDosProtection,
|
||||
OperationalException,
|
||||
RetryableOrderError,
|
||||
TemporaryError,
|
||||
)
|
||||
from freqtrade.exchange import Exchange, date_minus_candles
|
||||
from freqtrade.exchange.common import retrier
|
||||
from freqtrade.misc import safe_value_fallback2
|
||||
@@ -37,7 +41,7 @@ class Okx(Exchange):
|
||||
PriceType.LAST: "last",
|
||||
PriceType.MARK: "index",
|
||||
PriceType.INDEX: "mark",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
|
||||
@@ -49,10 +53,11 @@ class Okx(Exchange):
|
||||
|
||||
net_only = True
|
||||
|
||||
_ccxt_params: Dict = {'options': {'brokerId': 'ffb5405ad327SUDE'}}
|
||||
_ccxt_params: Dict = {"options": {"brokerId": "ffb5405ad327SUDE"}}
|
||||
|
||||
def ohlcv_candle_limit(
|
||||
self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None) -> int:
|
||||
self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None
|
||||
) -> int:
|
||||
"""
|
||||
Exchange ohlcv candle limit
|
||||
OKX has the following behaviour:
|
||||
@@ -64,9 +69,8 @@ class Okx(Exchange):
|
||||
:param since_ms: Starting timestamp
|
||||
:return: Candle limit as integer
|
||||
"""
|
||||
if (
|
||||
candle_type in (CandleType.FUTURES, CandleType.SPOT) and
|
||||
(not since_ms or since_ms > (date_minus_candles(timeframe, 300).timestamp() * 1000))
|
||||
if candle_type in (CandleType.FUTURES, CandleType.SPOT) and (
|
||||
not since_ms or since_ms > (date_minus_candles(timeframe, 300).timestamp() * 1000)
|
||||
):
|
||||
return 300
|
||||
|
||||
@@ -80,29 +84,29 @@ class Okx(Exchange):
|
||||
Must be overridden in child methods if required.
|
||||
"""
|
||||
try:
|
||||
if self.trading_mode == TradingMode.FUTURES and not self._config['dry_run']:
|
||||
if self.trading_mode == TradingMode.FUTURES and not self._config["dry_run"]:
|
||||
accounts = self._api.fetch_accounts()
|
||||
self._log_exchange_response('fetch_accounts', accounts)
|
||||
self._log_exchange_response("fetch_accounts", accounts)
|
||||
if len(accounts) > 0:
|
||||
self.net_only = accounts[0].get('info', {}).get('posMode') == 'net_mode'
|
||||
self.net_only = accounts[0].get("info", {}).get("posMode") == "net_mode"
|
||||
except ccxt.DDoSProtection as e:
|
||||
raise DDosProtection(e) from e
|
||||
except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
|
||||
raise TemporaryError(
|
||||
f'Error in additional_exchange_init due to {e.__class__.__name__}. Message: {e}'
|
||||
) from e
|
||||
f"Error in additional_exchange_init due to {e.__class__.__name__}. Message: {e}"
|
||||
) from e
|
||||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
def _get_posSide(self, side: BuySell, reduceOnly: bool):
|
||||
if self.net_only:
|
||||
return 'net'
|
||||
return "net"
|
||||
if not reduceOnly:
|
||||
# Enter
|
||||
return 'long' if side == 'buy' else 'short'
|
||||
return "long" if side == "buy" else "short"
|
||||
else:
|
||||
# Exit
|
||||
return 'long' if side == 'sell' else 'short'
|
||||
return "long" if side == "sell" else "short"
|
||||
|
||||
def _get_params(
|
||||
self,
|
||||
@@ -110,7 +114,7 @@ class Okx(Exchange):
|
||||
ordertype: str,
|
||||
leverage: float,
|
||||
reduceOnly: bool,
|
||||
time_in_force: str = 'GTC',
|
||||
time_in_force: str = "GTC",
|
||||
) -> Dict:
|
||||
params = super()._get_params(
|
||||
side=side,
|
||||
@@ -120,18 +124,21 @@ class Okx(Exchange):
|
||||
time_in_force=time_in_force,
|
||||
)
|
||||
if self.trading_mode == TradingMode.FUTURES and self.margin_mode:
|
||||
params['tdMode'] = self.margin_mode.value
|
||||
params['posSide'] = self._get_posSide(side, reduceOnly)
|
||||
params["tdMode"] = self.margin_mode.value
|
||||
params["posSide"] = self._get_posSide(side, reduceOnly)
|
||||
return params
|
||||
|
||||
def __fetch_leverage_already_set(self, pair: str, leverage: float, side: BuySell) -> bool:
|
||||
try:
|
||||
res_lev = self._api.fetch_leverage(symbol=pair, params={
|
||||
res_lev = self._api.fetch_leverage(
|
||||
symbol=pair,
|
||||
params={
|
||||
"mgnMode": self.margin_mode.value,
|
||||
"posSide": self._get_posSide(side, False),
|
||||
})
|
||||
self._log_exchange_response('get_leverage', res_lev)
|
||||
already_set = all(float(x['lever']) == leverage for x in res_lev['data'])
|
||||
},
|
||||
)
|
||||
self._log_exchange_response("get_leverage", res_lev)
|
||||
already_set = all(float(x["lever"]) == leverage for x in res_lev["data"])
|
||||
return already_set
|
||||
|
||||
except ccxt.BaseError:
|
||||
@@ -148,8 +155,9 @@ class Okx(Exchange):
|
||||
params={
|
||||
"mgnMode": self.margin_mode.value,
|
||||
"posSide": self._get_posSide(side, False),
|
||||
})
|
||||
self._log_exchange_response('set_leverage', res)
|
||||
},
|
||||
)
|
||||
self._log_exchange_response("set_leverage", res)
|
||||
|
||||
except ccxt.DDoSProtection as e:
|
||||
raise DDosProtection(e) from e
|
||||
@@ -157,84 +165,81 @@ class Okx(Exchange):
|
||||
already_set = self.__fetch_leverage_already_set(pair, leverage, side)
|
||||
if not already_set:
|
||||
raise TemporaryError(
|
||||
f'Could not set leverage due to {e.__class__.__name__}. Message: {e}'
|
||||
) from e
|
||||
f"Could not set leverage due to {e.__class__.__name__}. Message: {e}"
|
||||
) from e
|
||||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
def get_max_pair_stake_amount(
|
||||
self,
|
||||
pair: str,
|
||||
price: float,
|
||||
leverage: float = 1.0
|
||||
) -> float:
|
||||
|
||||
def get_max_pair_stake_amount(self, pair: str, price: float, leverage: float = 1.0) -> float:
|
||||
if self.trading_mode == TradingMode.SPOT:
|
||||
return float('inf') # Not actually inf, but this probably won't matter for SPOT
|
||||
return float("inf") # Not actually inf, but this probably won't matter for SPOT
|
||||
|
||||
if pair not in self._leverage_tiers:
|
||||
return float('inf')
|
||||
return float("inf")
|
||||
|
||||
pair_tiers = self._leverage_tiers[pair]
|
||||
return pair_tiers[-1]['maxNotional'] / leverage
|
||||
return pair_tiers[-1]["maxNotional"] / leverage
|
||||
|
||||
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
|
||||
params = super()._get_stop_params(side, ordertype, stop_price)
|
||||
if self.trading_mode == TradingMode.FUTURES and self.margin_mode:
|
||||
params['tdMode'] = self.margin_mode.value
|
||||
params['posSide'] = self._get_posSide(side, True)
|
||||
params["tdMode"] = self.margin_mode.value
|
||||
params["posSide"] = self._get_posSide(side, True)
|
||||
return params
|
||||
|
||||
def _convert_stop_order(self, pair: str, order_id: str, order: Dict) -> Dict:
|
||||
if (
|
||||
order.get('status', 'open') == 'closed'
|
||||
and (real_order_id := order.get('info', {}).get('ordId')) is not None
|
||||
order.get("status", "open") == "closed"
|
||||
and (real_order_id := order.get("info", {}).get("ordId")) is not None
|
||||
):
|
||||
# Once a order triggered, we fetch the regular followup order.
|
||||
order_reg = self.fetch_order(real_order_id, pair)
|
||||
self._log_exchange_response('fetch_stoploss_order1', order_reg)
|
||||
order_reg['id_stop'] = order_reg['id']
|
||||
order_reg['id'] = order_id
|
||||
order_reg['type'] = 'stoploss'
|
||||
order_reg['status_stop'] = 'triggered'
|
||||
self._log_exchange_response("fetch_stoploss_order1", order_reg)
|
||||
order_reg["id_stop"] = order_reg["id"]
|
||||
order_reg["id"] = order_id
|
||||
order_reg["type"] = "stoploss"
|
||||
order_reg["status_stop"] = "triggered"
|
||||
return order_reg
|
||||
order = self._order_contracts_to_amount(order)
|
||||
order['type'] = 'stoploss'
|
||||
order["type"] = "stoploss"
|
||||
return order
|
||||
|
||||
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
|
||||
if self._config['dry_run']:
|
||||
if self._config["dry_run"]:
|
||||
return self.fetch_dry_run_order(order_id)
|
||||
|
||||
try:
|
||||
params1 = {'stop': True}
|
||||
params1 = {"stop": True}
|
||||
order_reg = self._api.fetch_order(order_id, pair, params=params1)
|
||||
self._log_exchange_response('fetch_stoploss_order', order_reg)
|
||||
self._log_exchange_response("fetch_stoploss_order", order_reg)
|
||||
return self._convert_stop_order(pair, order_id, order_reg)
|
||||
except ccxt.OrderNotFound:
|
||||
pass
|
||||
params2 = {'stop': True, 'ordType': 'conditional'}
|
||||
for method in (self._api.fetch_open_orders, self._api.fetch_closed_orders,
|
||||
self._api.fetch_canceled_orders):
|
||||
params2 = {"stop": True, "ordType": "conditional"}
|
||||
for method in (
|
||||
self._api.fetch_open_orders,
|
||||
self._api.fetch_closed_orders,
|
||||
self._api.fetch_canceled_orders,
|
||||
):
|
||||
try:
|
||||
orders = method(pair, params=params2)
|
||||
orders_f = [order for order in orders if order['id'] == order_id]
|
||||
orders_f = [order for order in orders if order["id"] == order_id]
|
||||
if orders_f:
|
||||
order = orders_f[0]
|
||||
return self._convert_stop_order(pair, order_id, order)
|
||||
except ccxt.BaseError:
|
||||
pass
|
||||
raise RetryableOrderError(
|
||||
f'StoplossOrder not found (pair: {pair} id: {order_id}).')
|
||||
raise RetryableOrderError(f"StoplossOrder not found (pair: {pair} id: {order_id}).")
|
||||
|
||||
def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
|
||||
if order.get('type', '') == 'stop':
|
||||
return safe_value_fallback2(order, order, 'id_stop', 'id')
|
||||
return order['id']
|
||||
if order.get("type", "") == "stop":
|
||||
return safe_value_fallback2(order, order, "id_stop", "id")
|
||||
return order["id"]
|
||||
|
||||
def cancel_stoploss_order(
|
||||
self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
|
||||
params1 = {'stop': True}
|
||||
self, order_id: str, pair: str, params: Optional[Dict] = None
|
||||
) -> Dict:
|
||||
params1 = {"stop": True}
|
||||
# 'ordType': 'conditional'
|
||||
#
|
||||
return self.cancel_order(
|
||||
@@ -247,10 +252,10 @@ class Okx(Exchange):
|
||||
orders = []
|
||||
|
||||
orders = self._api.fetch_closed_orders(pair, since=since_ms)
|
||||
if (since_ms < dt_ts(dt_now() - timedelta(days=6, hours=23))):
|
||||
if since_ms < dt_ts(dt_now() - timedelta(days=6, hours=23)):
|
||||
# Regular fetch_closed_orders only returns 7 days of data.
|
||||
# Force usage of "archive" endpoint, which returns 3 months of data.
|
||||
params = {'method': 'privateGetTradeOrdersHistoryArchive'}
|
||||
params = {"method": "privateGetTradeOrdersHistoryArchive"}
|
||||
orders_hist = self._api.fetch_closed_orders(pair, since=since_ms, params=params)
|
||||
orders.extend(orders_hist)
|
||||
|
||||
|
||||
@@ -19,6 +19,7 @@ class Base3ActionRLEnv(BaseEnvironment):
|
||||
"""
|
||||
Base class for a 3 action environment
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.actions = Actions
|
||||
@@ -73,11 +74,18 @@ class Base3ActionRLEnv(BaseEnvironment):
|
||||
|
||||
if trade_type is not None:
|
||||
self.trade_history.append(
|
||||
{'price': self.current_price(), 'index': self._current_tick,
|
||||
'type': trade_type, 'profit': self.get_unrealized_profit()})
|
||||
{
|
||||
"price": self.current_price(),
|
||||
"index": self._current_tick,
|
||||
"type": trade_type,
|
||||
"profit": self.get_unrealized_profit(),
|
||||
}
|
||||
)
|
||||
|
||||
if (self._total_profit < self.max_drawdown or
|
||||
self._total_unrealized_profit < self.max_drawdown):
|
||||
if (
|
||||
self._total_profit < self.max_drawdown
|
||||
or self._total_unrealized_profit < self.max_drawdown
|
||||
):
|
||||
self._done = True
|
||||
|
||||
self._position_history.append(self._position)
|
||||
@@ -89,7 +97,7 @@ class Base3ActionRLEnv(BaseEnvironment):
|
||||
total_profit=self._total_profit,
|
||||
position=self._position.value,
|
||||
trade_duration=self.get_trade_duration(),
|
||||
current_profit_pct=self.get_unrealized_profit()
|
||||
current_profit_pct=self.get_unrealized_profit(),
|
||||
)
|
||||
|
||||
observation = self._get_observation()
|
||||
@@ -109,10 +117,14 @@ class Base3ActionRLEnv(BaseEnvironment):
|
||||
return (
|
||||
(action == Actions.Buy.value and self._position == Positions.Neutral)
|
||||
or (action == Actions.Sell.value and self._position == Positions.Long)
|
||||
or (action == Actions.Sell.value and self._position == Positions.Neutral
|
||||
and self.can_short)
|
||||
or (action == Actions.Buy.value and self._position == Positions.Short
|
||||
and self.can_short)
|
||||
or (
|
||||
action == Actions.Sell.value
|
||||
and self._position == Positions.Neutral
|
||||
and self.can_short
|
||||
)
|
||||
or (
|
||||
action == Actions.Buy.value and self._position == Positions.Short and self.can_short
|
||||
)
|
||||
)
|
||||
|
||||
def _is_valid(self, action: int) -> bool:
|
||||
|
||||
@@ -20,6 +20,7 @@ class Base4ActionRLEnv(BaseEnvironment):
|
||||
"""
|
||||
Base class for a 4 action environment
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.actions = Actions
|
||||
@@ -52,7 +53,6 @@ class Base4ActionRLEnv(BaseEnvironment):
|
||||
|
||||
trade_type = None
|
||||
if self.is_tradesignal(action):
|
||||
|
||||
if action == Actions.Neutral.value:
|
||||
self._position = Positions.Neutral
|
||||
trade_type = "neutral"
|
||||
@@ -75,11 +75,18 @@ class Base4ActionRLEnv(BaseEnvironment):
|
||||
|
||||
if trade_type is not None:
|
||||
self.trade_history.append(
|
||||
{'price': self.current_price(), 'index': self._current_tick,
|
||||
'type': trade_type, 'profit': self.get_unrealized_profit()})
|
||||
{
|
||||
"price": self.current_price(),
|
||||
"index": self._current_tick,
|
||||
"type": trade_type,
|
||||
"profit": self.get_unrealized_profit(),
|
||||
}
|
||||
)
|
||||
|
||||
if (self._total_profit < self.max_drawdown or
|
||||
self._total_unrealized_profit < self.max_drawdown):
|
||||
if (
|
||||
self._total_profit < self.max_drawdown
|
||||
or self._total_unrealized_profit < self.max_drawdown
|
||||
):
|
||||
self._done = True
|
||||
|
||||
self._position_history.append(self._position)
|
||||
@@ -91,7 +98,7 @@ class Base4ActionRLEnv(BaseEnvironment):
|
||||
total_profit=self._total_profit,
|
||||
position=self._position.value,
|
||||
trade_duration=self.get_trade_duration(),
|
||||
current_profit_pct=self.get_unrealized_profit()
|
||||
current_profit_pct=self.get_unrealized_profit(),
|
||||
)
|
||||
|
||||
observation = self._get_observation()
|
||||
@@ -108,14 +115,16 @@ class Base4ActionRLEnv(BaseEnvironment):
|
||||
Determine if the signal is a trade signal
|
||||
e.g.: agent wants a Actions.Long_exit while it is in a Positions.short
|
||||
"""
|
||||
return not ((action == Actions.Neutral.value and self._position == Positions.Neutral) or
|
||||
(action == Actions.Neutral.value and self._position == Positions.Short) or
|
||||
(action == Actions.Neutral.value and self._position == Positions.Long) or
|
||||
(action == Actions.Short_enter.value and self._position == Positions.Short) or
|
||||
(action == Actions.Short_enter.value and self._position == Positions.Long) or
|
||||
(action == Actions.Exit.value and self._position == Positions.Neutral) or
|
||||
(action == Actions.Long_enter.value and self._position == Positions.Long) or
|
||||
(action == Actions.Long_enter.value and self._position == Positions.Short))
|
||||
return not (
|
||||
(action == Actions.Neutral.value and self._position == Positions.Neutral)
|
||||
or (action == Actions.Neutral.value and self._position == Positions.Short)
|
||||
or (action == Actions.Neutral.value and self._position == Positions.Long)
|
||||
or (action == Actions.Short_enter.value and self._position == Positions.Short)
|
||||
or (action == Actions.Short_enter.value and self._position == Positions.Long)
|
||||
or (action == Actions.Exit.value and self._position == Positions.Neutral)
|
||||
or (action == Actions.Long_enter.value and self._position == Positions.Long)
|
||||
or (action == Actions.Long_enter.value and self._position == Positions.Short)
|
||||
)
|
||||
|
||||
def _is_valid(self, action: int) -> bool:
|
||||
"""
|
||||
|
||||
@@ -21,6 +21,7 @@ class Base5ActionRLEnv(BaseEnvironment):
|
||||
"""
|
||||
Base class for a 5 action environment
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.actions = Actions
|
||||
@@ -53,7 +54,6 @@ class Base5ActionRLEnv(BaseEnvironment):
|
||||
|
||||
trade_type = None
|
||||
if self.is_tradesignal(action):
|
||||
|
||||
if action == Actions.Neutral.value:
|
||||
self._position = Positions.Neutral
|
||||
trade_type = "neutral"
|
||||
@@ -81,11 +81,18 @@ class Base5ActionRLEnv(BaseEnvironment):
|
||||
|
||||
if trade_type is not None:
|
||||
self.trade_history.append(
|
||||
{'price': self.current_price(), 'index': self._current_tick,
|
||||
'type': trade_type, 'profit': self.get_unrealized_profit()})
|
||||
{
|
||||
"price": self.current_price(),
|
||||
"index": self._current_tick,
|
||||
"type": trade_type,
|
||||
"profit": self.get_unrealized_profit(),
|
||||
}
|
||||
)
|
||||
|
||||
if (self._total_profit < self.max_drawdown or
|
||||
self._total_unrealized_profit < self.max_drawdown):
|
||||
if (
|
||||
self._total_profit < self.max_drawdown
|
||||
or self._total_unrealized_profit < self.max_drawdown
|
||||
):
|
||||
self._done = True
|
||||
|
||||
self._position_history.append(self._position)
|
||||
@@ -97,7 +104,7 @@ class Base5ActionRLEnv(BaseEnvironment):
|
||||
total_profit=self._total_profit,
|
||||
position=self._position.value,
|
||||
trade_duration=self.get_trade_duration(),
|
||||
current_profit_pct=self.get_unrealized_profit()
|
||||
current_profit_pct=self.get_unrealized_profit(),
|
||||
)
|
||||
|
||||
observation = self._get_observation()
|
||||
@@ -113,17 +120,19 @@ class Base5ActionRLEnv(BaseEnvironment):
|
||||
Determine if the signal is a trade signal
|
||||
e.g.: agent wants a Actions.Long_exit while it is in a Positions.short
|
||||
"""
|
||||
return not ((action == Actions.Neutral.value and self._position == Positions.Neutral) or
|
||||
(action == Actions.Neutral.value and self._position == Positions.Short) or
|
||||
(action == Actions.Neutral.value and self._position == Positions.Long) or
|
||||
(action == Actions.Short_enter.value and self._position == Positions.Short) or
|
||||
(action == Actions.Short_enter.value and self._position == Positions.Long) or
|
||||
(action == Actions.Short_exit.value and self._position == Positions.Long) or
|
||||
(action == Actions.Short_exit.value and self._position == Positions.Neutral) or
|
||||
(action == Actions.Long_enter.value and self._position == Positions.Long) or
|
||||
(action == Actions.Long_enter.value and self._position == Positions.Short) or
|
||||
(action == Actions.Long_exit.value and self._position == Positions.Short) or
|
||||
(action == Actions.Long_exit.value and self._position == Positions.Neutral))
|
||||
return not (
|
||||
(action == Actions.Neutral.value and self._position == Positions.Neutral)
|
||||
or (action == Actions.Neutral.value and self._position == Positions.Short)
|
||||
or (action == Actions.Neutral.value and self._position == Positions.Long)
|
||||
or (action == Actions.Short_enter.value and self._position == Positions.Short)
|
||||
or (action == Actions.Short_enter.value and self._position == Positions.Long)
|
||||
or (action == Actions.Short_exit.value and self._position == Positions.Long)
|
||||
or (action == Actions.Short_exit.value and self._position == Positions.Neutral)
|
||||
or (action == Actions.Long_enter.value and self._position == Positions.Long)
|
||||
or (action == Actions.Long_enter.value and self._position == Positions.Short)
|
||||
or (action == Actions.Long_exit.value and self._position == Positions.Short)
|
||||
or (action == Actions.Long_exit.value and self._position == Positions.Neutral)
|
||||
)
|
||||
|
||||
def _is_valid(self, action: int) -> bool:
|
||||
# trade signal
|
||||
|
||||
@@ -21,6 +21,7 @@ class BaseActions(Enum):
|
||||
"""
|
||||
Default action space, mostly used for type handling.
|
||||
"""
|
||||
|
||||
Neutral = 0
|
||||
Long_enter = 1
|
||||
Long_exit = 2
|
||||
@@ -44,11 +45,22 @@ class BaseEnvironment(gym.Env):
|
||||
See RL/Base5ActionRLEnv.py and RL/Base4ActionRLEnv.py
|
||||
"""
|
||||
|
||||
def __init__(self, df: DataFrame = DataFrame(), prices: DataFrame = DataFrame(),
|
||||
reward_kwargs: dict = {}, window_size=10, starting_point=True,
|
||||
id: str = 'baseenv-1', seed: int = 1, config: dict = {}, live: bool = False,
|
||||
fee: float = 0.0015, can_short: bool = False, pair: str = "",
|
||||
df_raw: DataFrame = DataFrame()):
|
||||
def __init__(
|
||||
self,
|
||||
df: DataFrame = DataFrame(),
|
||||
prices: DataFrame = DataFrame(),
|
||||
reward_kwargs: dict = {},
|
||||
window_size=10,
|
||||
starting_point=True,
|
||||
id: str = "baseenv-1",
|
||||
seed: int = 1,
|
||||
config: dict = {},
|
||||
live: bool = False,
|
||||
fee: float = 0.0015,
|
||||
can_short: bool = False,
|
||||
pair: str = "",
|
||||
df_raw: DataFrame = DataFrame(),
|
||||
):
|
||||
"""
|
||||
Initializes the training/eval environment.
|
||||
:param df: dataframe of features
|
||||
@@ -64,15 +76,15 @@ class BaseEnvironment(gym.Env):
|
||||
:param can_short: Whether or not the environment can short
|
||||
"""
|
||||
self.config: dict = config
|
||||
self.rl_config: dict = config['freqai']['rl_config']
|
||||
self.add_state_info: bool = self.rl_config.get('add_state_info', False)
|
||||
self.rl_config: dict = config["freqai"]["rl_config"]
|
||||
self.add_state_info: bool = self.rl_config.get("add_state_info", False)
|
||||
self.id: str = id
|
||||
self.max_drawdown: float = 1 - self.rl_config.get('max_training_drawdown_pct', 0.8)
|
||||
self.compound_trades: bool = config['stake_amount'] == 'unlimited'
|
||||
self.max_drawdown: float = 1 - self.rl_config.get("max_training_drawdown_pct", 0.8)
|
||||
self.compound_trades: bool = config["stake_amount"] == "unlimited"
|
||||
self.pair: str = pair
|
||||
self.raw_features: DataFrame = df_raw
|
||||
if self.config.get('fee', None) is not None:
|
||||
self.fee = self.config['fee']
|
||||
if self.config.get("fee", None) is not None:
|
||||
self.fee = self.config["fee"]
|
||||
else:
|
||||
self.fee = fee
|
||||
|
||||
@@ -82,14 +94,22 @@ class BaseEnvironment(gym.Env):
|
||||
self.can_short: bool = can_short
|
||||
self.live: bool = live
|
||||
if not self.live and self.add_state_info:
|
||||
raise OperationalException("`add_state_info` is not available in backtesting. Change "
|
||||
"parameter to false in your rl_config. See `add_state_info` "
|
||||
"docs for more info.")
|
||||
raise OperationalException(
|
||||
"`add_state_info` is not available in backtesting. Change "
|
||||
"parameter to false in your rl_config. See `add_state_info` "
|
||||
"docs for more info."
|
||||
)
|
||||
self.seed(seed)
|
||||
self.reset_env(df, prices, window_size, reward_kwargs, starting_point)
|
||||
|
||||
def reset_env(self, df: DataFrame, prices: DataFrame, window_size: int,
|
||||
reward_kwargs: dict, starting_point=True):
|
||||
def reset_env(
|
||||
self,
|
||||
df: DataFrame,
|
||||
prices: DataFrame,
|
||||
window_size: int,
|
||||
reward_kwargs: dict,
|
||||
starting_point=True,
|
||||
):
|
||||
"""
|
||||
Resets the environment when the agent fails (in our case, if the drawdown
|
||||
exceeds the user set max_training_drawdown_pct)
|
||||
@@ -113,8 +133,7 @@ class BaseEnvironment(gym.Env):
|
||||
self.total_features = self.signal_features.shape[1]
|
||||
self.shape = (window_size, self.total_features)
|
||||
self.set_action_space()
|
||||
self.observation_space = spaces.Box(
|
||||
low=-1, high=1, shape=self.shape, dtype=np.float32)
|
||||
self.observation_space = spaces.Box(low=-1, high=1, shape=self.shape, dtype=np.float32)
|
||||
|
||||
# episode
|
||||
self._start_tick: int = self.window_size
|
||||
@@ -151,8 +170,13 @@ class BaseEnvironment(gym.Env):
|
||||
self.np_random, seed = seeding.np_random(seed)
|
||||
return [seed]
|
||||
|
||||
def tensorboard_log(self, metric: str, value: Optional[Union[int, float]] = None,
|
||||
inc: Optional[bool] = None, category: str = "custom"):
|
||||
def tensorboard_log(
|
||||
self,
|
||||
metric: str,
|
||||
value: Optional[Union[int, float]] = None,
|
||||
inc: Optional[bool] = None,
|
||||
category: str = "custom",
|
||||
):
|
||||
"""
|
||||
Function builds the tensorboard_metrics dictionary
|
||||
to be parsed by the TensorboardCallback. This
|
||||
@@ -195,7 +219,7 @@ class BaseEnvironment(gym.Env):
|
||||
self._done = False
|
||||
|
||||
if self.starting_point is True:
|
||||
if self.rl_config.get('randomize_starting_position', False):
|
||||
if self.rl_config.get("randomize_starting_position", False):
|
||||
length_of_data = int(self._end_tick / 4)
|
||||
start_tick = random.randint(self.window_size + 1, length_of_data)
|
||||
self._start_tick = start_tick
|
||||
@@ -207,8 +231,8 @@ class BaseEnvironment(gym.Env):
|
||||
self._last_trade_tick = None
|
||||
self._position = Positions.Neutral
|
||||
|
||||
self.total_reward = 0.
|
||||
self._total_profit = 1. # unit
|
||||
self.total_reward = 0.0
|
||||
self._total_profit = 1.0 # unit
|
||||
self.history = {}
|
||||
self.trade_history = []
|
||||
self.portfolio_log_returns = np.zeros(len(self.prices))
|
||||
@@ -231,18 +255,19 @@ class BaseEnvironment(gym.Env):
|
||||
This may or may not be independent of action types, user can inherit
|
||||
this in their custom "MyRLEnv"
|
||||
"""
|
||||
features_window = self.signal_features[(
|
||||
self._current_tick - self.window_size):self._current_tick]
|
||||
features_window = self.signal_features[
|
||||
(self._current_tick - self.window_size) : self._current_tick
|
||||
]
|
||||
if self.add_state_info:
|
||||
features_and_state = DataFrame(np.zeros((len(features_window), 3)),
|
||||
columns=['current_profit_pct',
|
||||
'position',
|
||||
'trade_duration'],
|
||||
index=features_window.index)
|
||||
features_and_state = DataFrame(
|
||||
np.zeros((len(features_window), 3)),
|
||||
columns=["current_profit_pct", "position", "trade_duration"],
|
||||
index=features_window.index,
|
||||
)
|
||||
|
||||
features_and_state['current_profit_pct'] = self.get_unrealized_profit()
|
||||
features_and_state['position'] = self._position.value
|
||||
features_and_state['trade_duration'] = self.get_trade_duration()
|
||||
features_and_state["current_profit_pct"] = self.get_unrealized_profit()
|
||||
features_and_state["position"] = self._position.value
|
||||
features_and_state["trade_duration"] = self.get_trade_duration()
|
||||
features_and_state = pd.concat([features_window, features_and_state], axis=1)
|
||||
return features_and_state
|
||||
else:
|
||||
@@ -262,10 +287,10 @@ class BaseEnvironment(gym.Env):
|
||||
Get the unrealized profit if the agent is in a trade
|
||||
"""
|
||||
if self._last_trade_tick is None:
|
||||
return 0.
|
||||
return 0.0
|
||||
|
||||
if self._position == Positions.Neutral:
|
||||
return 0.
|
||||
return 0.0
|
||||
elif self._position == Positions.Short:
|
||||
current_price = self.add_entry_fee(self.prices.iloc[self._current_tick].open)
|
||||
last_trade_price = self.add_exit_fee(self.prices.iloc[self._last_trade_tick].open)
|
||||
@@ -275,7 +300,7 @@ class BaseEnvironment(gym.Env):
|
||||
last_trade_price = self.add_entry_fee(self.prices.iloc[self._last_trade_tick].open)
|
||||
return (current_price - last_trade_price) / last_trade_price
|
||||
else:
|
||||
return 0.
|
||||
return 0.0
|
||||
|
||||
@abstractmethod
|
||||
def is_tradesignal(self, action: int) -> bool:
|
||||
|
||||
@@ -30,10 +30,10 @@ from freqtrade.persistence import Trade
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
torch.multiprocessing.set_sharing_strategy('file_system')
|
||||
torch.multiprocessing.set_sharing_strategy("file_system")
|
||||
|
||||
SB3_MODELS = ['PPO', 'A2C', 'DQN']
|
||||
SB3_CONTRIB_MODELS = ['TRPO', 'ARS', 'RecurrentPPO', 'MaskablePPO', 'QRDQN']
|
||||
SB3_MODELS = ["PPO", "A2C", "DQN"]
|
||||
SB3_CONTRIB_MODELS = ["TRPO", "ARS", "RecurrentPPO", "MaskablePPO", "QRDQN"]
|
||||
|
||||
|
||||
class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
@@ -42,57 +42,60 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs) -> None:
|
||||
super().__init__(config=kwargs['config'])
|
||||
self.max_threads = min(self.freqai_info['rl_config'].get(
|
||||
'cpu_count', 1), max(int(self.max_system_threads / 2), 1))
|
||||
super().__init__(config=kwargs["config"])
|
||||
self.max_threads = min(
|
||||
self.freqai_info["rl_config"].get("cpu_count", 1),
|
||||
max(int(self.max_system_threads / 2), 1),
|
||||
)
|
||||
th.set_num_threads(self.max_threads)
|
||||
self.reward_params = self.freqai_info['rl_config']['model_reward_parameters']
|
||||
self.reward_params = self.freqai_info["rl_config"]["model_reward_parameters"]
|
||||
self.train_env: Union[VecMonitor, SubprocVecEnv, gym.Env] = gym.Env()
|
||||
self.eval_env: Union[VecMonitor, SubprocVecEnv, gym.Env] = gym.Env()
|
||||
self.eval_callback: Optional[MaskableEvalCallback] = None
|
||||
self.model_type = self.freqai_info['rl_config']['model_type']
|
||||
self.rl_config = self.freqai_info['rl_config']
|
||||
self.model_type = self.freqai_info["rl_config"]["model_type"]
|
||||
self.rl_config = self.freqai_info["rl_config"]
|
||||
self.df_raw: DataFrame = DataFrame()
|
||||
self.continual_learning = self.freqai_info.get('continual_learning', False)
|
||||
self.continual_learning = self.freqai_info.get("continual_learning", False)
|
||||
if self.model_type in SB3_MODELS:
|
||||
import_str = 'stable_baselines3'
|
||||
import_str = "stable_baselines3"
|
||||
elif self.model_type in SB3_CONTRIB_MODELS:
|
||||
import_str = 'sb3_contrib'
|
||||
import_str = "sb3_contrib"
|
||||
else:
|
||||
raise OperationalException(f'{self.model_type} not available in stable_baselines3 or '
|
||||
f'sb3_contrib. please choose one of {SB3_MODELS} or '
|
||||
f'{SB3_CONTRIB_MODELS}')
|
||||
raise OperationalException(
|
||||
f"{self.model_type} not available in stable_baselines3 or "
|
||||
f"sb3_contrib. please choose one of {SB3_MODELS} or "
|
||||
f"{SB3_CONTRIB_MODELS}"
|
||||
)
|
||||
|
||||
mod = importlib.import_module(import_str, self.model_type)
|
||||
self.MODELCLASS = getattr(mod, self.model_type)
|
||||
self.policy_type = self.freqai_info['rl_config']['policy_type']
|
||||
self.policy_type = self.freqai_info["rl_config"]["policy_type"]
|
||||
self.unset_outlier_removal()
|
||||
self.net_arch = self.rl_config.get('net_arch', [128, 128])
|
||||
self.net_arch = self.rl_config.get("net_arch", [128, 128])
|
||||
self.dd.model_type = import_str
|
||||
self.tensorboard_callback: TensorboardCallback = \
|
||||
TensorboardCallback(verbose=1, actions=BaseActions)
|
||||
self.tensorboard_callback: TensorboardCallback = TensorboardCallback(
|
||||
verbose=1, actions=BaseActions
|
||||
)
|
||||
|
||||
def unset_outlier_removal(self):
|
||||
"""
|
||||
If user has activated any function that may remove training points, this
|
||||
function will set them to false and warn them
|
||||
"""
|
||||
if self.ft_params.get('use_SVM_to_remove_outliers', False):
|
||||
self.ft_params.update({'use_SVM_to_remove_outliers': False})
|
||||
logger.warning('User tried to use SVM with RL. Deactivating SVM.')
|
||||
if self.ft_params.get('use_DBSCAN_to_remove_outliers', False):
|
||||
self.ft_params.update({'use_DBSCAN_to_remove_outliers': False})
|
||||
logger.warning('User tried to use DBSCAN with RL. Deactivating DBSCAN.')
|
||||
if self.ft_params.get('DI_threshold', False):
|
||||
self.ft_params.update({'DI_threshold': False})
|
||||
logger.warning('User tried to use DI_threshold with RL. Deactivating DI_threshold.')
|
||||
if self.freqai_info['data_split_parameters'].get('shuffle', False):
|
||||
self.freqai_info['data_split_parameters'].update({'shuffle': False})
|
||||
logger.warning('User tried to shuffle training data. Setting shuffle to False')
|
||||
if self.ft_params.get("use_SVM_to_remove_outliers", False):
|
||||
self.ft_params.update({"use_SVM_to_remove_outliers": False})
|
||||
logger.warning("User tried to use SVM with RL. Deactivating SVM.")
|
||||
if self.ft_params.get("use_DBSCAN_to_remove_outliers", False):
|
||||
self.ft_params.update({"use_DBSCAN_to_remove_outliers": False})
|
||||
logger.warning("User tried to use DBSCAN with RL. Deactivating DBSCAN.")
|
||||
if self.ft_params.get("DI_threshold", False):
|
||||
self.ft_params.update({"DI_threshold": False})
|
||||
logger.warning("User tried to use DI_threshold with RL. Deactivating DI_threshold.")
|
||||
if self.freqai_info["data_split_parameters"].get("shuffle", False):
|
||||
self.freqai_info["data_split_parameters"].update({"shuffle": False})
|
||||
logger.warning("User tried to shuffle training data. Setting shuffle to False")
|
||||
|
||||
def train(
|
||||
self, unfiltered_df: DataFrame, pair: str, dk: FreqaiDataKitchen, **kwargs
|
||||
) -> Any:
|
||||
def train(self, unfiltered_df: DataFrame, pair: str, dk: FreqaiDataKitchen, **kwargs) -> Any:
|
||||
"""
|
||||
Filter the training data and train a model to it. Train makes heavy use of the datakitchen
|
||||
for storing, saving, loading, and analyzing the data.
|
||||
@@ -102,7 +105,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
:model: Trained model which can be used to inference (self.predict)
|
||||
"""
|
||||
|
||||
logger.info("--------------------Starting training " f"{pair} --------------------")
|
||||
logger.info(f"--------------------Starting training {pair} --------------------")
|
||||
|
||||
features_filtered, labels_filtered = dk.filter_features(
|
||||
unfiltered_df,
|
||||
@@ -111,8 +114,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
training_filter=True,
|
||||
)
|
||||
|
||||
dd: Dict[str, Any] = dk.make_train_test_datasets(
|
||||
features_filtered, labels_filtered)
|
||||
dd: Dict[str, Any] = dk.make_train_test_datasets(features_filtered, labels_filtered)
|
||||
self.df_raw = copy.deepcopy(dd["train_features"])
|
||||
dk.fit_labels() # FIXME useless for now, but just satiating append methods
|
||||
|
||||
@@ -121,18 +123,18 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
|
||||
dk.feature_pipeline = self.define_data_pipeline(threads=dk.thread_count)
|
||||
|
||||
(dd["train_features"],
|
||||
dd["train_labels"],
|
||||
dd["train_weights"]) = dk.feature_pipeline.fit_transform(dd["train_features"],
|
||||
dd["train_labels"],
|
||||
dd["train_weights"])
|
||||
(dd["train_features"], dd["train_labels"], dd["train_weights"]) = (
|
||||
dk.feature_pipeline.fit_transform(
|
||||
dd["train_features"], dd["train_labels"], dd["train_weights"]
|
||||
)
|
||||
)
|
||||
|
||||
if self.freqai_info.get('data_split_parameters', {}).get('test_size', 0.1) != 0:
|
||||
(dd["test_features"],
|
||||
dd["test_labels"],
|
||||
dd["test_weights"]) = dk.feature_pipeline.transform(dd["test_features"],
|
||||
dd["test_labels"],
|
||||
dd["test_weights"])
|
||||
if self.freqai_info.get("data_split_parameters", {}).get("test_size", 0.1) != 0:
|
||||
(dd["test_features"], dd["test_labels"], dd["test_weights"]) = (
|
||||
dk.feature_pipeline.transform(
|
||||
dd["test_features"], dd["test_labels"], dd["test_weights"]
|
||||
)
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f'Training model on {len(dk.data_dictionary["train_features"].columns)}'
|
||||
@@ -147,9 +149,13 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
|
||||
return model
|
||||
|
||||
def set_train_and_eval_environments(self, data_dictionary: Dict[str, DataFrame],
|
||||
prices_train: DataFrame, prices_test: DataFrame,
|
||||
dk: FreqaiDataKitchen):
|
||||
def set_train_and_eval_environments(
|
||||
self,
|
||||
data_dictionary: Dict[str, DataFrame],
|
||||
prices_train: DataFrame,
|
||||
prices_test: DataFrame,
|
||||
dk: FreqaiDataKitchen,
|
||||
):
|
||||
"""
|
||||
User can override this if they are using a custom MyRLEnv
|
||||
:param data_dictionary: dict = common data dictionary containing train and test
|
||||
@@ -165,11 +171,14 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
|
||||
self.train_env = self.MyRLEnv(df=train_df, prices=prices_train, **env_info)
|
||||
self.eval_env = Monitor(self.MyRLEnv(df=test_df, prices=prices_test, **env_info))
|
||||
self.eval_callback = MaskableEvalCallback(self.eval_env, deterministic=True,
|
||||
render=False, eval_freq=len(train_df),
|
||||
best_model_save_path=str(dk.data_path),
|
||||
use_masking=(self.model_type == 'MaskablePPO' and
|
||||
is_masking_supported(self.eval_env)))
|
||||
self.eval_callback = MaskableEvalCallback(
|
||||
self.eval_env,
|
||||
deterministic=True,
|
||||
render=False,
|
||||
eval_freq=len(train_df),
|
||||
best_model_save_path=str(dk.data_path),
|
||||
use_masking=(self.model_type == "MaskablePPO" and is_masking_supported(self.eval_env)),
|
||||
)
|
||||
|
||||
actions = self.train_env.get_actions()
|
||||
self.tensorboard_callback = TensorboardCallback(verbose=1, actions=actions)
|
||||
@@ -178,16 +187,19 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
"""
|
||||
Create dictionary of environment arguments
|
||||
"""
|
||||
env_info = {"window_size": self.CONV_WIDTH,
|
||||
"reward_kwargs": self.reward_params,
|
||||
"config": self.config,
|
||||
"live": self.live,
|
||||
"can_short": self.can_short,
|
||||
"pair": pair,
|
||||
"df_raw": self.df_raw}
|
||||
env_info = {
|
||||
"window_size": self.CONV_WIDTH,
|
||||
"reward_kwargs": self.reward_params,
|
||||
"config": self.config,
|
||||
"live": self.live,
|
||||
"can_short": self.can_short,
|
||||
"pair": pair,
|
||||
"df_raw": self.df_raw,
|
||||
}
|
||||
if self.data_provider:
|
||||
env_info["fee"] = self.data_provider._exchange \
|
||||
.get_fee(symbol=self.data_provider.current_whitelist()[0]) # type: ignore
|
||||
env_info["fee"] = self.data_provider._exchange.get_fee( # type: ignore
|
||||
symbol=self.data_provider.current_whitelist()[0]
|
||||
)
|
||||
|
||||
return env_info
|
||||
|
||||
@@ -219,11 +231,12 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
for trade in open_trades:
|
||||
if trade.pair == pair:
|
||||
if self.data_provider._exchange is None: # type: ignore
|
||||
logger.error('No exchange available.')
|
||||
logger.error("No exchange available.")
|
||||
return 0, 0, 0
|
||||
else:
|
||||
current_rate = self.data_provider._exchange.get_rate( # type: ignore
|
||||
pair, refresh=False, side="exit", is_short=trade.is_short)
|
||||
pair, refresh=False, side="exit", is_short=trade.is_short
|
||||
)
|
||||
|
||||
now = datetime.now(timezone.utc).timestamp()
|
||||
trade_duration = int((now - trade.open_date_utc.timestamp()) / self.base_tf_seconds)
|
||||
@@ -255,16 +268,17 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
dk.data_dictionary["prediction_features"] = self.drop_ohlc_from_df(filtered_dataframe, dk)
|
||||
|
||||
dk.data_dictionary["prediction_features"], _, _ = dk.feature_pipeline.transform(
|
||||
dk.data_dictionary["prediction_features"], outlier_check=True)
|
||||
dk.data_dictionary["prediction_features"], outlier_check=True
|
||||
)
|
||||
|
||||
pred_df = self.rl_model_predict(
|
||||
dk.data_dictionary["prediction_features"], dk, self.model)
|
||||
pred_df = self.rl_model_predict(dk.data_dictionary["prediction_features"], dk, self.model)
|
||||
pred_df.fillna(0, inplace=True)
|
||||
|
||||
return (pred_df, dk.do_predict)
|
||||
|
||||
def rl_model_predict(self, dataframe: DataFrame,
|
||||
dk: FreqaiDataKitchen, model: Any) -> DataFrame:
|
||||
def rl_model_predict(
|
||||
self, dataframe: DataFrame, dk: FreqaiDataKitchen, model: Any
|
||||
) -> DataFrame:
|
||||
"""
|
||||
A helper function to make predictions in the Reinforcement learning module.
|
||||
:param dataframe: DataFrame = the dataframe of features to make the predictions on
|
||||
@@ -275,11 +289,11 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
|
||||
def _predict(window):
|
||||
observations = dataframe.iloc[window.index]
|
||||
if self.live and self.rl_config.get('add_state_info', False):
|
||||
if self.live and self.rl_config.get("add_state_info", False):
|
||||
market_side, current_profit, trade_duration = self.get_state_info(dk.pair)
|
||||
observations['current_profit_pct'] = current_profit
|
||||
observations['position'] = market_side
|
||||
observations['trade_duration'] = trade_duration
|
||||
observations["current_profit_pct"] = current_profit
|
||||
observations["position"] = market_side
|
||||
observations["trade_duration"] = trade_duration
|
||||
res, _ = model.predict(observations, deterministic=True)
|
||||
return res
|
||||
|
||||
@@ -287,23 +301,31 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
|
||||
return output
|
||||
|
||||
def build_ohlc_price_dataframes(self, data_dictionary: dict,
|
||||
pair: str, dk: FreqaiDataKitchen) -> Tuple[DataFrame,
|
||||
DataFrame]:
|
||||
def build_ohlc_price_dataframes(
|
||||
self, data_dictionary: dict, pair: str, dk: FreqaiDataKitchen
|
||||
) -> Tuple[DataFrame, DataFrame]:
|
||||
"""
|
||||
Builds the train prices and test prices for the environment.
|
||||
"""
|
||||
|
||||
pair = pair.replace(':', '')
|
||||
pair = pair.replace(":", "")
|
||||
train_df = data_dictionary["train_features"]
|
||||
test_df = data_dictionary["test_features"]
|
||||
|
||||
# price data for model training and evaluation
|
||||
tf = self.config['timeframe']
|
||||
rename_dict = {'%-raw_open': 'open', '%-raw_low': 'low',
|
||||
'%-raw_high': ' high', '%-raw_close': 'close'}
|
||||
rename_dict_old = {f'%-{pair}raw_open_{tf}': 'open', f'%-{pair}raw_low_{tf}': 'low',
|
||||
f'%-{pair}raw_high_{tf}': ' high', f'%-{pair}raw_close_{tf}': 'close'}
|
||||
tf = self.config["timeframe"]
|
||||
rename_dict = {
|
||||
"%-raw_open": "open",
|
||||
"%-raw_low": "low",
|
||||
"%-raw_high": " high",
|
||||
"%-raw_close": "close",
|
||||
}
|
||||
rename_dict_old = {
|
||||
f"%-{pair}raw_open_{tf}": "open",
|
||||
f"%-{pair}raw_low_{tf}": "low",
|
||||
f"%-{pair}raw_high_{tf}": " high",
|
||||
f"%-{pair}raw_close_{tf}": "close",
|
||||
}
|
||||
|
||||
prices_train = train_df.filter(rename_dict.keys(), axis=1)
|
||||
prices_train_old = train_df.filter(rename_dict_old.keys(), axis=1)
|
||||
@@ -311,17 +333,21 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
if not prices_train_old.empty:
|
||||
prices_train = prices_train_old
|
||||
rename_dict = rename_dict_old
|
||||
logger.warning('Reinforcement learning module didn\'t find the correct raw prices '
|
||||
'assigned in feature_engineering_standard(). '
|
||||
'Please assign them with:\n'
|
||||
'dataframe["%-raw_close"] = dataframe["close"]\n'
|
||||
'dataframe["%-raw_open"] = dataframe["open"]\n'
|
||||
'dataframe["%-raw_high"] = dataframe["high"]\n'
|
||||
'dataframe["%-raw_low"] = dataframe["low"]\n'
|
||||
'inside `feature_engineering_standard()')
|
||||
logger.warning(
|
||||
"Reinforcement learning module didn't find the correct raw prices "
|
||||
"assigned in feature_engineering_standard(). "
|
||||
"Please assign them with:\n"
|
||||
'dataframe["%-raw_close"] = dataframe["close"]\n'
|
||||
'dataframe["%-raw_open"] = dataframe["open"]\n'
|
||||
'dataframe["%-raw_high"] = dataframe["high"]\n'
|
||||
'dataframe["%-raw_low"] = dataframe["low"]\n'
|
||||
"inside `feature_engineering_standard()"
|
||||
)
|
||||
elif prices_train.empty:
|
||||
raise OperationalException("No prices found, please follow log warning "
|
||||
"instructions to correct the strategy.")
|
||||
raise OperationalException(
|
||||
"No prices found, please follow log warning "
|
||||
"instructions to correct the strategy."
|
||||
)
|
||||
|
||||
prices_train.rename(columns=rename_dict, inplace=True)
|
||||
prices_train.reset_index(drop=True)
|
||||
@@ -339,7 +365,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
"""
|
||||
Given a dataframe, drop the ohlc data
|
||||
"""
|
||||
drop_list = ['%-raw_open', '%-raw_low', '%-raw_high', '%-raw_close']
|
||||
drop_list = ["%-raw_open", "%-raw_low", "%-raw_high", "%-raw_close"]
|
||||
|
||||
if self.rl_config["drop_ohlc_from_features"]:
|
||||
df.drop(drop_list, axis=1, inplace=True)
|
||||
@@ -358,7 +384,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
if exists:
|
||||
model = self.MODELCLASS.load(dk.data_path / f"{dk.model_filename}_model")
|
||||
else:
|
||||
logger.info('No model file on disk to continue learning from.')
|
||||
logger.info("No model file on disk to continue learning from.")
|
||||
|
||||
return model
|
||||
|
||||
@@ -400,15 +426,18 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
return -2
|
||||
|
||||
pnl = self.get_unrealized_profit()
|
||||
factor = 100.
|
||||
factor = 100.0
|
||||
|
||||
# you can use feature values from dataframe
|
||||
rsi_now = self.raw_features[f"%-rsi-period-10_shift-1_{self.pair}_"
|
||||
f"{self.config['timeframe']}"].iloc[self._current_tick]
|
||||
rsi_now = self.raw_features[
|
||||
f"%-rsi-period-10_shift-1_{self.pair}_{self.config['timeframe']}"
|
||||
].iloc[self._current_tick]
|
||||
|
||||
# reward agent for entering trades
|
||||
if (action in (Actions.Long_enter.value, Actions.Short_enter.value)
|
||||
and self._position == Positions.Neutral):
|
||||
if (
|
||||
action in (Actions.Long_enter.value, Actions.Short_enter.value)
|
||||
and self._position == Positions.Neutral
|
||||
):
|
||||
if rsi_now < 40:
|
||||
factor = 40 / rsi_now
|
||||
else:
|
||||
@@ -419,7 +448,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
if action == Actions.Neutral.value and self._position == Positions.Neutral:
|
||||
return -1
|
||||
|
||||
max_trade_duration = self.rl_config.get('max_trade_duration_candles', 300)
|
||||
max_trade_duration = self.rl_config.get("max_trade_duration_candles", 300)
|
||||
if self._last_trade_tick:
|
||||
trade_duration = self._current_tick - self._last_trade_tick
|
||||
else:
|
||||
@@ -431,28 +460,36 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
||||
factor *= 0.5
|
||||
|
||||
# discourage sitting in position
|
||||
if (self._position in (Positions.Short, Positions.Long) and
|
||||
action == Actions.Neutral.value):
|
||||
if (
|
||||
self._position in (Positions.Short, Positions.Long)
|
||||
and action == Actions.Neutral.value
|
||||
):
|
||||
return -1 * trade_duration / max_trade_duration
|
||||
|
||||
# close long
|
||||
if action == Actions.Long_exit.value and self._position == Positions.Long:
if pnl > self.profit_aim * self.rr:
factor *= self.rl_config['model_reward_parameters'].get('win_reward_factor', 2)
factor *= self.rl_config["model_reward_parameters"].get("win_reward_factor", 2)
return float(pnl * factor)
# close short
if action == Actions.Short_exit.value and self._position == Positions.Short:
if pnl > self.profit_aim * self.rr:
factor *= self.rl_config['model_reward_parameters'].get('win_reward_factor', 2)
factor *= self.rl_config["model_reward_parameters"].get("win_reward_factor", 2)
return float(pnl * factor)
return 0.
return 0.0
def make_env(MyRLEnv: Type[BaseEnvironment], env_id: str, rank: int,
seed: int, train_df: DataFrame, price: DataFrame,
env_info: Dict[str, Any] = {}) -> Callable:
def make_env(
MyRLEnv: Type[BaseEnvironment],
env_id: str,
rank: int,
seed: int,
train_df: DataFrame,
price: DataFrame,
env_info: Dict[str, Any] = {},
) -> Callable:
"""
Utility function for multiprocessed env.
@@ -465,10 +502,9 @@ def make_env(MyRLEnv: Type[BaseEnvironment], env_id: str, rank: int,
"""
def _init() -> gym.Env:
env = MyRLEnv(df=train_df, prices=price, id=env_id, seed=seed + rank,
**env_info)
env = MyRLEnv(df=train_df, prices=price, id=env_id, seed=seed + rank, **env_info)
return env
set_random_seed(seed)
return _init
@@ -21,9 +21,7 @@ class BaseClassifierModel(IFreqaiModel):
such as prediction_models/CatboostClassifier.py for guidance.
"""
def train(
self, unfiltered_df: DataFrame, pair: str, dk: FreqaiDataKitchen, **kwargs
) -> Any:
def train(self, unfiltered_df: DataFrame, pair: str, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
Filter the training data and train a model to it. Train makes heavy use of the datakitchen
for storing, saving, loading, and analyzing the data.
@@ -47,26 +45,28 @@ class BaseClassifierModel(IFreqaiModel):
start_date = unfiltered_df["date"].iloc[0].strftime("%Y-%m-%d")
end_date = unfiltered_df["date"].iloc[-1].strftime("%Y-%m-%d")
logger.info(f"-------------------- Training on data from {start_date} to "
f"{end_date} --------------------")
logger.info(
f"-------------------- Training on data from {start_date} to "
f"{end_date} --------------------"
)
# split data into train/test data.
dd = dk.make_train_test_datasets(features_filtered, labels_filtered)
if not self.freqai_info.get("fit_live_predictions_candles", 0) or not self.live:
dk.fit_labels()
dk.feature_pipeline = self.define_data_pipeline(threads=dk.thread_count)
(dd["train_features"],
dd["train_labels"],
dd["train_weights"]) = dk.feature_pipeline.fit_transform(dd["train_features"],
dd["train_labels"],
dd["train_weights"])
(dd["train_features"], dd["train_labels"], dd["train_weights"]) = (
dk.feature_pipeline.fit_transform(
dd["train_features"], dd["train_labels"], dd["train_weights"]
)
)
if self.freqai_info.get('data_split_parameters', {}).get('test_size', 0.1) != 0:
(dd["test_features"],
dd["test_labels"],
dd["test_weights"]) = dk.feature_pipeline.transform(dd["test_features"],
dd["test_labels"],
dd["test_weights"])
if self.freqai_info.get("data_split_parameters", {}).get("test_size", 0.1) != 0:
(dd["test_features"], dd["test_labels"], dd["test_weights"]) = (
dk.feature_pipeline.transform(
dd["test_features"], dd["test_labels"], dd["test_weights"]
)
)
logger.info(
f"Training model on {len(dk.data_dictionary['train_features'].columns)} features"
@@ -77,8 +77,10 @@ class BaseClassifierModel(IFreqaiModel):
end_time = time()
logger.info(f"-------------------- Done training {pair} "
f"({end_time - start_time:.2f} secs) --------------------")
logger.info(
f"-------------------- Done training {pair} "
f"({end_time - start_time:.2f} secs) --------------------"
)
return model
@@ -102,7 +104,8 @@ class BaseClassifierModel(IFreqaiModel):
dk.data_dictionary["prediction_features"] = filtered_df
dk.data_dictionary["prediction_features"], outliers, _ = dk.feature_pipeline.transform(
dk.data_dictionary["prediction_features"], outlier_check=True)
dk.data_dictionary["prediction_features"], outlier_check=True
)
predictions = self.model.predict(dk.data_dictionary["prediction_features"])
if self.CONV_WIDTH == 1:
@@ -59,8 +59,7 @@ class BasePyTorchClassifier(BasePyTorchModel):
class_names = self.model.model_meta_data.get("class_names", None)
if not class_names:
raise ValueError(
"Missing class names. "
"self.model.model_meta_data['class_names'] is None."
"Missing class names. self.model.model_meta_data['class_names'] is None."
)
if not self.class_name_to_index:
@@ -74,11 +73,11 @@ class BasePyTorchClassifier(BasePyTorchModel):
dk.data_dictionary["prediction_features"] = filtered_df
dk.data_dictionary["prediction_features"], outliers, _ = dk.feature_pipeline.transform(
dk.data_dictionary["prediction_features"], outlier_check=True)
dk.data_dictionary["prediction_features"], outlier_check=True
)
x = self.data_convertor.convert_x(
dk.data_dictionary["prediction_features"],
device=self.device
dk.data_dictionary["prediction_features"], device=self.device
)
self.model.model.eval()
logits = self.model.model(x)
@@ -100,10 +99,10 @@ class BasePyTorchClassifier(BasePyTorchModel):
return (pred_df, dk.do_predict)
def encode_class_names(
self,
data_dictionary: Dict[str, pd.DataFrame],
dk: FreqaiDataKitchen,
class_names: List[str],
self,
data_dictionary: Dict[str, pd.DataFrame],
dk: FreqaiDataKitchen,
class_names: List[str],
):
"""
encode class name, str -> int
@@ -120,15 +119,12 @@ class BasePyTorchClassifier(BasePyTorchModel):
)
@staticmethod
def assert_valid_class_names(
target_column: pd.Series,
class_names: List[str]
):
def assert_valid_class_names(target_column: pd.Series, class_names: List[str]):
non_defined_labels = set(target_column) - set(class_names)
if len(non_defined_labels) != 0:
raise OperationalException(
f"Found non defined labels: {non_defined_labels}, ",
f"expecting labels: {class_names}"
f"expecting labels: {class_names}",
)
def decode_class_names(self, class_ints: torch.Tensor) -> List[str]:
@@ -144,10 +140,10 @@ class BasePyTorchClassifier(BasePyTorchModel):
logger.info(f"encoded class name to index: {self.class_name_to_index}")
def convert_label_column_to_int(
self,
data_dictionary: Dict[str, pd.DataFrame],
dk: FreqaiDataKitchen,
class_names: List[str]
self,
data_dictionary: Dict[str, pd.DataFrame],
dk: FreqaiDataKitchen,
class_names: List[str],
):
self.init_class_names_to_index_mapping(class_names)
self.encode_class_names(data_dictionary, dk, class_names)
@@ -162,9 +158,7 @@ class BasePyTorchClassifier(BasePyTorchModel):
return self.class_names
def train(
self, unfiltered_df: DataFrame, pair: str, dk: FreqaiDataKitchen, **kwargs
) -> Any:
def train(self, unfiltered_df: DataFrame, pair: str, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
Filter the training data and train a model to it. Train makes heavy use of the datakitchen
for storing, saving, loading, and analyzing the data.
@@ -191,18 +185,18 @@ class BasePyTorchClassifier(BasePyTorchModel):
dk.feature_pipeline = self.define_data_pipeline(threads=dk.thread_count)
(dd["train_features"],
dd["train_labels"],
dd["train_weights"]) = dk.feature_pipeline.fit_transform(dd["train_features"],
dd["train_labels"],
dd["train_weights"])
(dd["train_features"], dd["train_labels"], dd["train_weights"]) = (
dk.feature_pipeline.fit_transform(
dd["train_features"], dd["train_labels"], dd["train_weights"]
)
)
if self.freqai_info.get('data_split_parameters', {}).get('test_size', 0.1) != 0:
(dd["test_features"],
dd["test_labels"],
dd["test_weights"]) = dk.feature_pipeline.transform(dd["test_features"],
dd["test_labels"],
dd["test_weights"])
if self.freqai_info.get("data_split_parameters", {}).get("test_size", 0.1) != 0:
(dd["test_features"], dd["test_labels"], dd["test_weights"]) = (
dk.feature_pipeline.transform(
dd["test_features"], dd["test_labels"], dd["test_weights"]
)
)
logger.info(
f"Training model on {len(dk.data_dictionary['train_features'].columns)} features"
@@ -212,7 +206,9 @@ class BasePyTorchClassifier(BasePyTorchModel):
model = self.fit(dd, dk)
end_time = time()
logger.info(f"-------------------- Done training {pair} "
f"({end_time - start_time:.2f} secs) --------------------")
logger.info(
f"-------------------- Done training {pair} "
f"({end_time - start_time:.2f} secs) --------------------"
)
return model
@@ -21,7 +21,7 @@ class BasePyTorchModel(IFreqaiModel, ABC):
|
||||
super().__init__(config=kwargs["config"])
|
||||
self.dd.model_type = "pytorch"
|
||||
self.device = "cuda" if torch.cuda.is_available() else "cpu"
|
||||
test_size = self.freqai_info.get('data_split_parameters', {}).get('test_size')
|
||||
test_size = self.freqai_info.get("data_split_parameters", {}).get("test_size")
|
||||
self.splits = ["train", "test"] if test_size != 0 else ["train"]
|
||||
self.window_size = self.freqai_info.get("conv_width", 1)
|
||||
|
||||
|
||||
@@ -41,11 +41,11 @@ class BasePyTorchRegressor(BasePyTorchModel):
|
||||
dk.data_dictionary["prediction_features"] = filtered_df
|
||||
|
||||
dk.data_dictionary["prediction_features"], outliers, _ = dk.feature_pipeline.transform(
|
||||
dk.data_dictionary["prediction_features"], outlier_check=True)
|
||||
dk.data_dictionary["prediction_features"], outlier_check=True
|
||||
)
|
||||
|
||||
x = self.data_convertor.convert_x(
|
||||
dk.data_dictionary["prediction_features"],
|
||||
device=self.device
|
||||
dk.data_dictionary["prediction_features"], device=self.device
|
||||
)
|
||||
self.model.model.eval()
|
||||
y = self.model.model(x)
|
||||
@@ -59,9 +59,7 @@ class BasePyTorchRegressor(BasePyTorchModel):
|
||||
dk.do_predict = outliers
|
||||
return (pred_df, dk.do_predict)
|
||||
|
||||
def train(
|
||||
self, unfiltered_df: DataFrame, pair: str, dk: FreqaiDataKitchen, **kwargs
|
||||
) -> Any:
|
||||
def train(self, unfiltered_df: DataFrame, pair: str, dk: FreqaiDataKitchen, **kwargs) -> Any:
|
||||
"""
|
||||
Filter the training data and train a model to it. Train makes heavy use of the datakitchen
|
||||
for storing, saving, loading, and analyzing the data.
|
||||
@@ -91,19 +89,19 @@ class BasePyTorchRegressor(BasePyTorchModel):
|
||||
dd["train_labels"], _, _ = dk.label_pipeline.fit_transform(dd["train_labels"])
|
||||
dd["test_labels"], _, _ = dk.label_pipeline.transform(dd["test_labels"])
|
||||
|
||||
(dd["train_features"],
|
||||
dd["train_labels"],
|
||||
dd["train_weights"]) = dk.feature_pipeline.fit_transform(dd["train_features"],
|
||||
dd["train_labels"],
|
||||
dd["train_weights"])
|
||||
(dd["train_features"], dd["train_labels"], dd["train_weights"]) = (
|
||||
dk.feature_pipeline.fit_transform(
|
||||
dd["train_features"], dd["train_labels"], dd["train_weights"]
|
||||
)
|
||||
)
|
||||
dd["train_labels"], _, _ = dk.label_pipeline.fit_transform(dd["train_labels"])
|
||||
|
||||
if self.freqai_info.get('data_split_parameters', {}).get('test_size', 0.1) != 0:
|
||||
(dd["test_features"],
|
||||
dd["test_labels"],
|
||||
dd["test_weights"]) = dk.feature_pipeline.transform(dd["test_features"],
|
||||
dd["test_labels"],
|
||||
dd["test_weights"])
|
||||
if self.freqai_info.get("data_split_parameters", {}).get("test_size", 0.1) != 0:
|
||||
(dd["test_features"], dd["test_labels"], dd["test_weights"]) = (
|
||||
dk.feature_pipeline.transform(
|
||||
dd["test_features"], dd["test_labels"], dd["test_weights"]
|
||||
)
|
||||
)
|
||||
dd["test_labels"], _, _ = dk.label_pipeline.transform(dd["test_labels"])
|
||||
|
||||
logger.info(
|
||||
@@ -114,7 +112,9 @@ class BasePyTorchRegressor(BasePyTorchModel):
|
||||
model = self.fit(dd, dk)
|
||||
end_time = time()
|
||||
|
||||
logger.info(f"-------------------- Done training {pair} "
|
||||
f"({end_time - start_time:.2f} secs) --------------------")
|
||||
logger.info(
|
||||
f"-------------------- Done training {pair} "
|
||||
f"({end_time - start_time:.2f} secs) --------------------"
|
||||
)
|
||||
|
||||
return model
|
||||
|
||||
@@ -20,9 +20,7 @@ class BaseRegressionModel(IFreqaiModel):
|
||||
such as prediction_models/CatboostRegressor.py for guidance.
|
||||
"""
|
||||
|
||||
def train(
|
||||
self, unfiltered_df: DataFrame, pair: str, dk: FreqaiDataKitchen, **kwargs
|
||||
) -> Any:
|
||||
def train(self, unfiltered_df: DataFrame, pair: str, dk: FreqaiDataKitchen, **kwargs) -> Any:
|
||||
"""
|
||||
Filter the training data and train a model to it. Train makes heavy use of the datakitchen
|
||||
for storing, saving, loading, and analyzing the data.
|
||||
@@ -46,8 +44,10 @@ class BaseRegressionModel(IFreqaiModel):
|
||||
|
||||
start_date = unfiltered_df["date"].iloc[0].strftime("%Y-%m-%d")
|
||||
end_date = unfiltered_df["date"].iloc[-1].strftime("%Y-%m-%d")
|
||||
logger.info(f"-------------------- Training on data from {start_date} to "
|
||||
f"{end_date} --------------------")
|
||||
logger.info(
|
||||
f"-------------------- Training on data from {start_date} to "
|
||||
f"{end_date} --------------------"
|
||||
)
|
||||
# split data into train/test data.
|
||||
dd = dk.make_train_test_datasets(features_filtered, labels_filtered)
|
||||
if not self.freqai_info.get("fit_live_predictions_candles", 0) or not self.live:
|
||||
@@ -55,19 +55,19 @@ class BaseRegressionModel(IFreqaiModel):
|
||||
dk.feature_pipeline = self.define_data_pipeline(threads=dk.thread_count)
|
||||
dk.label_pipeline = self.define_label_pipeline(threads=dk.thread_count)
|
||||
|
||||
(dd["train_features"],
|
||||
dd["train_labels"],
|
||||
dd["train_weights"]) = dk.feature_pipeline.fit_transform(dd["train_features"],
|
||||
dd["train_labels"],
|
||||
dd["train_weights"])
|
||||
(dd["train_features"], dd["train_labels"], dd["train_weights"]) = (
|
||||
dk.feature_pipeline.fit_transform(
|
||||
dd["train_features"], dd["train_labels"], dd["train_weights"]
|
||||
)
|
||||
)
|
||||
dd["train_labels"], _, _ = dk.label_pipeline.fit_transform(dd["train_labels"])
|
||||
|
||||
if self.freqai_info.get('data_split_parameters', {}).get('test_size', 0.1) != 0:
|
||||
(dd["test_features"],
|
||||
dd["test_labels"],
|
||||
dd["test_weights"]) = dk.feature_pipeline.transform(dd["test_features"],
|
||||
dd["test_labels"],
|
||||
dd["test_weights"])
|
||||
if self.freqai_info.get("data_split_parameters", {}).get("test_size", 0.1) != 0:
|
||||
(dd["test_features"], dd["test_labels"], dd["test_weights"]) = (
|
||||
dk.feature_pipeline.transform(
|
||||
dd["test_features"], dd["test_labels"], dd["test_weights"]
|
||||
)
|
||||
)
|
||||
dd["test_labels"], _, _ = dk.label_pipeline.transform(dd["test_labels"])
|
||||
|
||||
logger.info(
|
||||
@@ -79,8 +79,10 @@ class BaseRegressionModel(IFreqaiModel):
|
||||
|
||||
end_time = time()
|
||||
|
||||
logger.info(f"-------------------- Done training {pair} "
|
||||
f"({end_time - start_time:.2f} secs) --------------------")
|
||||
logger.info(
|
||||
f"-------------------- Done training {pair} "
|
||||
f"({end_time - start_time:.2f} secs) --------------------"
|
||||
)
|
||||
|
||||
return model
|
||||
|
||||
@@ -102,7 +104,8 @@ class BaseRegressionModel(IFreqaiModel):
|
||||
)
|
||||
|
||||
dk.data_dictionary["prediction_features"], outliers, _ = dk.feature_pipeline.transform(
|
||||
dk.data_dictionary["prediction_features"], outlier_check=True)
|
||||
dk.data_dictionary["prediction_features"], outlier_check=True
|
||||
)
|
||||
|
||||
predictions = self.model.predict(dk.data_dictionary["prediction_features"])
|
||||
if self.CONV_WIDTH == 1:
|
||||
|
||||
@@ -9,7 +9,6 @@ from freqtrade.exceptions import OperationalException
|
||||
|
||||
|
||||
class FreqaiMultiOutputClassifier(MultiOutputClassifier):
|
||||
|
||||
def fit(self, X, y, sample_weight=None, fit_params=None):
|
||||
"""Fit the model to data, separately for each output variable.
|
||||
Parameters
|
||||
@@ -48,18 +47,14 @@ class FreqaiMultiOutputClassifier(MultiOutputClassifier):
|
||||
"multi-output regression but has only one."
|
||||
)
|
||||
|
||||
if sample_weight is not None and not has_fit_parameter(
|
||||
self.estimator, "sample_weight"
|
||||
):
|
||||
if sample_weight is not None and not has_fit_parameter(self.estimator, "sample_weight"):
|
||||
raise ValueError("Underlying estimator does not support sample weights.")
|
||||
|
||||
if not fit_params:
|
||||
fit_params = [None] * y.shape[1]
|
||||
|
||||
self.estimators_ = Parallel(n_jobs=self.n_jobs)(
|
||||
delayed(_fit_estimator)(
|
||||
self.estimator, X, y[:, i], sample_weight, **fit_params[i]
|
||||
)
|
||||
delayed(_fit_estimator)(self.estimator, X, y[:, i], sample_weight, **fit_params[i])
|
||||
for i in range(y.shape[1])
|
||||
)
|
||||
|
||||
@@ -67,8 +62,9 @@ class FreqaiMultiOutputClassifier(MultiOutputClassifier):
|
||||
for estimator in self.estimators_:
|
||||
self.classes_.extend(estimator.classes_)
|
||||
if len(set(self.classes_)) != len(self.classes_):
|
||||
raise OperationalException(f"Class labels must be unique across targets: "
|
||||
f"{self.classes_}")
|
||||
raise OperationalException(
|
||||
f"Class labels must be unique across targets: {self.classes_}"
|
||||
)
|
||||
|
||||
if hasattr(self.estimators_[0], "n_features_in_"):
|
||||
self.n_features_in_ = self.estimators_[0].n_features_in_
|
||||
|
||||
@@ -4,7 +4,6 @@ from sklearn.utils.validation import has_fit_parameter
|
||||
|
||||
|
||||
class FreqaiMultiOutputRegressor(MultiOutputRegressor):
|
||||
|
||||
def fit(self, X, y, sample_weight=None, fit_params=None):
|
||||
"""Fit the model to data, separately for each output variable.
|
||||
Parameters
|
||||
@@ -40,18 +39,14 @@ class FreqaiMultiOutputRegressor(MultiOutputRegressor):
|
||||
"multi-output regression but has only one."
|
||||
)
|
||||
|
||||
if sample_weight is not None and not has_fit_parameter(
|
||||
self.estimator, "sample_weight"
|
||||
):
|
||||
if sample_weight is not None and not has_fit_parameter(self.estimator, "sample_weight"):
|
||||
raise ValueError("Underlying estimator does not support sample weights.")
|
||||
|
||||
if not fit_params:
|
||||
fit_params = [None] * y.shape[1]
|
||||
|
||||
self.estimators_ = Parallel(n_jobs=self.n_jobs)(
|
||||
delayed(_fit_estimator)(
|
||||
self.estimator, X, y[:, i], sample_weight, **fit_params[i]
|
||||
)
|
||||
delayed(_fit_estimator)(self.estimator, X, y[:, i], sample_weight, **fit_params[i])
|
||||
for i in range(y.shape[1])
|
||||
)
|
||||
|
||||
|
||||
@@ -66,7 +66,6 @@ class FreqaiDataDrawer:
|
||||
"""
|
||||
|
||||
def __init__(self, full_path: Path, config: Config):
|
||||
|
||||
self.config = config
|
||||
self.freqai_info = config.get("freqai", {})
|
||||
# dictionary holding all pair metadata necessary to load in from disk
|
||||
@@ -81,7 +80,8 @@ class FreqaiDataDrawer:
|
||||
self.full_path = full_path
|
||||
self.historic_predictions_path = Path(self.full_path / "historic_predictions.pkl")
|
||||
self.historic_predictions_bkp_path = Path(
|
||||
self.full_path / "historic_predictions.backup.pkl")
|
||||
self.full_path / "historic_predictions.backup.pkl"
|
||||
)
|
||||
self.pair_dictionary_path = Path(self.full_path / "pair_dictionary.json")
|
||||
self.global_metadata_path = Path(self.full_path / "global_metadata.json")
|
||||
self.metric_tracker_path = Path(self.full_path / "metric_tracker.json")
|
||||
@@ -96,9 +96,12 @@ class FreqaiDataDrawer:
|
||||
self.metric_tracker_lock = threading.Lock()
|
||||
self.old_DBSCAN_eps: Dict[str, float] = {}
|
||||
self.empty_pair_dict: pair_info = {
|
||||
"model_filename": "", "trained_timestamp": 0,
|
||||
"data_path": "", "extras": {}}
|
||||
self.model_type = self.freqai_info.get('model_save_type', 'joblib')
|
||||
"model_filename": "",
|
||||
"trained_timestamp": 0,
|
||||
"data_path": "",
|
||||
"extras": {},
|
||||
}
|
||||
self.model_type = self.freqai_info.get("model_save_type", "joblib")
|
||||
|
||||
def update_metric_tracker(self, metric: str, value: float, pair: str) -> None:
|
||||
"""
|
||||
@@ -109,11 +112,11 @@ class FreqaiDataDrawer:
|
||||
if pair not in self.metric_tracker:
|
||||
self.metric_tracker[pair] = {}
|
||||
if metric not in self.metric_tracker[pair]:
|
||||
self.metric_tracker[pair][metric] = {'timestamp': [], 'value': []}
|
||||
self.metric_tracker[pair][metric] = {"timestamp": [], "value": []}
|
||||
|
||||
timestamp = int(datetime.now(timezone.utc).timestamp())
|
||||
self.metric_tracker[pair][metric]['value'].append(value)
|
||||
self.metric_tracker[pair][metric]['timestamp'].append(timestamp)
|
||||
self.metric_tracker[pair][metric]["value"].append(value)
|
||||
self.metric_tracker[pair][metric]["timestamp"].append(timestamp)
|
||||
|
||||
def collect_metrics(self, time_spent: float, pair: str):
|
||||
"""
|
||||
@@ -121,10 +124,10 @@ class FreqaiDataDrawer:
|
||||
"""
|
||||
load1, load5, load15 = psutil.getloadavg()
|
||||
cpus = psutil.cpu_count()
|
||||
self.update_metric_tracker('train_time', time_spent, pair)
|
||||
self.update_metric_tracker('cpu_load1min', load1 / cpus, pair)
|
||||
self.update_metric_tracker('cpu_load5min', load5 / cpus, pair)
|
||||
self.update_metric_tracker('cpu_load15min', load15 / cpus, pair)
|
||||
self.update_metric_tracker("train_time", time_spent, pair)
|
||||
self.update_metric_tracker("cpu_load1min", load1 / cpus, pair)
|
||||
self.update_metric_tracker("cpu_load5min", load5 / cpus, pair)
|
||||
self.update_metric_tracker("cpu_load15min", load15 / cpus, pair)
|
||||
|
||||
def load_global_metadata_from_disk(self):
|
||||
"""
|
||||
@@ -155,7 +158,7 @@ class FreqaiDataDrawer:
|
||||
Tries to load an existing metrics dictionary if the user
|
||||
wants to collect metrics.
|
||||
"""
|
||||
if self.freqai_info.get('write_metrics_to_disk', False):
|
||||
if self.freqai_info.get("write_metrics_to_disk", False):
|
||||
exists = self.metric_tracker_path.is_file()
|
||||
if exists:
|
||||
with self.metric_tracker_path.open("r") as fp:
|
||||
@@ -181,10 +184,11 @@ class FreqaiDataDrawer:
|
||||
)
|
||||
except EOFError:
|
||||
logger.warning(
|
||||
'Historical prediction file was corrupted. Trying to load backup file.')
|
||||
"Historical prediction file was corrupted. Trying to load backup file."
|
||||
)
|
||||
with self.historic_predictions_bkp_path.open("rb") as fp:
|
||||
self.historic_predictions = cloudpickle.load(fp)
|
||||
logger.warning('FreqAI successfully loaded the backup historical predictions file.')
|
||||
logger.warning("FreqAI successfully loaded the backup historical predictions file.")
|
||||
|
||||
else:
|
||||
logger.info("Could not find existing historic_predictions, starting from scratch")
|
||||
@@ -206,27 +210,33 @@ class FreqaiDataDrawer:
|
||||
Save metric tracker of all pair metrics collected.
|
||||
"""
|
||||
with self.save_lock:
|
||||
with self.metric_tracker_path.open('w') as fp:
|
||||
rapidjson.dump(self.metric_tracker, fp, default=self.np_encoder,
|
||||
number_mode=rapidjson.NM_NATIVE)
|
||||
with self.metric_tracker_path.open("w") as fp:
|
||||
rapidjson.dump(
|
||||
self.metric_tracker,
|
||||
fp,
|
||||
default=self.np_encoder,
|
||||
number_mode=rapidjson.NM_NATIVE,
|
||||
)
|
||||
|
||||
def save_drawer_to_disk(self) -> None:
|
||||
"""
|
||||
Save data drawer full of all pair model metadata in present model folder.
|
||||
"""
|
||||
with self.save_lock:
|
||||
with self.pair_dictionary_path.open('w') as fp:
|
||||
rapidjson.dump(self.pair_dict, fp, default=self.np_encoder,
|
||||
number_mode=rapidjson.NM_NATIVE)
|
||||
with self.pair_dictionary_path.open("w") as fp:
|
||||
rapidjson.dump(
|
||||
self.pair_dict, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE
|
||||
)
|
||||
|
||||
def save_global_metadata_to_disk(self, metadata: Dict[str, Any]):
|
||||
"""
|
||||
Save global metadata json to disk
|
||||
"""
|
||||
with self.save_lock:
|
||||
with self.global_metadata_path.open('w') as fp:
|
||||
rapidjson.dump(metadata, fp, default=self.np_encoder,
|
||||
number_mode=rapidjson.NM_NATIVE)
|
||||
with self.global_metadata_path.open("w") as fp:
|
||||
rapidjson.dump(
|
||||
metadata, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE
|
||||
)
|
||||
|
||||
def np_encoder(self, object):
|
||||
if isinstance(object, np.generic):
|
||||
@@ -264,9 +274,7 @@ class FreqaiDataDrawer:
|
||||
return
|
||||
|
||||
def set_initial_return_values(
|
||||
self, pair: str,
|
||||
pred_df: DataFrame,
|
||||
dataframe: DataFrame
|
||||
self, pair: str, pred_df: DataFrame, dataframe: DataFrame
|
||||
) -> None:
|
||||
"""
|
||||
Set the initial return values to the historical predictions dataframe. This avoids needing
|
||||
@@ -285,7 +293,7 @@ class FreqaiDataDrawer:
|
||||
|
||||
new_pred["date_pred"] = dataframe["date"]
|
||||
# set everything to nan except date_pred
|
||||
columns_to_nan = new_pred.columns.difference(['date_pred', 'date'])
|
||||
columns_to_nan = new_pred.columns.difference(["date_pred", "date"])
|
||||
new_pred[columns_to_nan] = None
|
||||
|
||||
hist_preds = self.historic_predictions[pair].copy()
|
||||
@@ -296,14 +304,15 @@ class FreqaiDataDrawer:
|
||||
|
||||
# find the closest common date between new_pred and historic predictions
|
||||
# and cut off the new_pred dataframe at that date
|
||||
common_dates = pd.merge(new_pred, hist_preds,
|
||||
on="date_pred", how="inner")
|
||||
common_dates = pd.merge(new_pred, hist_preds, on="date_pred", how="inner")
|
||||
if len(common_dates.index) > 0:
|
||||
new_pred = new_pred.iloc[len(common_dates):]
|
||||
new_pred = new_pred.iloc[len(common_dates) :]
|
||||
else:
|
||||
logger.warning("No common dates found between new predictions and historic "
|
||||
"predictions. You likely left your FreqAI instance offline "
|
||||
f"for more than {len(dataframe.index)} candles.")
|
||||
logger.warning(
|
||||
"No common dates found between new predictions and historic "
|
||||
"predictions. You likely left your FreqAI instance offline "
|
||||
f"for more than {len(dataframe.index)} candles."
|
||||
)
|
||||
|
||||
# Pandas warns that its keeping dtypes of non NaN columns...
|
||||
# yea we know and we already want that behavior. Ignoring.
|
||||
@@ -311,21 +320,22 @@ class FreqaiDataDrawer:
|
||||
warnings.filterwarnings("ignore", category=FutureWarning)
|
||||
# reindex new_pred columns to match the historic predictions dataframe
|
||||
new_pred_reindexed = new_pred.reindex(columns=hist_preds.columns)
|
||||
df_concat = pd.concat(
|
||||
[hist_preds, new_pred_reindexed],
|
||||
ignore_index=True
|
||||
)
|
||||
df_concat = pd.concat([hist_preds, new_pred_reindexed], ignore_index=True)
|
||||
|
||||
# any missing values will get zeroed out so users can see the exact
|
||||
# downtime in FreqUI
|
||||
df_concat = df_concat.fillna(0)
|
||||
self.historic_predictions[pair] = df_concat
|
||||
self.model_return_values[pair] = df_concat.tail(
|
||||
len(dataframe.index)).reset_index(drop=True)
|
||||
self.model_return_values[pair] = df_concat.tail(len(dataframe.index)).reset_index(drop=True)
|
||||
|
||||
def append_model_predictions(self, pair: str, predictions: DataFrame,
|
||||
do_preds: NDArray[np.int_],
|
||||
dk: FreqaiDataKitchen, strat_df: DataFrame) -> None:
|
||||
def append_model_predictions(
|
||||
self,
|
||||
pair: str,
|
||||
predictions: DataFrame,
|
||||
do_preds: NDArray[np.int_],
|
||||
dk: FreqaiDataKitchen,
|
||||
strat_df: DataFrame,
|
||||
) -> None:
|
||||
"""
|
||||
Append model predictions to historic predictions dataframe, then set the
|
||||
strategy return dataframe to the tail of the historic predictions. The length of
|
||||
@@ -338,15 +348,9 @@ class FreqaiDataDrawer:
|
||||
index = self.historic_predictions[pair].index[-1:]
|
||||
columns = self.historic_predictions[pair].columns
|
||||
|
||||
zeros_df = pd.DataFrame(
|
||||
np.zeros((1, len(columns))),
|
||||
index=index,
|
||||
columns=columns
|
||||
)
|
||||
zeros_df = pd.DataFrame(np.zeros((1, len(columns))), index=index, columns=columns)
|
||||
self.historic_predictions[pair] = pd.concat(
|
||||
[self.historic_predictions[pair], zeros_df],
|
||||
ignore_index=True,
|
||||
axis=0
|
||||
[self.historic_predictions[pair], zeros_df], ignore_index=True, axis=0
|
||||
)
|
||||
df = self.historic_predictions[pair]
|
||||
|
||||
@@ -370,8 +374,8 @@ class FreqaiDataDrawer:
|
||||
df.iloc[-1, DI_values_loc] = dk.DI_values[-1]
|
||||
|
||||
# extra values the user added within custom prediction model
|
||||
if dk.data['extra_returns_per_train']:
|
||||
rets = dk.data['extra_returns_per_train']
|
||||
if dk.data["extra_returns_per_train"]:
|
||||
rets = dk.data["extra_returns_per_train"]
|
||||
for return_str in rets:
|
||||
return_loc = df.columns.get_loc(return_str)
|
||||
df.iloc[-1, return_loc] = rets[return_str]
|
||||
@@ -392,7 +396,8 @@ class FreqaiDataDrawer:
|
||||
self.model_return_values[pair] = df.tail(len_df).reset_index(drop=True)
|
||||
|
||||
def attach_return_values_to_return_dataframe(
|
||||
self, pair: str, dataframe: DataFrame) -> DataFrame:
|
||||
self, pair: str, dataframe: DataFrame
|
||||
) -> DataFrame:
|
||||
"""
|
||||
Attach the return values to the strat dataframe
|
||||
:param dataframe: DataFrame = strategy dataframe
|
||||
@@ -423,15 +428,14 @@ class FreqaiDataDrawer:
|
||||
if self.freqai_info["feature_parameters"].get("DI_threshold", 0) > 0:
|
||||
dataframe["DI_values"] = 0
|
||||
|
||||
if dk.data['extra_returns_per_train']:
|
||||
rets = dk.data['extra_returns_per_train']
|
||||
if dk.data["extra_returns_per_train"]:
|
||||
rets = dk.data["extra_returns_per_train"]
|
||||
for return_str in rets:
|
||||
dataframe[return_str] = 0
|
||||
|
||||
dk.return_dataframe = dataframe
|
||||
|
||||
def purge_old_models(self) -> None:
|
||||
|
||||
num_keep = self.freqai_info["purge_old_models"]
|
||||
if not num_keep:
|
||||
return
|
||||
@@ -508,10 +512,10 @@ class FreqaiDataDrawer:
|
||||
save_path = Path(dk.data_path)
|
||||
|
||||
# Save the trained model
|
||||
if self.model_type == 'joblib':
|
||||
if self.model_type == "joblib":
|
||||
with (save_path / f"{dk.model_filename}_model.joblib").open("wb") as fp:
|
||||
cloudpickle.dump(model, fp)
|
||||
elif self.model_type == 'keras':
|
||||
elif self.model_type == "keras":
|
||||
model.save(save_path / f"{dk.model_filename}_model.h5")
|
||||
elif self.model_type in ["stable_baselines3", "sb3_contrib", "pytorch"]:
|
||||
model.save(save_path / f"{dk.model_filename}_model.zip")
|
||||
@@ -596,23 +600,25 @@ class FreqaiDataDrawer:
|
||||
# try to access model in memory instead of loading object from disk to save time
|
||||
if dk.live and coin in self.model_dictionary:
|
||||
model = self.model_dictionary[coin]
|
||||
elif self.model_type == 'joblib':
|
||||
elif self.model_type == "joblib":
|
||||
with (dk.data_path / f"{dk.model_filename}_model.joblib").open("rb") as fp:
|
||||
model = cloudpickle.load(fp)
|
||||
elif 'stable_baselines' in self.model_type or 'sb3_contrib' == self.model_type:
|
||||
elif "stable_baselines" in self.model_type or "sb3_contrib" == self.model_type:
|
||||
mod = importlib.import_module(
|
||||
self.model_type, self.freqai_info['rl_config']['model_type'])
|
||||
MODELCLASS = getattr(mod, self.freqai_info['rl_config']['model_type'])
|
||||
self.model_type, self.freqai_info["rl_config"]["model_type"]
|
||||
)
|
||||
MODELCLASS = getattr(mod, self.freqai_info["rl_config"]["model_type"])
|
||||
model = MODELCLASS.load(dk.data_path / f"{dk.model_filename}_model")
|
||||
elif self.model_type == 'pytorch':
|
||||
elif self.model_type == "pytorch":
|
||||
import torch
|
||||
|
||||
zip = torch.load(dk.data_path / f"{dk.model_filename}_model.zip")
|
||||
model = zip["pytrainer"]
|
||||
model = model.load_from_checkpoint(zip)
|
||||
|
||||
if not model:
|
||||
raise OperationalException(
|
||||
f"Unable to load model, ensure model exists at " f"{dk.data_path} "
|
||||
f"Unable to load model, ensure model exists at {dk.data_path} "
|
||||
)
|
||||
|
||||
# load it into ram if it was loaded from disk
|
||||
@@ -639,23 +645,18 @@ class FreqaiDataDrawer:
|
||||
df_dp = strategy.dp.get_pair_dataframe(pair, tf)
|
||||
if len(df_dp.index) == 0:
|
||||
continue
|
||||
if str(hist_df.iloc[-1]["date"]) == str(
|
||||
df_dp.iloc[-1:]["date"].iloc[-1]
|
||||
):
|
||||
if str(hist_df.iloc[-1]["date"]) == str(df_dp.iloc[-1:]["date"].iloc[-1]):
|
||||
continue
|
||||
|
||||
try:
|
||||
index = (
|
||||
df_dp.loc[
|
||||
df_dp["date"] == hist_df.iloc[-1]["date"]
|
||||
].index[0]
|
||||
+ 1
|
||||
)
|
||||
index = df_dp.loc[df_dp["date"] == hist_df.iloc[-1]["date"]].index[0] + 1
|
||||
except IndexError:
|
||||
if hist_df.iloc[-1]['date'] < df_dp['date'].iloc[0]:
|
||||
raise OperationalException("In memory historical data is older than "
|
||||
f"oldest DataProvider candle for {pair} on "
|
||||
f"timeframe {tf}")
|
||||
if hist_df.iloc[-1]["date"] < df_dp["date"].iloc[0]:
|
||||
raise OperationalException(
|
||||
"In memory historical data is older than "
|
||||
f"oldest DataProvider candle for {pair} on "
|
||||
f"timeframe {tf}"
|
||||
)
|
||||
else:
|
||||
index = -1
|
||||
logger.warning(
|
||||
@@ -677,7 +678,7 @@ class FreqaiDataDrawer:
|
||||
axis=0,
|
||||
)
|
||||
|
||||
self.current_candle = history_data[dk.pair][self.config['timeframe']].iloc[-1]['date']
|
||||
self.current_candle = history_data[dk.pair][self.config["timeframe"]].iloc[-1]["date"]
|
||||
|
||||
def load_all_pair_histories(self, timerange: TimeRange, dk: FreqaiDataKitchen) -> None:
|
||||
"""
|
||||
@@ -715,13 +716,12 @@ class FreqaiDataDrawer:
|
||||
corr_dataframes: Dict[Any, Any] = {}
|
||||
base_dataframes: Dict[Any, Any] = {}
|
||||
historic_data = self.historic_data
|
||||
pairs = self.freqai_info["feature_parameters"].get(
|
||||
"include_corr_pairlist", []
|
||||
)
|
||||
pairs = self.freqai_info["feature_parameters"].get("include_corr_pairlist", [])
|
||||
|
||||
for tf in self.freqai_info["feature_parameters"].get("include_timeframes"):
|
||||
base_dataframes[tf] = dk.slice_dataframe(
|
||||
timerange, historic_data[pair][tf]).reset_index(drop=True)
|
||||
timerange, historic_data[pair][tf]
|
||||
).reset_index(drop=True)
|
||||
if pairs:
|
||||
for p in pairs:
|
||||
if pair in p:
|
||||
@@ -741,8 +741,8 @@ class FreqaiDataDrawer:
|
||||
"""
|
||||
if not self.historic_predictions_path.is_file():
|
||||
raise OperationalException(
|
||||
'Historic predictions not found. Historic predictions data is required '
|
||||
'to run backtest with the freqai-backtest-live-models option '
|
||||
"Historic predictions not found. Historic predictions data is required "
|
||||
"to run backtest with the freqai-backtest-live-models option "
|
||||
)
|
||||
|
||||
self.load_historic_predictions_from_disk()
|
||||
@@ -758,6 +758,6 @@ class FreqaiDataDrawer:
|
||||
# add 1 day to string timerange to ensure BT module will load all dataframe data
|
||||
end_date = end_date + timedelta(days=1)
|
||||
backtesting_timerange = TimeRange(
|
||||
'date', 'date', int(start_date.timestamp()), int(end_date.timestamp())
|
||||
"date", "date", int(start_date.timestamp()), int(end_date.timestamp())
|
||||
)
|
||||
return backtesting_timerange
|
||||
|
||||
@@ -24,7 +24,7 @@ from freqtrade.strategy import merge_informative_pair
|
||||
from freqtrade.strategy.interface import IStrategy
|
||||
|
||||
|
||||
pd.set_option('future.no_silent_downcasting', True)
|
||||
pd.set_option("future.no_silent_downcasting", True)
|
||||
|
||||
SECONDS_IN_DAY = 86400
|
||||
SECONDS_IN_HOUR = 3600
|
||||
@@ -98,7 +98,7 @@ class FreqaiDataKitchen:
|
||||
config["freqai"]["backtest_period_days"],
|
||||
)
|
||||
|
||||
self.data['extra_returns_per_train'] = self.freqai_config.get('extra_returns_per_train', {})
|
||||
self.data["extra_returns_per_train"] = self.freqai_config.get("extra_returns_per_train", {})
|
||||
if not self.freqai_config.get("data_kitchen_thread_count", 0):
|
||||
self.thread_count = max(int(psutil.cpu_count() * 2 - 2), 1)
|
||||
else:
|
||||
@@ -120,8 +120,7 @@ class FreqaiDataKitchen:
|
||||
"""
|
||||
self.full_path = self.get_full_models_path(self.config)
|
||||
self.data_path = Path(
|
||||
self.full_path
|
||||
/ f"sub-train-{pair.split('/')[0]}_{trained_timestamp}"
|
||||
self.full_path / f"sub-train-{pair.split('/')[0]}_{trained_timestamp}"
|
||||
)
|
||||
|
||||
return
|
||||
@@ -138,8 +137,8 @@ class FreqaiDataKitchen:
|
||||
"""
|
||||
feat_dict = self.freqai_config["feature_parameters"]
|
||||
|
||||
if 'shuffle' not in self.freqai_config['data_split_parameters']:
|
||||
self.freqai_config["data_split_parameters"].update({'shuffle': False})
|
||||
if "shuffle" not in self.freqai_config["data_split_parameters"]:
|
||||
self.freqai_config["data_split_parameters"].update({"shuffle": False})
|
||||
|
||||
weights: npt.ArrayLike
|
||||
if feat_dict.get("weight_factor", 0) > 0:
|
||||
@@ -147,7 +146,7 @@ class FreqaiDataKitchen:
|
||||
else:
|
||||
weights = np.ones(len(filtered_dataframe))
|
||||
|
||||
if self.freqai_config.get('data_split_parameters', {}).get('test_size', 0.1) != 0:
|
||||
if self.freqai_config.get("data_split_parameters", {}).get("test_size", 0.1) != 0:
|
||||
(
|
||||
train_features,
|
||||
test_features,
|
||||
@@ -172,26 +171,43 @@ class FreqaiDataKitchen:
|
||||
if feat_dict["shuffle_after_split"]:
|
||||
rint1 = random.randint(0, 100)
|
||||
rint2 = random.randint(0, 100)
|
||||
train_features = train_features.sample(
|
||||
frac=1, random_state=rint1).reset_index(drop=True)
|
||||
train_features = train_features.sample(frac=1, random_state=rint1).reset_index(
|
||||
drop=True
|
||||
)
|
||||
train_labels = train_labels.sample(frac=1, random_state=rint1).reset_index(drop=True)
|
||||
train_weights = pd.DataFrame(train_weights).sample(
|
||||
frac=1, random_state=rint1).reset_index(drop=True).to_numpy()[:, 0]
|
||||
train_weights = (
|
||||
pd.DataFrame(train_weights)
|
||||
.sample(frac=1, random_state=rint1)
|
||||
.reset_index(drop=True)
|
||||
.to_numpy()[:, 0]
|
||||
)
|
||||
test_features = test_features.sample(frac=1, random_state=rint2).reset_index(drop=True)
|
||||
test_labels = test_labels.sample(frac=1, random_state=rint2).reset_index(drop=True)
|
||||
test_weights = pd.DataFrame(test_weights).sample(
|
||||
frac=1, random_state=rint2).reset_index(drop=True).to_numpy()[:, 0]
|
||||
test_weights = (
|
||||
pd.DataFrame(test_weights)
|
||||
.sample(frac=1, random_state=rint2)
|
||||
.reset_index(drop=True)
|
||||
.to_numpy()[:, 0]
|
||||
)
|
||||
|
||||
# Simplest way to reverse the order of training and test data:
|
||||
if self.freqai_config['feature_parameters'].get('reverse_train_test_order', False):
|
||||
if self.freqai_config["feature_parameters"].get("reverse_train_test_order", False):
|
||||
return self.build_data_dictionary(
|
||||
test_features, train_features, test_labels,
|
||||
train_labels, test_weights, train_weights
|
||||
)
|
||||
test_features,
|
||||
train_features,
|
||||
test_labels,
|
||||
train_labels,
|
||||
test_weights,
|
||||
train_weights,
|
||||
)
|
||||
else:
|
||||
return self.build_data_dictionary(
|
||||
train_features, test_features, train_labels,
|
||||
test_labels, train_weights, test_weights
|
||||
train_features,
|
||||
test_features,
|
||||
train_labels,
|
||||
test_labels,
|
||||
train_weights,
|
||||
test_weights,
|
||||
)
|
||||
|
||||
def filter_features(
|
||||
@@ -224,26 +240,23 @@ class FreqaiDataKitchen:
|
||||
|
||||
drop_index = pd.isnull(filtered_df).any(axis=1) # get the rows that have NaNs,
|
||||
drop_index = drop_index.replace(True, 1).replace(False, 0).infer_objects(copy=False)
|
||||
if (training_filter):
|
||||
|
||||
if training_filter:
|
||||
# we don't care about total row number (total no. datapoints) in training, we only care
|
||||
# about removing any row with NaNs
|
||||
# if labels has multiple columns (user wants to train multiple modelEs), we detect here
|
||||
labels = unfiltered_df.filter(label_list, axis=1)
|
||||
drop_index_labels = pd.isnull(labels).any(axis=1)
|
||||
drop_index_labels = drop_index_labels.replace(
|
||||
True, 1
|
||||
).replace(False, 0).infer_objects(copy=False)
|
||||
dates = unfiltered_df['date']
|
||||
drop_index_labels = (
|
||||
drop_index_labels.replace(True, 1).replace(False, 0).infer_objects(copy=False)
|
||||
)
|
||||
dates = unfiltered_df["date"]
|
||||
filtered_df = filtered_df[
|
||||
(drop_index == 0) & (drop_index_labels == 0)
|
||||
] # dropping values
|
||||
labels = labels[
|
||||
(drop_index == 0) & (drop_index_labels == 0)
|
||||
] # assuming the labels depend entirely on the dataframe here.
|
||||
self.train_dates = dates[
|
||||
(drop_index == 0) & (drop_index_labels == 0)
|
||||
]
|
||||
self.train_dates = dates[(drop_index == 0) & (drop_index_labels == 0)]
|
||||
logger.info(
|
||||
f"{self.pair}: dropped {len(unfiltered_df) - len(filtered_df)} training points"
|
||||
f" due to NaNs in populated dataset {len(unfiltered_df)}."
|
||||
@@ -266,7 +279,6 @@ class FreqaiDataKitchen:
|
||||
self.data["filter_drop_index_training"] = drop_index
|
||||
|
||||
else:
|
||||
|
||||
# we are backtesting so we need to preserve row number to send back to strategy,
|
||||
# so now we use do_predict to avoid any prediction based on a NaN
|
||||
drop_index = pd.isnull(filtered_df).any(axis=1)
|
||||
@@ -295,7 +307,6 @@ class FreqaiDataKitchen:
|
||||
train_weights: Any,
|
||||
test_weights: Any,
|
||||
) -> Dict:
|
||||
|
||||
self.data_dictionary = {
|
||||
"train_features": train_df,
|
||||
"test_features": test_df,
|
||||
@@ -303,7 +314,7 @@ class FreqaiDataKitchen:
|
||||
"test_labels": test_labels,
|
||||
"train_weights": train_weights,
|
||||
"test_weights": test_weights,
|
||||
"train_dates": self.train_dates
|
||||
"train_dates": self.train_dates,
|
||||
}
|
||||
|
||||
return self.data_dictionary
|
||||
@@ -330,9 +341,7 @@ class FreqaiDataKitchen:
|
||||
full_timerange = TimeRange.parse_timerange(tr)
|
||||
config_timerange = TimeRange.parse_timerange(self.config["timerange"])
|
||||
if config_timerange.stopts == 0:
|
||||
config_timerange.stopts = int(
|
||||
datetime.now(tz=timezone.utc).timestamp()
|
||||
)
|
||||
config_timerange.stopts = int(datetime.now(tz=timezone.utc).timestamp())
|
||||
timerange_train = copy.deepcopy(full_timerange)
|
||||
timerange_backtest = copy.deepcopy(full_timerange)
|
||||
|
||||
@@ -412,9 +421,9 @@ class FreqaiDataKitchen:
|
||||
weights = np.exp(-np.arange(num_weights) / (wfactor * num_weights))[::-1]
|
||||
return weights
|
||||
|
||||
def get_predictions_to_append(self, predictions: DataFrame,
|
||||
do_predict: npt.ArrayLike,
|
||||
dataframe_backtest: DataFrame) -> DataFrame:
|
||||
def get_predictions_to_append(
|
||||
self, predictions: DataFrame, do_predict: npt.ArrayLike, dataframe_backtest: DataFrame
|
||||
) -> DataFrame:
|
||||
"""
|
||||
Get backtest prediction from current backtest period
|
||||
"""
|
||||
@@ -459,18 +468,18 @@ class FreqaiDataKitchen:
|
||||
Back fill values to before the backtesting range so that the dataframe matches size
|
||||
when it goes back to the strategy. These rows are not included in the backtest.
|
||||
"""
|
||||
to_keep = [col for col in dataframe.columns if
|
||||
not col.startswith("&") and not col.startswith("%%")]
|
||||
self.return_dataframe = pd.merge(dataframe[to_keep],
|
||||
self.full_df, how='left', on='date')
|
||||
self.return_dataframe[self.full_df.columns] = (
|
||||
self.return_dataframe[self.full_df.columns].fillna(value=0))
|
||||
to_keep = [
|
||||
col for col in dataframe.columns if not col.startswith("&") and not col.startswith("%%")
|
||||
]
|
||||
self.return_dataframe = pd.merge(dataframe[to_keep], self.full_df, how="left", on="date")
|
||||
self.return_dataframe[self.full_df.columns] = self.return_dataframe[
|
||||
self.full_df.columns
|
||||
].fillna(value=0)
|
||||
self.full_df = DataFrame()
|
||||
|
||||
return
|
||||
|
||||
def create_fulltimerange(self, backtest_tr: str, backtest_period_days: int) -> str:
|
||||
|
||||
if not isinstance(backtest_period_days, int):
|
||||
raise OperationalException("backtest_period_days must be an integer")
|
||||
|
||||
@@ -484,9 +493,11 @@ class FreqaiDataKitchen:
|
||||
# it does not. accommodating these kinds of edge cases just to allow open-ended
|
||||
# timerange is not high enough priority to warrant the effort. It is safer for now
|
||||
# to simply ask user to add their end date
|
||||
raise OperationalException("FreqAI backtesting does not allow open ended timeranges. "
|
||||
"Please indicate the end date of your desired backtesting. "
|
||||
"timerange.")
|
||||
raise OperationalException(
|
||||
"FreqAI backtesting does not allow open ended timeranges. "
|
||||
"Please indicate the end date of your desired backtesting. "
|
||||
"timerange."
|
||||
)
|
||||
# backtest_timerange.stopts = int(
|
||||
# datetime.now(tz=timezone.utc).timestamp()
|
||||
# )
|
||||
@@ -525,7 +536,6 @@ class FreqaiDataKitchen:
|
||||
def check_if_new_training_required(
|
||||
self, trained_timestamp: int
|
||||
) -> Tuple[bool, TimeRange, TimeRange]:
|
||||
|
||||
time = datetime.now(tz=timezone.utc).timestamp()
|
||||
trained_timerange = TimeRange()
|
||||
data_load_timerange = TimeRange()
|
||||
@@ -541,7 +551,7 @@ class FreqaiDataKitchen:
|
||||
# We notice that users like to use exotic indicators where
|
||||
# they do not know the required timeperiod. Here we include a factor
|
||||
# of safety by multiplying the user considered "max" by 2.
|
||||
max_period = self.config.get('startup_candle_count', 20) * 2
|
||||
max_period = self.config.get("startup_candle_count", 20) * 2
|
||||
additional_seconds = max_period * max_tf_seconds
|
||||
|
||||
if trained_timestamp != 0:
|
||||
@@ -578,17 +588,12 @@ class FreqaiDataKitchen:
|
||||
return retrain, trained_timerange, data_load_timerange
|
||||
|
||||
def set_new_model_names(self, pair: str, timestamp_id: int):
|
||||
|
||||
coin, _ = pair.split("/")
|
||||
self.data_path = Path(
|
||||
self.full_path
|
||||
/ f"sub-train-{pair.split('/')[0]}_{timestamp_id}"
|
||||
)
|
||||
self.data_path = Path(self.full_path / f"sub-train-{pair.split('/')[0]}_{timestamp_id}")
|
||||
|
||||
self.model_filename = f"cb_{coin.lower()}_{timestamp_id}"
|
||||
|
||||
def set_all_pairs(self) -> None:
|
||||
|
||||
self.all_pairs = copy.deepcopy(
|
||||
self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])
|
||||
)
|
||||
@@ -597,8 +602,7 @@ class FreqaiDataKitchen:
|
||||
self.all_pairs.append(pair)
|
||||
|
||||
def extract_corr_pair_columns_from_populated_indicators(
|
||||
self,
|
||||
dataframe: DataFrame
|
||||
self, dataframe: DataFrame
|
||||
) -> Dict[str, DataFrame]:
|
||||
"""
|
||||
Find the columns of the dataframe corresponding to the corr_pairlist, save them
|
||||
@@ -612,19 +616,20 @@ class FreqaiDataKitchen:
|
||||
pairs = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])
|
||||
|
||||
for pair in pairs:
|
||||
pair = pair.replace(':', '') # lightgbm does not like colons
|
||||
pair_cols = [col for col in dataframe.columns if col.startswith("%")
|
||||
and f"{pair}_" in col]
|
||||
pair = pair.replace(":", "") # lightgbm does not like colons
|
||||
pair_cols = [
|
||||
col for col in dataframe.columns if col.startswith("%") and f"{pair}_" in col
|
||||
]
|
||||
|
||||
if pair_cols:
|
||||
pair_cols.insert(0, 'date')
|
||||
pair_cols.insert(0, "date")
|
||||
corr_dataframes[pair] = dataframe.filter(pair_cols, axis=1)
|
||||
|
||||
return corr_dataframes
|
||||
|
||||
def attach_corr_pair_columns(self, dataframe: DataFrame,
|
||||
corr_dataframes: Dict[str, DataFrame],
|
||||
current_pair: str) -> DataFrame:
|
||||
def attach_corr_pair_columns(
|
||||
self, dataframe: DataFrame, corr_dataframes: Dict[str, DataFrame], current_pair: str
|
||||
) -> DataFrame:
|
||||
"""
|
||||
Attach the existing corr_pair dataframes to the current pair dataframe before training
|
||||
|
||||
@@ -636,21 +641,23 @@ class FreqaiDataKitchen:
|
||||
ready for training
|
||||
"""
|
||||
pairs = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])
|
||||
current_pair = current_pair.replace(':', '')
|
||||
current_pair = current_pair.replace(":", "")
|
||||
for pair in pairs:
|
||||
pair = pair.replace(':', '') # lightgbm does not work with colons
|
||||
pair = pair.replace(":", "") # lightgbm does not work with colons
|
||||
if current_pair != pair:
|
||||
dataframe = dataframe.merge(corr_dataframes[pair], how='left', on='date')
|
||||
dataframe = dataframe.merge(corr_dataframes[pair], how="left", on="date")
|
||||
|
||||
return dataframe
|
||||
|
||||
def get_pair_data_for_features(self,
|
||||
pair: str,
|
||||
tf: str,
|
||||
strategy: IStrategy,
|
||||
corr_dataframes: dict = {},
|
||||
base_dataframes: dict = {},
|
||||
is_corr_pairs: bool = False) -> DataFrame:
|
||||
def get_pair_data_for_features(
|
||||
self,
|
||||
pair: str,
|
||||
tf: str,
|
||||
strategy: IStrategy,
|
||||
corr_dataframes: dict = {},
|
||||
base_dataframes: dict = {},
|
||||
is_corr_pairs: bool = False,
|
||||
) -> DataFrame:
|
||||
"""
|
||||
Get the data for the pair. If it's not in the dictionary, get it from the data provider
|
||||
:param pair: str = pair to get data for
|
||||
@@ -678,8 +685,9 @@ class FreqaiDataKitchen:
|
||||
dataframe = strategy.dp.get_pair_dataframe(pair=pair, timeframe=tf)
|
||||
return dataframe
|
||||
|
||||
def merge_features(self, df_main: DataFrame, df_to_merge: DataFrame,
|
||||
tf: str, timeframe_inf: str, suffix: str) -> DataFrame:
|
||||
def merge_features(
|
||||
self, df_main: DataFrame, df_to_merge: DataFrame, tf: str, timeframe_inf: str, suffix: str
|
||||
) -> DataFrame:
|
||||
"""
|
||||
Merge the features of the dataframe and remove HLCV and date added columns
|
||||
:param df_main: DataFrame = main dataframe
|
||||
@@ -689,17 +697,30 @@ class FreqaiDataKitchen:
|
||||
:param suffix: str = suffix to add to the columns of the dataframe to merge
|
||||
:return: dataframe = merged dataframe
|
||||
"""
|
||||
dataframe = merge_informative_pair(df_main, df_to_merge, tf, timeframe_inf=timeframe_inf,
|
||||
append_timeframe=False, suffix=suffix, ffill=True)
|
||||
dataframe = merge_informative_pair(
|
||||
df_main,
|
||||
df_to_merge,
|
||||
tf,
|
||||
timeframe_inf=timeframe_inf,
|
||||
append_timeframe=False,
|
||||
suffix=suffix,
|
||||
ffill=True,
|
||||
)
|
||||
skip_columns = [
|
||||
(f"{s}_{suffix}") for s in ["date", "open", "high", "low", "close", "volume"]
|
||||
]
|
||||
dataframe = dataframe.drop(columns=skip_columns)
|
||||
return dataframe
|
||||
|
||||
def populate_features(self, dataframe: DataFrame, pair: str, strategy: IStrategy,
|
||||
corr_dataframes: dict, base_dataframes: dict,
|
||||
is_corr_pairs: bool = False) -> DataFrame:
|
||||
def populate_features(
|
||||
self,
|
||||
dataframe: DataFrame,
|
||||
pair: str,
|
||||
strategy: IStrategy,
|
||||
corr_dataframes: dict,
|
||||
base_dataframes: dict,
|
||||
is_corr_pairs: bool = False,
|
||||
) -> DataFrame:
|
||||
"""
|
||||
Use the user defined strategy functions for populating features
|
||||
:param dataframe: DataFrame = dataframe to populate
|
||||
@@ -715,19 +736,22 @@ class FreqaiDataKitchen:
|
||||
for tf in tfs:
|
||||
metadata = {"pair": pair, "tf": tf}
|
||||
informative_df = self.get_pair_data_for_features(
|
||||
pair, tf, strategy, corr_dataframes, base_dataframes, is_corr_pairs)
|
||||
pair, tf, strategy, corr_dataframes, base_dataframes, is_corr_pairs
|
||||
)
|
||||
informative_copy = informative_df.copy()
|
||||
|
||||
logger.debug(f"Populating features for {pair} {tf}")
|
||||
|
||||
for t in self.freqai_config["feature_parameters"]["indicator_periods_candles"]:
|
||||
df_features = strategy.feature_engineering_expand_all(
|
||||
informative_copy.copy(), t, metadata=metadata)
|
||||
informative_copy.copy(), t, metadata=metadata
|
||||
)
|
||||
suffix = f"{t}"
|
||||
informative_df = self.merge_features(informative_df, df_features, tf, tf, suffix)
|
generic_df = strategy.feature_engineering_expand_basic(
informative_copy.copy(), metadata=metadata)
informative_copy.copy(), metadata=metadata
)
suffix = "gen"

informative_df = self.merge_features(informative_df, generic_df, tf, tf, suffix)
@@ -740,8 +764,9 @@ class FreqaiDataKitchen:
df_shift = df_shift.add_suffix("_shift-" + str(n))
informative_df = pd.concat((informative_df, df_shift), axis=1)

dataframe = self.merge_features(dataframe.copy(), informative_df,
self.config["timeframe"], tf, f'{pair}_{tf}')
dataframe = self.merge_features(
dataframe.copy(), informative_df, self.config["timeframe"], tf, f"{pair}_{tf}"
)

return dataframe

@@ -771,7 +796,8 @@ class FreqaiDataKitchen:

# check if the user is using the deprecated populate_any_indicators function
new_version = inspect.getsource(strategy.populate_any_indicators) == (
inspect.getsource(IStrategy.populate_any_indicators))
inspect.getsource(IStrategy.populate_any_indicators)
)

if not new_version:
raise OperationalException(
@@ -782,11 +808,10 @@ class FreqaiDataKitchen:
f"{DOCS_LINK}/strategy_migration/#freqai-strategy \n"
"And the feature_engineering_* documentation: \n"
f"{DOCS_LINK}/freqai-feature-engineering/"
)
)

tfs: List[str] = self.freqai_config["feature_parameters"].get("include_timeframes")
pairs: List[str] = self.freqai_config["feature_parameters"].get(
"include_corr_pairlist", [])
pairs: List[str] = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])

for tf in tfs:
if tf not in base_dataframes:
@@ -804,9 +829,11 @@ class FreqaiDataKitchen:
dataframe = base_dataframes[self.config["timeframe"]].copy()

corr_pairs: List[str] = self.freqai_config["feature_parameters"].get(
"include_corr_pairlist", [])
dataframe = self.populate_features(dataframe.copy(), pair, strategy,
corr_dataframes, base_dataframes)
"include_corr_pairlist", []
)
dataframe = self.populate_features(
dataframe.copy(), pair, strategy, corr_dataframes, base_dataframes
)
metadata = {"pair": pair}
dataframe = strategy.feature_engineering_standard(dataframe.copy(), metadata=metadata)
# ensure corr pairs are always last
@@ -814,8 +841,9 @@ class FreqaiDataKitchen:
if pair == corr_pair:
continue # dont repeat anything from whitelist
if corr_pairs and do_corr_pairs:
dataframe = self.populate_features(dataframe.copy(), corr_pair, strategy,
corr_dataframes, base_dataframes, True)
dataframe = self.populate_features(
dataframe.copy(), corr_pair, strategy, corr_dataframes, base_dataframes, True
)

if self.live:
dataframe = strategy.set_freqai_targets(dataframe.copy(), metadata=metadata)
@@ -823,7 +851,7 @@ class FreqaiDataKitchen:

self.get_unique_classes_from_labels(dataframe)

if self.config.get('reduce_df_footprint', False):
if self.config.get("reduce_df_footprint", False):
dataframe = reduce_dataframe_footprint(dataframe)

return dataframe
@@ -858,7 +886,6 @@ class FreqaiDataKitchen:
return dataframe[to_keep]

def get_unique_classes_from_labels(self, dataframe: DataFrame) -> None:

# self.find_features(dataframe)
self.find_labels(dataframe)

@@ -870,9 +897,7 @@ class FreqaiDataKitchen:
for label in self.unique_classes:
self.unique_class_list += list(self.unique_classes[label])

def save_backtesting_prediction(
self, append_df: DataFrame
) -> None:
def save_backtesting_prediction(self, append_df: DataFrame) -> None:
"""
Save prediction dataframe from backtesting to feather file format
:param append_df: dataframe for backtesting period
@@ -883,19 +908,14 @@ class FreqaiDataKitchen:

append_df.to_feather(self.backtesting_results_path)

def get_backtesting_prediction(
self
) -> DataFrame:
def get_backtesting_prediction(self) -> DataFrame:
"""
Get prediction dataframe from feather file format
"""
append_df = pd.read_feather(self.backtesting_results_path)
return append_df

def check_if_backtest_prediction_is_valid(
self,
len_backtest_df: int
) -> bool:
def check_if_backtest_prediction_is_valid(self, len_backtest_df: int) -> bool:
"""
Check if a backtesting prediction already exists and if the predictions
to append have the same size as the backtesting dataframe slice
@@ -903,27 +923,29 @@ class FreqaiDataKitchen:
:return:
:boolean: whether the prediction file is valid.
"""
path_to_predictionfile = Path(self.full_path /
self.backtest_predictions_folder /
f"{self.model_filename}_prediction.feather")
path_to_predictionfile = Path(
self.full_path
/ self.backtest_predictions_folder
/ f"{self.model_filename}_prediction.feather"
)
self.backtesting_results_path = path_to_predictionfile

file_exists = path_to_predictionfile.is_file()

if file_exists:
append_df = self.get_backtesting_prediction()
if len(append_df) == len_backtest_df and 'date' in append_df:
if len(append_df) == len_backtest_df and "date" in append_df:
logger.info(f"Found backtesting prediction file at {path_to_predictionfile}")
return True
else:
logger.info("A new backtesting prediction file is required. "
"(Number of predictions is different from dataframe length or "
"old prediction file version).")
logger.info(
"A new backtesting prediction file is required. "
"(Number of predictions is different from dataframe length or "
"old prediction file version)."
)
return False
else:
logger.info(
f"Could not find backtesting prediction file at {path_to_predictionfile}"
)
logger.info(f"Could not find backtesting prediction file at {path_to_predictionfile}")
return False

def get_full_models_path(self, config: Config) -> Path:
@@ -932,9 +954,7 @@ class FreqaiDataKitchen:
:param config: Configuration dictionary
"""
freqai_config: Dict[str, Any] = config["freqai"]
return Path(
config["user_data_dir"] / "models" / str(freqai_config.get("identifier"))
)
return Path(config["user_data_dir"] / "models" / str(freqai_config.get("identifier")))

def remove_special_chars_from_feature_names(self, dataframe: pd.DataFrame) -> pd.DataFrame:
"""
@@ -943,7 +963,7 @@ class FreqaiDataKitchen:
:return: dataframe with cleaned featrue names
"""

spec_chars = [':']
spec_chars = [":"]
for c in spec_chars:
dataframe.columns = dataframe.columns.str.replace(c, "")

@@ -976,12 +996,14 @@ class FreqaiDataKitchen:
"""
Deprecation warning, migration assistance
"""
logger.warning(f"Your custom IFreqaiModel relies on the deprecated"
" data pipeline. Please update your model to use the new data pipeline."
" This can be achieved by following the migration guide at "
f"{DOCS_LINK}/strategy_migration/#freqai-new-data-pipeline "
"We added a basic pipeline for you, but this will be removed "
"in a future version.")
logger.warning(
f"Your custom IFreqaiModel relies on the deprecated"
" data pipeline. Please update your model to use the new data pipeline."
" This can be achieved by following the migration guide at "
f"{DOCS_LINK}/strategy_migration/#freqai-new-data-pipeline "
"We added a basic pipeline for you, but this will be removed "
"in a future version."
)

return data_dictionary

@@ -989,12 +1011,14 @@ class FreqaiDataKitchen:
"""
Deprecation warning, migration assistance
"""
logger.warning(f"Your custom IFreqaiModel relies on the deprecated"
" data pipeline. Please update your model to use the new data pipeline."
" This can be achieved by following the migration guide at "
f"{DOCS_LINK}/strategy_migration/#freqai-new-data-pipeline "
"We added a basic pipeline for you, but this will be removed "
"in a future version.")
logger.warning(
f"Your custom IFreqaiModel relies on the deprecated"
" data pipeline. Please update your model to use the new data pipeline."
" This can be achieved by following the migration guide at "
f"{DOCS_LINK}/strategy_migration/#freqai-new-data-pipeline "
"We added a basic pipeline for you, but this will be removed "
"in a future version."
)

pred_df, _, _ = self.label_pipeline.inverse_transform(df)

@@ -57,21 +57,22 @@ class IFreqaiModel(ABC):
"""

def __init__(self, config: Config) -> None:

self.config = config
self.assert_config(self.config)
self.freqai_info: Dict[str, Any] = config["freqai"]
self.data_split_parameters: Dict[str, Any] = config.get("freqai", {}).get(
"data_split_parameters", {})
"data_split_parameters", {}
)
self.model_training_parameters: Dict[str, Any] = config.get("freqai", {}).get(
"model_training_parameters", {})
"model_training_parameters", {}
)
self.identifier: str = self.freqai_info.get("identifier", "no_id_provided")
self.retrain = False
self.first = True
self.set_full_path()
self.save_backtest_models: bool = self.freqai_info.get("save_backtest_models", True)
if self.save_backtest_models:
logger.info('Backtesting module configured to save all models.')
logger.info("Backtesting module configured to save all models.")

self.dd = FreqaiDataDrawer(Path(self.full_path), self.config)
# set current candle to arbitrary historical date
@@ -85,7 +86,7 @@ class IFreqaiModel(ABC):
self.ft_params["DI_threshold"] = 0
logger.warning("DI threshold is not configured for Keras models yet. Deactivating.")

self.CONV_WIDTH = self.freqai_info.get('conv_width', 1)
self.CONV_WIDTH = self.freqai_info.get("conv_width", 1)
self.class_names: List[str] = [] # used in classification subclasses
self.pair_it = 0
self.pair_it_train = 0
@@ -95,8 +96,8 @@ class IFreqaiModel(ABC):
self.train_time: float = 0
self.begin_time: float = 0
self.begin_time_train: float = 0
self.base_tf_seconds = timeframe_to_seconds(self.config['timeframe'])
self.continual_learning = self.freqai_info.get('continual_learning', False)
self.base_tf_seconds = timeframe_to_seconds(self.config["timeframe"])
self.continual_learning = self.freqai_info.get("continual_learning", False)
self.plot_features = self.ft_params.get("plot_feature_importances", 0)
self.corr_dataframes: Dict[str, DataFrame] = {}
# get_corr_dataframes is controlling the caching of corr_dataframes
@@ -109,10 +110,10 @@ class IFreqaiModel(ABC):
self.max_system_threads = max(int(psutil.cpu_count() * 2 - 2), 1)
self.can_short = True # overridden in start() with strategy.can_short
self.model: Any = None
if self.ft_params.get('principal_component_analysis', False) and self.continual_learning:
self.ft_params.update({'principal_component_analysis': False})
logger.warning('User tried to use PCA with continual learning. Deactivating PCA.')
self.activate_tensorboard: bool = self.freqai_info.get('activate_tensorboard', True)
if self.ft_params.get("principal_component_analysis", False) and self.continual_learning:
self.ft_params.update({"principal_component_analysis": False})
logger.warning("User tried to use PCA with continual learning. Deactivating PCA.")
self.activate_tensorboard: bool = self.freqai_info.get("activate_tensorboard", True)

record_params(config, self.full_path)

@@ -120,10 +121,9 @@ class IFreqaiModel(ABC):
"""
Return an empty state to be pickled in hyperopt
"""
return ({})
return {}

def assert_config(self, config: Config) -> None:

if not config.get("freqai", {}):
raise OperationalException("No freqai parameters found in configuration file.")

@@ -144,7 +144,7 @@ class IFreqaiModel(ABC):
self.can_short = strategy.can_short

if self.live:
self.inference_timer('start')
self.inference_timer("start")
self.dk = FreqaiDataKitchen(self.config, self.live, metadata["pair"])
dk = self.start_live(dataframe, metadata, strategy, self.dk)
dataframe = dk.remove_features_from_df(dk.return_dataframe)
@@ -162,13 +162,12 @@ class IFreqaiModel(ABC):
dataframe = dk.remove_features_from_df(dk.return_dataframe)
else:
logger.info("Backtesting using historic predictions (live models)")
dk = self.start_backtesting_from_historic_predictions(
dataframe, metadata, self.dk)
dk = self.start_backtesting_from_historic_predictions(dataframe, metadata, self.dk)
dataframe = dk.return_dataframe

self.clean_up()
if self.live:
self.inference_timer('stop', metadata["pair"])
self.inference_timer("stop", metadata["pair"])

return dataframe

@@ -225,7 +224,7 @@ class IFreqaiModel(ABC):
# ensure pair is available in dp
if pair not in strategy.dp.current_whitelist():
self.train_queue.popleft()
logger.warning(f'{pair} not in current whitelist, removing from train queue.')
logger.warning(f"{pair} not in current whitelist, removing from train queue.")
continue

(_, trained_timestamp) = self.dd.get_pair_dict_info(pair)
@@ -238,23 +237,25 @@ class IFreqaiModel(ABC):
) = dk.check_if_new_training_required(trained_timestamp)

if retrain:
self.train_timer('start')
self.train_timer("start")
dk.set_paths(pair, new_trained_timerange.stopts)
try:
self.extract_data_and_train_model(
new_trained_timerange, pair, strategy, dk, data_load_timerange
)
except Exception as msg:
logger.exception(f"Training {pair} raised exception {msg.__class__.__name__}. "
f"Message: {msg}, skipping.")
logger.exception(
f"Training {pair} raised exception {msg.__class__.__name__}. "
f"Message: {msg}, skipping."
)

self.train_timer('stop', pair)
self.train_timer("stop", pair)

# only rotate the queue after the first has been trained.
self.train_queue.rotate(-1)

self.dd.save_historic_predictions_to_disk()
if self.freqai_info.get('write_metrics_to_disk', False):
if self.freqai_info.get("write_metrics_to_disk", False):
self.dd.save_metric_tracker_to_disk()

def start_backtesting(
@@ -290,8 +291,13 @@ class IFreqaiModel(ABC):
train_it += 1
total_trains = len(dk.backtesting_timeranges)
self.training_timerange = tr_train
len_backtest_df = len(dataframe.loc[(dataframe["date"] >= tr_backtest.startdt) & (
dataframe["date"] < tr_backtest.stopdt), :])
len_backtest_df = len(
dataframe.loc[
(dataframe["date"] >= tr_backtest.startdt)
& (dataframe["date"] < tr_backtest.stopdt),
:,
]
)

if not self.ensure_data_exists(len_backtest_df, tr_backtest, pair):
continue
@@ -327,10 +333,12 @@ class IFreqaiModel(ABC):

dataframe_base_train = dataframe.loc[dataframe["date"] < tr_train.stopdt, :]
dataframe_base_train = strategy.set_freqai_targets(
dataframe_base_train, metadata=metadata)
dataframe_base_train, metadata=metadata
)
dataframe_base_backtest = dataframe.loc[dataframe["date"] < tr_backtest.stopdt, :]
dataframe_base_backtest = strategy.set_freqai_targets(
dataframe_base_backtest, metadata=metadata)
dataframe_base_backtest, metadata=metadata
)

tr_train = dk.buffer_timerange(tr_train)

@@ -346,25 +354,27 @@ class IFreqaiModel(ABC):
dk.find_labels(dataframe_train)

try:
self.tb_logger = get_tb_logger(self.dd.model_type, dk.data_path,
self.activate_tensorboard)
self.tb_logger = get_tb_logger(
self.dd.model_type, dk.data_path, self.activate_tensorboard
)
self.model = self.train(dataframe_train, pair, dk)
self.tb_logger.close()
except Exception as msg:
logger.warning(
f"Training {pair} raised exception {msg.__class__.__name__}. "
f"Message: {msg}, skipping.", exc_info=True)
f"Message: {msg}, skipping.",
exc_info=True,
)
self.model = None

self.dd.pair_dict[pair]["trained_timestamp"] = int(
tr_train.stopts)
self.dd.pair_dict[pair]["trained_timestamp"] = int(tr_train.stopts)
if self.plot_features and self.model is not None:
plot_feature_importance(self.model, pair, dk, self.plot_features)
if self.save_backtest_models and self.model is not None:
logger.info('Saving backtest model to disk.')
logger.info("Saving backtest model to disk.")
self.dd.save_data(self.model, pair, dk)
else:
logger.info('Saving metadata to disk.')
logger.info("Saving metadata to disk.")
self.dd.save_metadata(dk)
else:
self.model = self.dd.load_data(pair, dk)
@@ -394,9 +404,11 @@ class IFreqaiModel(ABC):
"""

if not strategy.process_only_new_candles:
raise OperationalException("You are trying to use a FreqAI strategy with "
"process_only_new_candles = False. This is not supported "
"by FreqAI, and it is therefore aborting.")
raise OperationalException(
"You are trying to use a FreqAI strategy with "
"process_only_new_candles = False. This is not supported "
"by FreqAI, and it is therefore aborting."
)

# get the model metadata associated with the current pair
(_, trained_timestamp) = self.dd.get_pair_dict_info(metadata["pair"])
@@ -424,8 +436,10 @@ class IFreqaiModel(ABC):
self.model = self.dd.load_data(metadata["pair"], dk)

dataframe = dk.use_strategy_to_populate_indicators(
strategy, prediction_dataframe=dataframe, pair=metadata["pair"],
do_corr_pairs=self.get_corr_dataframes
strategy,
prediction_dataframe=dataframe,
pair=metadata["pair"],
do_corr_pairs=self.get_corr_dataframes,
)

if not self.model:
@@ -447,7 +461,6 @@ class IFreqaiModel(ABC):
def build_strategy_return_arrays(
self, dataframe: DataFrame, dk: FreqaiDataKitchen, pair: str, trained_timestamp: int
) -> None:

# hold the historical predictions in memory so we are sending back
# correct array to strategy

@@ -473,18 +486,16 @@ class IFreqaiModel(ABC):
else:
# remaining predictions are made only on the most recent candles for performance and
# historical accuracy reasons.
pred_df, do_preds = self.predict(dataframe.iloc[-self.CONV_WIDTH:], dk, first=False)
pred_df, do_preds = self.predict(dataframe.iloc[-self.CONV_WIDTH :], dk, first=False)

if self.freqai_info.get('fit_live_predictions_candles', 0) and self.live:
if self.freqai_info.get("fit_live_predictions_candles", 0) and self.live:
self.fit_live_predictions(dk, pair)
self.dd.append_model_predictions(pair, pred_df, do_preds, dk, dataframe)
dk.return_dataframe = self.dd.attach_return_values_to_return_dataframe(pair, dataframe)

return

def check_if_feature_list_matches_strategy(
self, dk: FreqaiDataKitchen
) -> None:
def check_if_feature_list_matches_strategy(self, dk: FreqaiDataKitchen) -> None:
"""
Ensure user is passing the proper feature set if they are reusing an `identifier` pointing
to a folder holding existing models.
@@ -496,7 +507,7 @@ class IFreqaiModel(ABC):
if "training_features_list_raw" in dk.data:
feature_list = dk.data["training_features_list_raw"]
else:
feature_list = dk.data['training_features_list']
feature_list = dk.data["training_features_list"]

if dk.training_features_list != feature_list:
raise OperationalException(
@@ -512,38 +523,35 @@ class IFreqaiModel(ABC):
def define_data_pipeline(self, threads=-1) -> Pipeline:
ft_params = self.freqai_info["feature_parameters"]
pipe_steps = [
('const', ds.VarianceThreshold(threshold=0)),
('scaler', SKLearnWrapper(MinMaxScaler(feature_range=(-1, 1))))
]
("const", ds.VarianceThreshold(threshold=0)),
("scaler", SKLearnWrapper(MinMaxScaler(feature_range=(-1, 1)))),
]

if ft_params.get("principal_component_analysis", False):
pipe_steps.append(('pca', ds.PCA(n_components=0.999)))
pipe_steps.append(('post-pca-scaler',
SKLearnWrapper(MinMaxScaler(feature_range=(-1, 1)))))
pipe_steps.append(("pca", ds.PCA(n_components=0.999)))
pipe_steps.append(
("post-pca-scaler", SKLearnWrapper(MinMaxScaler(feature_range=(-1, 1))))
)

if ft_params.get("use_SVM_to_remove_outliers", False):
svm_params = ft_params.get(
"svm_params", {"shuffle": False, "nu": 0.01})
pipe_steps.append(('svm', ds.SVMOutlierExtractor(**svm_params)))
svm_params = ft_params.get("svm_params", {"shuffle": False, "nu": 0.01})
pipe_steps.append(("svm", ds.SVMOutlierExtractor(**svm_params)))

di = ft_params.get("DI_threshold", 0)
if di:
pipe_steps.append(('di', ds.DissimilarityIndex(di_threshold=di, n_jobs=threads)))
pipe_steps.append(("di", ds.DissimilarityIndex(di_threshold=di, n_jobs=threads)))

if ft_params.get("use_DBSCAN_to_remove_outliers", False):
pipe_steps.append(('dbscan', ds.DBSCAN(n_jobs=threads)))
pipe_steps.append(("dbscan", ds.DBSCAN(n_jobs=threads)))

sigma = self.freqai_info["feature_parameters"].get('noise_standard_deviation', 0)
sigma = self.freqai_info["feature_parameters"].get("noise_standard_deviation", 0)
if sigma:
pipe_steps.append(('noise', ds.Noise(sigma=sigma)))
pipe_steps.append(("noise", ds.Noise(sigma=sigma)))

return Pipeline(pipe_steps)

def define_label_pipeline(self, threads=-1) -> Pipeline:

label_pipeline = Pipeline([
('scaler', SKLearnWrapper(MinMaxScaler(feature_range=(-1, 1))))
])
label_pipeline = Pipeline([("scaler", SKLearnWrapper(MinMaxScaler(feature_range=(-1, 1))))])

return label_pipeline

@@ -555,7 +563,7 @@ class IFreqaiModel(ABC):
:return:
:boolean: whether the model file exists or not.
"""
if self.dd.model_type == 'joblib':
if self.dd.model_type == "joblib":
file_type = ".joblib"
elif self.dd.model_type in ["stable_baselines3", "sb3_contrib", "pytorch"]:
file_type = ".zip"
@@ -572,9 +580,7 @@ class IFreqaiModel(ABC):
"""
Creates and sets the full path for the identifier
"""
self.full_path = Path(
self.config["user_data_dir"] / "models" / f"{self.identifier}"
)
self.full_path = Path(self.config["user_data_dir"] / "models" / f"{self.identifier}")
self.full_path.mkdir(parents=True, exist_ok=True)

def extract_data_and_train_model(
@@ -615,8 +621,7 @@ class IFreqaiModel(ABC):
dk.find_features(unfiltered_dataframe)
dk.find_labels(unfiltered_dataframe)

self.tb_logger = get_tb_logger(self.dd.model_type, dk.data_path,
self.activate_tensorboard)
self.tb_logger = get_tb_logger(self.dd.model_type, dk.data_path, self.activate_tensorboard)
model = self.train(unfiltered_dataframe, pair, dk)
self.tb_logger.close()

@@ -664,21 +669,21 @@ class IFreqaiModel(ABC):
for label in hist_preds_df.columns:
if hist_preds_df[label].dtype == object:
continue
hist_preds_df[f'{label}_mean'] = 0
hist_preds_df[f'{label}_std'] = 0
hist_preds_df[f"{label}_mean"] = 0
hist_preds_df[f"{label}_std"] = 0

hist_preds_df['do_predict'] = 0
hist_preds_df["do_predict"] = 0

if self.freqai_info['feature_parameters'].get('DI_threshold', 0) > 0:
hist_preds_df['DI_values'] = 0
if self.freqai_info["feature_parameters"].get("DI_threshold", 0) > 0:
hist_preds_df["DI_values"] = 0

for return_str in dk.data['extra_returns_per_train']:
hist_preds_df[return_str] = dk.data['extra_returns_per_train'][return_str]
for return_str in dk.data["extra_returns_per_train"]:
hist_preds_df[return_str] = dk.data["extra_returns_per_train"][return_str]

hist_preds_df['high_price'] = strat_df['high']
hist_preds_df['low_price'] = strat_df['low']
hist_preds_df['close_price'] = strat_df['close']
hist_preds_df['date_pred'] = strat_df['date']
hist_preds_df["high_price"] = strat_df["high"]
hist_preds_df["low_price"] = strat_df["low"]
hist_preds_df["close_price"] = strat_df["close"]
hist_preds_df["date_pred"] = strat_df["date"]

def fit_live_predictions(self, dk: FreqaiDataKitchen, pair: str) -> None:
"""
@@ -694,52 +699,51 @@ class IFreqaiModel(ABC):
for label in full_labels:
if self.dd.historic_predictions[dk.pair][label].dtype == object:
continue
f = spy.stats.norm.fit(
self.dd.historic_predictions[dk.pair][label].tail(num_candles))
f = spy.stats.norm.fit(self.dd.historic_predictions[dk.pair][label].tail(num_candles))
dk.data["labels_mean"][label], dk.data["labels_std"][label] = f[0], f[1]

return

def inference_timer(self, do: Literal['start', 'stop'] = 'start', pair: str = ''):
def inference_timer(self, do: Literal["start", "stop"] = "start", pair: str = ""):
"""
Timer designed to track the cumulative time spent in FreqAI for one pass through
the whitelist. This will check if the time spent is more than 1/4 the time
of a single candle, and if so, it will warn the user of degraded performance
"""
if do == 'start':
if do == "start":
self.pair_it += 1
self.begin_time = time.time()
elif do == 'stop':
elif do == "stop":
end = time.time()
time_spent = (end - self.begin_time)
if self.freqai_info.get('write_metrics_to_disk', False):
self.dd.update_metric_tracker('inference_time', time_spent, pair)
time_spent = end - self.begin_time
if self.freqai_info.get("write_metrics_to_disk", False):
self.dd.update_metric_tracker("inference_time", time_spent, pair)
self.inference_time += time_spent
if self.pair_it == self.total_pairs:
logger.info(
f'Total time spent inferencing pairlist {self.inference_time:.2f} seconds')
f"Total time spent inferencing pairlist {self.inference_time:.2f} seconds"
)
self.pair_it = 0
self.inference_time = 0
return

def train_timer(self, do: Literal['start', 'stop'] = 'start', pair: str = ''):
def train_timer(self, do: Literal["start", "stop"] = "start", pair: str = ""):
"""
Timer designed to track the cumulative time spent training the full pairlist in
FreqAI.
"""
if do == 'start':
if do == "start":
self.pair_it_train += 1
self.begin_time_train = time.time()
elif do == 'stop':
elif do == "stop":
end = time.time()
time_spent = (end - self.begin_time_train)
if self.freqai_info.get('write_metrics_to_disk', False):
time_spent = end - self.begin_time_train
if self.freqai_info.get("write_metrics_to_disk", False):
self.dd.collect_metrics(time_spent, pair)

self.train_time += time_spent
if self.pair_it_train == self.total_pairs:
logger.info(
f'Total time spent training pairlist {self.train_time:.2f} seconds')
logger.info(f"Total time spent training pairlist {self.train_time:.2f} seconds")
self.pair_it_train = 0
self.train_time = 0
return
@@ -759,14 +763,14 @@ class IFreqaiModel(ABC):
"""
current_pairlist = self.config.get("exchange", {}).get("pair_whitelist")
if not self.dd.pair_dict:
logger.info('Set fresh train queue from whitelist. '
f'Queue: {current_pairlist}')
logger.info("Set fresh train queue from whitelist. Queue: {current_pairlist}")
return deque(current_pairlist)

best_queue = deque()

pair_dict_sorted = sorted(self.dd.pair_dict.items(),
key=lambda k: k[1]['trained_timestamp'])
pair_dict_sorted = sorted(
self.dd.pair_dict.items(), key=lambda k: k[1]["trained_timestamp"]
)
for pair in pair_dict_sorted:
if pair[0] in current_pairlist:
best_queue.append(pair[0])
@@ -774,8 +778,9 @@ class IFreqaiModel(ABC):
if pair not in best_queue:
best_queue.appendleft(pair)

logger.info('Set existing queue from trained timestamps. '
f'Best approximation queue: {best_queue}')
logger.info(
"Set existing queue from trained timestamps. Best approximation queue: {best_queue}"
)
return best_queue

def cache_corr_pairlist_dfs(self, dataframe: DataFrame, dk: FreqaiDataKitchen) -> DataFrame:
@@ -790,14 +795,15 @@ class IFreqaiModel(ABC):
if self.get_corr_dataframes:
self.corr_dataframes = dk.extract_corr_pair_columns_from_populated_indicators(dataframe)
if not self.corr_dataframes:
logger.warning("Couldn't cache corr_pair dataframes for improved performance. "
"Consider ensuring that the full coin/stake, e.g. XYZ/USD, "
"is included in the column names when you are creating features "
"in `feature_engineering_*` functions.")
logger.warning(
"Couldn't cache corr_pair dataframes for improved performance. "
"Consider ensuring that the full coin/stake, e.g. XYZ/USD, "
"is included in the column names when you are creating features "
"in `feature_engineering_*` functions."
)
self.get_corr_dataframes = not bool(self.corr_dataframes)
elif self.corr_dataframes:
dataframe = dk.attach_corr_pair_columns(
dataframe, self.corr_dataframes, dk.pair)
dataframe = dk.attach_corr_pair_columns(dataframe, self.corr_dataframes, dk.pair)

return dataframe

@@ -813,8 +819,9 @@ class IFreqaiModel(ABC):
self.pair_it = 1
self.current_candle = self.dd.current_candle

def ensure_data_exists(self, len_dataframe_backtest: int,
tr_backtest: TimeRange, pair: str) -> bool:
def ensure_data_exists(
self, len_dataframe_backtest: int, tr_backtest: TimeRange, pair: str
) -> bool:
"""
Check if the dataframe is empty, if not, report useful information to user.
:param len_dataframe_backtest: the len of backtesting dataframe
@@ -823,14 +830,17 @@ class IFreqaiModel(ABC):
:return: if the data exists or not
"""
if self.config.get("freqai_backtest_live_models", False) and len_dataframe_backtest == 0:
logger.info(f"No data found for pair {pair} from "
f"from {tr_backtest.start_fmt} to {tr_backtest.stop_fmt}. "
"Probably more than one training within the same candle period.")
logger.info(
f"No data found for pair {pair} from "
f"from {tr_backtest.start_fmt} to {tr_backtest.stop_fmt}. "
"Probably more than one training within the same candle period."
)
return False
return True

def log_backtesting_progress(self, tr_train: TimeRange, pair: str,
train_it: int, total_trains: int):
def log_backtesting_progress(
self, tr_train: TimeRange, pair: str, train_it: int, total_trains: int
):
"""
Log the backtesting progress so user knows how many pairs have been trained and
how many more pairs/trains remain.
@@ -857,30 +867,37 @@ class IFreqaiModel(ABC):
fit_live_predictions_candles = self.freqai_info.get("fit_live_predictions_candles", 0)
if fit_live_predictions_candles:
logger.info("Applying fit_live_predictions in backtesting")
label_columns = [col for col in dk.full_df.columns if (
col.startswith("&") and
not (col.startswith("&") and col.endswith("_mean")) and
not (col.startswith("&") and col.endswith("_std")) and
col not in self.dk.data["extra_returns_per_train"])
label_columns = [
col
for col in dk.full_df.columns
if (
col.startswith("&")
and not (col.startswith("&") and col.endswith("_mean"))
and not (col.startswith("&") and col.endswith("_std"))
and col not in self.dk.data["extra_returns_per_train"]
)
]

for index in range(len(dk.full_df)):
if index >= fit_live_predictions_candles:
self.dd.historic_predictions[self.dk.pair] = (
dk.full_df.iloc[index - fit_live_predictions_candles:index])
self.dd.historic_predictions[self.dk.pair] = dk.full_df.iloc[
index - fit_live_predictions_candles : index
]
self.fit_live_predictions(self.dk, self.dk.pair)
for label in label_columns:
if dk.full_df[label].dtype == object:
continue
if "labels_mean" in self.dk.data:
dk.full_df.at[index, f"{label}_mean"] = (
self.dk.data["labels_mean"][label])
dk.full_df.at[index, f"{label}_mean"] = self.dk.data["labels_mean"][
label
]
if "labels_std" in self.dk.data:
dk.full_df.at[index, f"{label}_std"] = self.dk.data["labels_std"][label]

for extra_col in self.dk.data["extra_returns_per_train"]:
dk.full_df.at[index, f"{extra_col}"] = (
self.dk.data["extra_returns_per_train"][extra_col])
dk.full_df.at[index, f"{extra_col}"] = self.dk.data[
"extra_returns_per_train"
][extra_col]

return

@@ -897,7 +914,8 @@ class IFreqaiModel(ABC):
if key_name not in self.metadata:
metadata = self.metadata
metadata[key_name] = int(
pd.to_datetime(live_dataframe.tail(1)["date"].values[0]).timestamp())
pd.to_datetime(live_dataframe.tail(1)["date"].values[0]).timestamp()
)
self.update_metadata(metadata)

def start_backtesting_from_historic_predictions(
@@ -913,19 +931,20 @@ class IFreqaiModel(ABC):
pair = metadata["pair"]
dk.return_dataframe = dataframe
saved_dataframe = self.dd.historic_predictions[pair]
columns_to_drop = list(set(saved_dataframe.columns).intersection(
dk.return_dataframe.columns))
columns_to_drop = list(
set(saved_dataframe.columns).intersection(dk.return_dataframe.columns)
)
dk.return_dataframe = dk.return_dataframe.drop(columns=list(columns_to_drop))
dk.return_dataframe = pd.merge(
dk.return_dataframe, saved_dataframe, how='left', left_on='date', right_on="date_pred")
dk.return_dataframe, saved_dataframe, how="left", left_on="date", right_on="date_pred"
)
return dk

# Following methods which are overridden by user made prediction models.
# See freqai/prediction_models/CatboostPredictionModel.py for an example.

@abstractmethod
def train(self, unfiltered_df: DataFrame, pair: str,
dk: FreqaiDataKitchen, **kwargs) -> Any:
def train(self, unfiltered_df: DataFrame, pair: str, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
Filter the training data and train a model to it. Train makes heavy use of the datahandler
for storing, saving, loading, and analyzing the data.
@@ -966,23 +985,25 @@ class IFreqaiModel(ABC):
"""
throw deprecation warning if this function is called
"""
logger.warning(f"Your model {self.__class__.__name__} relies on the deprecated"
" data pipeline. Please update your model to use the new data pipeline."
" This can be achieved by following the migration guide at "
f"{DOCS_LINK}/strategy_migration/#freqai-new-data-pipeline")
logger.warning(
f"Your model {self.__class__.__name__} relies on the deprecated"
" data pipeline. Please update your model to use the new data pipeline."
" This can be achieved by following the migration guide at "
f"{DOCS_LINK}/strategy_migration/#freqai-new-data-pipeline"
)
dk.feature_pipeline = self.define_data_pipeline(threads=dk.thread_count)
dd = dk.data_dictionary
(dd["train_features"],
dd["train_labels"],
dd["train_weights"]) = dk.feature_pipeline.fit_transform(dd["train_features"],
dd["train_labels"],
dd["train_weights"])
(dd["train_features"], dd["train_labels"], dd["train_weights"]) = (
dk.feature_pipeline.fit_transform(
dd["train_features"], dd["train_labels"], dd["train_weights"]
)
)

(dd["test_features"],
dd["test_labels"],
dd["test_weights"]) = dk.feature_pipeline.transform(dd["test_features"],
dd["test_labels"],
dd["test_weights"])
(dd["test_features"], dd["test_labels"], dd["test_weights"]) = (
dk.feature_pipeline.transform(
dd["test_features"], dd["test_labels"], dd["test_weights"]
)
)

dk.label_pipeline = self.define_label_pipeline(threads=dk.thread_count)

@@ -994,13 +1015,16 @@ class IFreqaiModel(ABC):
"""
throw deprecation warning if this function is called
"""
logger.warning(f"Your model {self.__class__.__name__} relies on the deprecated"
" data pipeline. Please update your model to use the new data pipeline."
" This can be achieved by following the migration guide at "
f"{DOCS_LINK}/strategy_migration/#freqai-new-data-pipeline")
logger.warning(
f"Your model {self.__class__.__name__} relies on the deprecated"
" data pipeline. Please update your model to use the new data pipeline."
" This can be achieved by following the migration guide at "
f"{DOCS_LINK}/strategy_migration/#freqai-new-data-pipeline"
)
dd = dk.data_dictionary
dd["predict_features"], outliers, _ = dk.feature_pipeline.transform(
dd["predict_features"], outlier_check=True)
dd["predict_features"], outlier_check=True
)
if self.freqai_info.get("DI_threshold", 0) > 0:
dk.DI_values = dk.feature_pipeline["di"].di_values
else:

@@ -46,14 +46,19 @@ class CatboostClassifier(BaseClassifierModel):

cbr = CatBoostClassifier(
allow_writing_files=True,
loss_function='MultiClass',
loss_function="MultiClass",
train_dir=Path(dk.data_path),
**self.model_training_parameters,
)

init_model = self.get_init_model(dk.pair)

cbr.fit(X=train_data, eval_set=test_data, init_model=init_model,
log_cout=sys.stdout, log_cerr=sys.stderr)
cbr.fit(
X=train_data,
eval_set=test_data,
init_model=init_model,
log_cout=sys.stdout,
log_cerr=sys.stderr,
)

return cbr