Merge branch 'develop' into api-server-list-custom-data
@@ -16,7 +16,7 @@ repos:
additional_dependencies:
- types-cachetools==5.5.0.20240820
- types-filelock==3.2.7
- types-requests==2.32.0.20250301
- types-requests==2.32.0.20250306
- types-tabulate==0.9.0.20241207
- types-python-dateutil==2.9.0.20241206
- SQLAlchemy==2.0.38

@@ -31,7 +31,7 @@ repos:

- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.9.9'
rev: 'v0.9.10'
hooks:
- id: ruff
- id: ruff-format

@@ -70,6 +70,6 @@ repos:

# Ensure github actions remain safe
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.4.1
rev: v1.5.0
hooks:
- id: zizmor

@@ -257,7 +257,8 @@
"enum": [
"day",
"week",
"month"
"month",
"year"
]
}
},
@@ -1366,10 +1367,10 @@
"type": "boolean",
"default": false
},
"keras": {
"description": "Use Keras for model training.",
"type": "boolean",
"default": false
"identifier": {
"description": "A unique ID for the current model. Must be changed when modifying features.",
"type": "string",
"default": "example"
},
"write_metrics_to_disk": {
"description": "Write metrics to disk?",

@@ -1399,16 +1400,50 @@
"type": "number",
"default": 7
},
"identifier": {
"description": "A unique ID for the current model. Must be changed when modifying features.",
"type": "string",
"default": "example"
"live_retrain_hours": {
"description": "Frequency of retraining during dry/live runs.",
"type": "number",
"default": 0
},
"expiration_hours": {
"description": "Avoid making predictions if a model is more than `expiration_hours` old. Defaults to 0 (no expiration).",
"type": "number",
"default": 0
},
"save_backtest_models": {
"description": "Save models to disk when running backtesting.",
"type": "boolean",
"default": false
},
"fit_live_predictions_candles": {
"description": "Number of historical candles to use for computing target (label) statistics from prediction data, instead of from the training dataset.",
"type": "boolean",
"default": false
},
"data_kitchen_thread_count": {
"description": "Designate the number of threads you want to use for data processing (outlier methods, normalization, etc.).",
"type": "integer"
},
"activate_tensorboard": {
"description": "Indicate whether or not to activate tensorboard",
"type": "boolean",
"default": true
},
"wait_for_training_iteration_on_reload": {
"description": "Wait for the next training iteration to complete after /reload or ctrl+c.",
"type": "boolean",
"default": true
},
"continual_learning": {
"description": "Use the final state of the most recently trained model as starting point for the new model, allowing for incremental learning.",
"type": "boolean",
"default": false
},
"keras": {
"description": "Use Keras for model training.",
"type": "boolean",
"default": false
},
"feature_parameters": {
"description": "The parameters used to engineer the feature set",
"type": "object",

@@ -2,7 +2,7 @@
usage: freqtrade backtesting-show [-h] [-v] [--no-color] [--logfile FILE] [-V]
[-c PATH] [-d PATH] [--userdir PATH]
[--export-filename PATH] [--show-pair-list]
[--breakdown {day,week,month} [{day,week,month} ...]]
[--breakdown {day,week,month,year} [{day,week,month,year} ...]]

options:
-h, --help show this help message and exit

@@ -11,8 +11,9 @@ options:
`--export` to be set as well. Example: `--export-filen
ame=user_data/backtest_results/backtest_today.json`
--show-pair-list Show backtesting pairlist sorted by profit.
--breakdown {day,week,month} [{day,week,month} ...]
Show backtesting breakdown per [day, week, month].
--breakdown {day,week,month,year} [{day,week,month,year} ...]
Show backtesting breakdown per [day, week, month,
year].

Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).

@@ -15,7 +15,7 @@ usage: freqtrade backtesting [-h] [-v] [--no-color] [--logfile FILE] [-V]
[--strategy-list STRATEGY_LIST [STRATEGY_LIST ...]]
[--export {none,trades,signals}]
[--export-filename PATH]
[--breakdown {day,week,month} [{day,week,month} ...]]
[--breakdown {day,week,month,year} [{day,week,month,year} ...]]
[--cache {none,day,week,month}]
[--freqai-backtest-live-models]

@@ -65,8 +65,9 @@ options:
Use this filename for backtest results.Requires
`--export` to be set as well. Example: `--export-filen
ame=user_data/backtest_results/backtest_today.json`
--breakdown {day,week,month} [{day,week,month} ...]
Show backtesting breakdown per [day, week, month].
--breakdown {day,week,month,year} [{day,week,month,year} ...]
Show backtesting breakdown per [day, week, month,
year].
--cache {none,day,week,month}
Load a cached backtest result no older than specified
age (default: day).
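
As the updated help output above shows, `--breakdown` takes one or more periods, so the new yearly summary can be requested alongside the existing ones. A usage sketch (the strategy name is a placeholder, not taken from this diff):

freqtrade backtesting --strategy MyStrategy --breakdown month year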

@@ -4,7 +4,7 @@ usage: freqtrade hyperopt-show [-h] [-v] [--no-color] [--logfile FILE] [-V]
[--profitable] [-n INT] [--print-json]
[--hyperopt-filename FILENAME] [--no-header]
[--disable-param-export]
[--breakdown {day,week,month} [{day,week,month} ...]]
[--breakdown {day,week,month,year} [{day,week,month,year} ...]]

options:
-h, --help show this help message and exit

@@ -18,8 +18,9 @@ options:
--no-header Do not print epoch details header.
--disable-param-export
Disable automatic hyperopt parameter export.
--breakdown {day,week,month} [{day,week,month} ...]
Show backtesting breakdown per [day, week, month].
--breakdown {day,week,month,year} [{day,week,month,year} ...]
Show backtesting breakdown per [day, week, month,
year].

Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).

@@ -224,7 +224,7 @@ AVAILABLE_CLI_OPTIONS = {
),
"backtest_breakdown": Arg(
"--breakdown",
help="Show backtesting breakdown per [day, week, month].",
help="Show backtesting breakdown per [day, week, month, year].",
nargs="+",
choices=constants.BACKTEST_BREAKDOWNS,
),

@@ -17,11 +17,11 @@ def start_list_exchanges(args: dict[str, Any]) -> None:
:param args: Cli args from Arguments()
:return: None
"""
from rich.console import Console
from rich.table import Table
from rich.text import Text

from freqtrade.exchange import list_available_exchanges
from freqtrade.loggers.rich_console import get_rich_console

available_exchanges: list[ValidExchangesType] = list_available_exchanges(
args["list_exchanges_all"]

@@ -77,15 +77,16 @@ def start_list_exchanges(args: dict[str, Any]) -> None:
)
# table.add_row(*[exchange[header] for header in headers])

console = Console()
console = get_rich_console()
console.print(table)


def _print_objs_tabular(objs: list, print_colorized: bool) -> None:
from rich.console import Console
from rich.table import Table
from rich.text import Text

from freqtrade.loggers.rich_console import get_rich_console

names = [s["name"] for s in objs]
objs_to_print: list[dict[str, Text | str]] = [
{

@@ -118,10 +119,7 @@ def _print_objs_tabular(objs: list, print_colorized: bool) -> None:
for row in objs_to_print:
table.add_row(*[row[header] for header in objs_to_print[0].keys()])

console = Console(
color_system="auto" if print_colorized else None,
width=200 if "pytest" in sys.modules else None,
)
console = get_rich_console(color_system="auto" if print_colorized else None)
console.print(table)


@@ -219,7 +217,7 @@ def start_list_markets(args: dict[str, Any], pairs_only: bool = False) -> None:
"""
from freqtrade.configuration import setup_utils_configuration
from freqtrade.exchange import market_is_active
from freqtrade.misc import plural
from freqtrade.misc import plural, safe_value_fallback
from freqtrade.resolvers import ExchangeResolver
from freqtrade.util import print_rich_table

@@ -246,88 +244,99 @@ def start_list_markets(args: dict[str, Any], pairs_only: bool = False) -> None:
except Exception as e:
raise OperationalException(f"Cannot get markets. Reason: {e}") from e

else:
summary_str = (
(f"Exchange {exchange.name} has {len(pairs)} ")
+ ("active " if active_only else "")
+ (plural(len(pairs), "pair" if pairs_only else "market"))
+ (
f" with {', '.join(base_currencies)} as base "
f"{plural(len(base_currencies), 'currency', 'currencies')}"
if base_currencies
else ""
)
+ (" and" if base_currencies and quote_currencies else "")
+ (
f" with {', '.join(quote_currencies)} as quote "
f"{plural(len(quote_currencies), 'currency', 'currencies')}"
if quote_currencies
else ""
)
tickers = exchange.get_tickers()

summary_str = (
(f"Exchange {exchange.name} has {len(pairs)} ")
+ ("active " if active_only else "")
+ (plural(len(pairs), "pair" if pairs_only else "market"))
+ (
f" with {', '.join(base_currencies)} as base "
f"{plural(len(base_currencies), 'currency', 'currencies')}"
if base_currencies
else ""
)
+ (" and" if base_currencies and quote_currencies else "")
+ (
f" with {', '.join(quote_currencies)} as quote "
f"{plural(len(quote_currencies), 'currency', 'currencies')}"
if quote_currencies
else ""
)
)

headers = [
"Id",
"Symbol",
"Base",
"Quote",
"Active",
"Spot",
"Margin",
"Future",
"Leverage",
]
headers = [
"Id",
"Symbol",
"Base",
"Quote",
"Active",
"Spot",
"Margin",
"Future",
"Leverage",
"Min Stake",
]

tabular_data = [
{
"Id": v["id"],
"Symbol": v["symbol"],
"Base": v["base"],
"Quote": v["quote"],
"Active": market_is_active(v),
"Spot": "Spot" if exchange.market_is_spot(v) else "",
"Margin": "Margin" if exchange.market_is_margin(v) else "",
"Future": "Future" if exchange.market_is_future(v) else "",
"Leverage": exchange.get_max_leverage(v["symbol"], 20),
}
for _, v in pairs.items()
]
tabular_data = [
{
"Id": v["id"],
"Symbol": v["symbol"],
"Base": v["base"],
"Quote": v["quote"],
"Active": market_is_active(v),
"Spot": "Spot" if exchange.market_is_spot(v) else "",
"Margin": "Margin" if exchange.market_is_margin(v) else "",
"Future": "Future" if exchange.market_is_future(v) else "",
"Leverage": exchange.get_max_leverage(v["symbol"], 20),
"Min Stake": round(
exchange.get_min_pair_stake_amount(
v["symbol"],
safe_value_fallback(tickers.get(v["symbol"], {}), "last", "ask", 0.0),
0.0,
)
or 0.0,
8,
),
}
for _, v in pairs.items()
]
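
The new "Min Stake" column feeds `get_min_pair_stake_amount` with a reference price taken from the pair's ticker, preferring `last` and falling back to `ask` and then to 0.0; the trailing `or 0.0` presumably guards against a `None` return from the exchange helper. A rough standalone sketch of that fallback logic, using a hypothetical ticker dict rather than the real exchange API:

# Hypothetical sketch of the price fallback feeding the "Min Stake" column.
def reference_price(ticker: dict) -> float:
    # Prefer the last traded price, then the ask, then 0.0.
    for key in ("last", "ask"):
        value = ticker.get(key)
        if value is not None:
            return value
    return 0.0

print(reference_price({"last": None, "ask": 101.5}))  # 101.5
print(reference_price({}))                            # 0.0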

if (
args.get("print_one_column", False)
or args.get("list_pairs_print_json", False)
or args.get("print_csv", False)
):
# Print summary string in the log in case of machine-readable
# regular formats.
logger.info(f"{summary_str}.")
if (
args.get("print_one_column", False)
or args.get("list_pairs_print_json", False)
or args.get("print_csv", False)
):
# Print summary string in the log in case of machine-readable
# regular formats.
logger.info(f"{summary_str}.")
else:
# Print empty string separating leading logs and output in case of
# human-readable formats.
print()

if pairs:
if args.get("print_list", False):
# print data as a list, with human-readable summary
print(f"{summary_str}: {', '.join(pairs.keys())}.")
elif args.get("print_one_column", False):
print("\n".join(pairs.keys()))
elif args.get("list_pairs_print_json", False):
import rapidjson

print(rapidjson.dumps(list(pairs.keys()), default=str))
elif args.get("print_csv", False):
writer = csv.DictWriter(sys.stdout, fieldnames=headers)
writer.writeheader()
writer.writerows(tabular_data)
else:
# Print empty string separating leading logs and output in case of
# human-readable formats.
print()

if pairs:
if args.get("print_list", False):
# print data as a list, with human-readable summary
print(f"{summary_str}: {', '.join(pairs.keys())}.")
elif args.get("print_one_column", False):
print("\n".join(pairs.keys()))
elif args.get("list_pairs_print_json", False):
import rapidjson

print(rapidjson.dumps(list(pairs.keys()), default=str))
elif args.get("print_csv", False):
writer = csv.DictWriter(sys.stdout, fieldnames=headers)
writer.writeheader()
writer.writerows(tabular_data)
else:
print_rich_table(tabular_data, headers, summary_str)
elif not (
args.get("print_one_column", False)
or args.get("list_pairs_print_json", False)
or args.get("print_csv", False)
):
print(f"{summary_str}.")
print_rich_table(tabular_data, headers, summary_str)
elif not (
args.get("print_one_column", False)
or args.get("list_pairs_print_json", False)
or args.get("print_csv", False)
):
print(f"{summary_str}.")


def start_show_trades(args: dict[str, Any]) -> None:

@@ -965,10 +965,13 @@ CONF_SCHEMA = {
"type": "boolean",
"default": False,
},
"keras": {
"description": "Use Keras for model training.",
"type": "boolean",
"default": False,
"identifier": {
"description": (
"A unique ID for the current model. "
"Must be changed when modifying features."
),
"type": "string",
"default": "example",
},
"write_metrics_to_disk": {
"description": "Write metrics to disk?",

@@ -1000,13 +1003,43 @@ CONF_SCHEMA = {
"type": "number",
"default": 7,
},
"identifier": {
"live_retrain_hours": {
"description": "Frequency of retraining during dry/live runs.",
"type": "number",
"default": 0,
},
"expiration_hours": {
"description": (
"A unique ID for the current model. "
"Must be changed when modifying features."
"Avoid making predictions if a model is more than `expiration_hours` "
"old. Defaults to 0 (no expiration)."
),
"type": "string",
"default": "example",
"type": "number",
"default": 0,
},
"save_backtest_models": {
"description": "Save models to disk when running backtesting.",
"type": "boolean",
"default": False,
},
"fit_live_predictions_candles": {
"description": (
"Number of historical candles to use for computing target (label) "
"statistics from prediction data, instead of from the training dataset."
),
"type": "boolean",
"default": False,
},
"data_kitchen_thread_count": {
"description": (
"Designate the number of threads you want to use for data processing "
"(outlier methods, normalization, etc.)."
),
"type": "integer",
},
"activate_tensorboard": {
"description": "Indicate whether or not to activate tensorboard",
"type": "boolean",
"default": True,
},
"wait_for_training_iteration_on_reload": {
"description": (

@@ -1015,6 +1048,20 @@ CONF_SCHEMA = {
"type": "boolean",
"default": True,
},
"continual_learning": {
"description": (
"Use the final state of the most recently trained model "
"as starting point for the new model, allowing for "
"incremental learning."
),
"type": "boolean",
"default": False,
},
"keras": {
"description": "Use Keras for model training.",
"type": "boolean",
"default": False,
},
"feature_parameters": {
"description": "The parameters used to engineer the feature set",
"type": "object",

@@ -59,7 +59,7 @@ AVAILABLE_PAIRLISTS = [
"VolatilityFilter",
]
AVAILABLE_DATAHANDLERS = ["json", "jsongz", "feather", "parquet"]
BACKTEST_BREAKDOWNS = ["day", "week", "month"]
BACKTEST_BREAKDOWNS = ["day", "week", "month", "year"]
BACKTEST_CACHE_AGE = ["none", "day", "week", "month"]
BACKTEST_CACHE_DEFAULT = "day"
DRY_RUN_WALLET = 1000

@@ -49,7 +49,7 @@ class DataProvider:
self._pairlists = pairlists
self.__rpc = rpc
self.__cached_pairs: dict[PairWithTimeframe, tuple[DataFrame, datetime]] = {}
self.__slice_index: int | None = None
self.__slice_index: dict[str, int] = {}
self.__slice_date: datetime | None = None

self.__cached_pairs_backtesting: dict[PairWithTimeframe, DataFrame] = {}

@@ -69,13 +69,13 @@ class DataProvider:
self.producers = self._config.get("external_message_consumer", {}).get("producers", [])
self.external_data_enabled = len(self.producers) > 0

def _set_dataframe_max_index(self, limit_index: int):
def _set_dataframe_max_index(self, pair: str, limit_index: int):
"""
Limit analyzed dataframe to max specified index.
Only relevant in backtesting.
:param limit_index: dataframe index.
"""
self.__slice_index = limit_index
self.__slice_index[pair] = limit_index

def _set_dataframe_max_date(self, limit_date: datetime):
"""

@@ -393,9 +393,10 @@ class DataProvider:
df, date = self.__cached_pairs[pair_key]
else:
df, date = self.__cached_pairs[pair_key]
if self.__slice_index is not None:
max_index = self.__slice_index
if (max_index := self.__slice_index.get(pair)) is not None:
df = df.iloc[max(0, max_index - MAX_DATAFRAME_CANDLES) : max_index]
else:
return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
return df, date
else:
return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))

@@ -430,7 +431,7 @@ class DataProvider:
# Don't reset backtesting pairs -
# otherwise they're reloaded each time during hyperopt due to with analyze_per_epoch
# self.__cached_pairs_backtesting = {}
self.__slice_index = 0
self.__slice_index = {}

# Exchange functions
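
The switch from a single integer `__slice_index` to a dict keyed by pair means each pair carries its own backtest cut-off instead of sharing one global index. A minimal standalone sketch of the lookup pattern introduced above (names are illustrative, not the real DataProvider):

# Illustrative sketch: per-pair slice indices instead of one global index.
MAX_DATAFRAME_CANDLES = 1000  # assumed constant, mirroring the name used in the diff

slice_index: dict[str, int] = {}

def set_max_index(pair: str, limit_index: int) -> None:
    slice_index[pair] = limit_index

def slice_for(pair: str) -> tuple[int, int] | None:
    # Returns the iloc window applied in get_analyzed_dataframe, or None if the
    # pair has no limit yet (the diff then returns an empty DataFrame).
    if (max_index := slice_index.get(pair)) is None:
        return None
    return max(0, max_index - MAX_DATAFRAME_CANDLES), max_index

set_max_index("XRP/BTC", 3)
print(slice_for("XRP/BTC"))  # (0, 3) -> three candles, as in the updated tests below
print(slice_for("ETH/BTC"))  # None -> other pairs are unaffected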

@@ -274,12 +274,12 @@ class Binance(Exchange):
def dry_run_liquidation_price(
self,
pair: str,
open_rate: float,  # Entry price of position
open_rate: float,
is_short: bool,
amount: float,
stake_amount: float,
leverage: float,
wallet_balance: float,  # Or margin balance
wallet_balance: float,
open_trades: list,
) -> float | None:
"""

@@ -293,8 +293,6 @@ class Binance(Exchange):
:param amount: Absolute value of position size incl. leverage (in base currency)
:param stake_amount: Stake amount - Collateral in settle currency.
:param leverage: Leverage used for this position.
:param trading_mode: SPOT, MARGIN, FUTURES, etc.
:param margin_mode: Either ISOLATED or CROSS
:param wallet_balance: Amount of margin_mode in the wallet being used to trade
Cross-Margin Mode: crossWalletBalance
Isolated-Margin Mode: isolatedWalletBalance

@@ -166,15 +166,16 @@ class Bybit(Exchange):
PERPETUAL:
bybit:
https://www.bybithelp.com/HelpCenterKnowledge/bybitHC_Article?language=en_US&id=000001067
https://www.bybit.com/en/help-center/article/Liquidation-Price-Calculation-under-Isolated-Mode-Unified-Trading-Account#b

Long:
Liquidation Price = (
Entry Price * (1 - Initial Margin Rate + Maintenance Margin Rate)
- Extra Margin Added/ Contract)
Entry Price - [(Initial Margin - Maintenance Margin)/Contract Quantity]
- (Extra Margin Added/Contract Quantity))
Short:
Liquidation Price = (
Entry Price * (1 + Initial Margin Rate - Maintenance Margin Rate)
+ Extra Margin Added/ Contract)
Entry Price + [(Initial Margin - Maintenance Margin)/Contract Quantity]
+ (Extra Margin Added/Contract Quantity))

Implementation Note: Extra margin is currently not used.

@@ -184,8 +185,6 @@ class Bybit(Exchange):
:param amount: Absolute value of position size incl. leverage (in base currency)
:param stake_amount: Stake amount - Collateral in settle currency.
:param leverage: Leverage used for this position.
:param trading_mode: SPOT, MARGIN, FUTURES, etc.
:param margin_mode: Either ISOLATED or CROSS
:param wallet_balance: Amount of margin_mode in the wallet being used to trade
Cross-Margin Mode: crossWalletBalance
Isolated-Margin Mode: isolatedWalletBalance

@@ -198,13 +197,16 @@ class Bybit(Exchange):
if self.trading_mode == TradingMode.FUTURES and self.margin_mode == MarginMode.ISOLATED:
if market["inverse"]:
raise OperationalException("Freqtrade does not yet support inverse contracts")
initial_margin_rate = 1 / leverage
position_value = amount * open_rate
initial_margin = position_value / leverage
maintenance_margin = position_value * mm_ratio
margin_diff_per_contract = (initial_margin - maintenance_margin) / amount

# See docstring - ignores extra margin!
if is_short:
return open_rate * (1 + initial_margin_rate - mm_ratio)
return open_rate + margin_diff_per_contract
else:
return open_rate * (1 - initial_margin_rate + mm_ratio)
return open_rate - margin_diff_per_contract

else:
raise OperationalException(
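
The rewritten Bybit formula can be checked against the example rows added to the tests further down (entry 40000, 50x leverage, maintenance margin rate 0.005, long, no extra margin). A small standalone sketch of the same arithmetic, not the actual exchange class:

# Worked example of the isolated-mode formula from the docstring above
# (extra margin ignored, matching the implementation note).
def bybit_isolated_liq(open_rate: float, amount: float, leverage: float,
                       mm_ratio: float, is_short: bool) -> float:
    position_value = amount * open_rate
    initial_margin = position_value / leverage
    maintenance_margin = position_value * mm_ratio
    margin_diff_per_contract = (initial_margin - maintenance_margin) / amount
    if is_short:
        return open_rate + margin_diff_per_contract
    return open_rate - margin_diff_per_contract

# Long, 1 contract at 40000 with 50x leverage and 0.5% maintenance margin rate:
# IM = 800, MM = 200, diff = 600 -> liquidation at 39400, matching the test row below.
print(bybit_isolated_liq(40000.0, 1.0, 50.0, 0.005, is_short=False))  # 39400.0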

@@ -3688,12 +3688,12 @@ class Exchange:
def dry_run_liquidation_price(
self,
pair: str,
open_rate: float,  # Entry price of position
open_rate: float,
is_short: bool,
amount: float,
stake_amount: float,
leverage: float,
wallet_balance: float,  # Or margin balance
wallet_balance: float,
open_trades: list,
) -> float | None:
"""

@@ -3714,8 +3714,6 @@ class Exchange:
:param amount: Absolute value of position size incl. leverage (in base currency)
:param stake_amount: Stake amount - Collateral in settle currency.
:param leverage: Leverage used for this position.
:param trading_mode: SPOT, MARGIN, FUTURES, etc.
:param margin_mode: Either ISOLATED or CROSS
:param wallet_balance: Amount of margin_mode in the wallet being used to trade
Cross-Margin Mode: crossWalletBalance
Isolated-Margin Mode: isolatedWalletBalance

@@ -3,12 +3,11 @@ from logging import Formatter
from logging.handlers import RotatingFileHandler, SysLogHandler
from pathlib import Path

from rich.console import Console

from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.loggers.buffering_handler import FTBufferingHandler
from freqtrade.loggers.ft_rich_handler import FtRichHandler
from freqtrade.loggers.rich_console import get_rich_console
from freqtrade.loggers.set_log_levels import set_loggers


@@ -22,7 +21,8 @@ LOGFORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
bufferHandler = FTBufferingHandler(1000)
bufferHandler.setFormatter(Formatter(LOGFORMAT))

error_console = Console(stderr=True, color_system=None)

error_console = get_rich_console(stderr=True, color_system=None)


def get_existing_handlers(handlertype):

freqtrade/loggers/rich_console.py (new file, 26 lines)
@@ -0,0 +1,26 @@
import sys
from shutil import get_terminal_size

from rich.console import Console


def console_width() -> int | None:
"""
Get the width of the console
"""
if any(module in ["pytest", "ipykernel"] for module in sys.modules):
return 200

width, _ = get_terminal_size((1, 24))
# Fall back to 200 if terminal size is not available.
# This is determined by assuming an insane width of 1char, which is unlikely.
w = None if width > 1 else 200
return w


def get_rich_console(**kwargs) -> Console:
"""
Get a rich console with default settings
"""
kwargs["width"] = kwargs.get("width", console_width())
return Console(**kwargs)
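
A short usage sketch of the new helper, in the spirit of the call sites elsewhere in this commit (the table contents are made up for illustration):

# Illustrative usage of the new get_rich_console helper.
from rich.table import Table

from freqtrade.loggers.rich_console import get_rich_console

table = Table(title="Example")
table.add_column("Pair")
table.add_row("BTC/USDT")

# Width handling (pytest/ipykernel, degenerate terminal sizes) is now centralized here.
console = get_rich_console()
console.print(table)

# Error output, as used in freqtrade.loggers:
error_console = get_rich_console(stderr=True, color_system=None)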

@@ -1552,7 +1552,9 @@ class Backtesting:
row_index += 1
indexes[pair] = row_index
is_last_row = current_time == end_date
self.dataprovider._set_dataframe_max_index(self.required_startup + row_index)
self.dataprovider._set_dataframe_max_index(
pair, self.required_startup + row_index
)
trade_dir = self.check_for_trade_entry(row)
pair_tradedir_cache[pair] = trade_dir

@@ -212,6 +212,8 @@ def _get_resample_from_period(period: str) -> str:
return "1W-MON"
if period == "month":
return "1ME"
if period == "year":
return "1Y"
raise ValueError(f"Period {period} is not supported.")
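
With the new "year" period mapped to the pandas offset alias "1Y", a yearly breakdown is just a resample over the trade close dates. A minimal sketch under that assumption (the column names are illustrative, not taken from this diff; newer pandas versions may prefer the "YE" alias):

# Minimal sketch: grouping per-trade profit by the resample rule returned above.
import pandas as pd

trades = pd.DataFrame(
    {
        "close_date": pd.to_datetime(["2024-03-01", "2024-11-20", "2025-02-02"]),
        "profit_abs": [10.0, -4.0, 7.0],
    }
)

yearly = trades.resample("1Y", on="close_date")["profit_abs"].sum()
print(yearly)
# 2024-12-31    6.0
# 2025-12-31    7.0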

@@ -7,7 +7,6 @@ from rich.progress import (
TimeRemainingColumn,
)

from freqtrade.loggers import error_console
from freqtrade.util.rich_progress import CustomProgress


@@ -21,6 +20,8 @@ def get_progress_tracker(**kwargs) -> CustomProgress:
"""
Get progress Bar with custom columns.
"""
from freqtrade.loggers import error_console

return CustomProgress(
TextColumn("[progress.description]{task.description}"),
BarColumn(bar_width=None),

@@ -1,12 +1,12 @@
import sys
from collections.abc import Sequence
from typing import Any, TypeAlias

from pandas import DataFrame
from rich.console import Console
from rich.table import Column, Table
from rich.text import Text

from freqtrade.loggers.rich_console import get_rich_console


TextOrString: TypeAlias = str | Text

@@ -38,11 +38,7 @@ def print_rich_table(
row_to_add: list[str | Text] = [r if isinstance(r, Text) else str(r) for r in row]
table.add_row(*row_to_add)

width = None
if any(module in ["pytest", "ipykernel"] for module in sys.modules):
width = 200

console = Console(width=width)
console = get_rich_console()
console.print(table)


@@ -74,9 +70,5 @@ def print_df_rich_table(
row = [_format_value(x, floatfmt=".3f") for x in value_list]
table.add_row(*row)

width = None
if any(module in ["pytest", "ipykernel"] for module in sys.modules):
width = 200

console = Console(width=width)
console = get_rich_console()
console.print(table)

@@ -7,7 +7,7 @@
-r docs/requirements-docs.txt

coveralls==4.0.1
ruff==0.9.9
ruff==0.9.10
mypy==1.15.0
pre-commit==4.1.0
pytest==8.3.5

@@ -27,6 +27,6 @@ nbconvert==7.16.6
# mypy types
types-cachetools==5.5.0.20240820
types-filelock==3.2.7
types-requests==2.32.0.20250301
types-requests==2.32.0.20250306
types-tabulate==0.9.0.20241207
types-python-dateutil==2.9.0.20241206

@@ -4,7 +4,7 @@ bottleneck==1.4.2
numexpr==2.10.2
pandas-ta==0.3.14b

ccxt==4.4.64
ccxt==4.4.65
cryptography==44.0.2
aiohttp==3.9.5
SQLAlchemy==2.0.38

@@ -55,7 +55,7 @@ pytz==2025.1
schedule==1.2.2

#WS Messages
websockets==15.0
websockets==15.0.1
janus==2.0.0

ast-comments==1.2.2

@@ -408,20 +408,20 @@ def test_get_analyzed_dataframe(mocker, default_conf, ohlcv_history):

# Test backtest mode
default_conf["runmode"] = RunMode.BACKTEST
dp._set_dataframe_max_index(1)
dp._set_dataframe_max_index("XRP/BTC", 1)
dataframe, time = dp.get_analyzed_dataframe("XRP/BTC", timeframe)

assert len(dataframe) == 1

dp._set_dataframe_max_index(2)
dp._set_dataframe_max_index("XRP/BTC", 2)
dataframe, time = dp.get_analyzed_dataframe("XRP/BTC", timeframe)
assert len(dataframe) == 2

dp._set_dataframe_max_index(3)
dp._set_dataframe_max_index("XRP/BTC", 3)
dataframe, time = dp.get_analyzed_dataframe("XRP/BTC", timeframe)
assert len(dataframe) == 3

dp._set_dataframe_max_index(500)
dp._set_dataframe_max_index("XRP/BTC", 500)
dataframe, time = dp.get_analyzed_dataframe("XRP/BTC", timeframe)
assert len(dataframe) == len(ohlcv_history)


@@ -6077,44 +6077,47 @@ def test_get_liquidation_price1(mocker, default_conf):

@pytest.mark.parametrize("liquidation_buffer", [0.0])
@pytest.mark.parametrize(
"is_short,trading_mode,exchange_name,margin_mode,leverage,open_rate,amount,expected_liq",
"is_short,trading_mode,exchange_name,margin_mode,leverage,open_rate,amount,mramt,expected_liq",
[
(False, "spot", "binance", "", 5.0, 10.0, 1.0, None),
(True, "spot", "binance", "", 5.0, 10.0, 1.0, None),
(False, "spot", "gate", "", 5.0, 10.0, 1.0, None),
(True, "spot", "gate", "", 5.0, 10.0, 1.0, None),
(False, "spot", "okx", "", 5.0, 10.0, 1.0, None),
(True, "spot", "okx", "", 5.0, 10.0, 1.0, None),
(False, "spot", "binance", "", 5.0, 10.0, 1.0, (0.01, 0.01), None),
(True, "spot", "binance", "", 5.0, 10.0, 1.0, (0.01, 0.01), None),
(False, "spot", "gate", "", 5.0, 10.0, 1.0, (0.01, 0.01), None),
(True, "spot", "gate", "", 5.0, 10.0, 1.0, (0.01, 0.01), None),
(False, "spot", "okx", "", 5.0, 10.0, 1.0, (0.01, 0.01), None),
(True, "spot", "okx", "", 5.0, 10.0, 1.0, (0.01, 0.01), None),
# Binance, short
(True, "futures", "binance", "isolated", 5.0, 10.0, 1.0, 11.89108910891089),
(True, "futures", "binance", "isolated", 3.0, 10.0, 1.0, 13.211221122079207),
(True, "futures", "binance", "isolated", 5.0, 8.0, 1.0, 9.514851485148514),
(True, "futures", "binance", "isolated", 5.0, 10.0, 0.6, 11.897689768976898),
(True, "futures", "binance", "isolated", 5.0, 10.0, 1.0, (0.01, 0.01), 11.89108910891089),
(True, "futures", "binance", "isolated", 3.0, 10.0, 1.0, (0.01, 0.01), 13.211221122079207),
(True, "futures", "binance", "isolated", 5.0, 8.0, 1.0, (0.01, 0.01), 9.514851485148514),
(True, "futures", "binance", "isolated", 5.0, 10.0, 0.6, (0.01, 0.01), 11.897689768976898),
# Binance, long
(False, "futures", "binance", "isolated", 5, 10, 1.0, 8.070707070707071),
(False, "futures", "binance", "isolated", 5, 8, 1.0, 6.454545454545454),
(False, "futures", "binance", "isolated", 3, 10, 1.0, 6.723905723905723),
(False, "futures", "binance", "isolated", 5, 10, 0.6, 8.063973063973064),
(False, "futures", "binance", "isolated", 5, 10, 1.0, (0.01, 0.01), 8.070707070707071),
(False, "futures", "binance", "isolated", 5, 8, 1.0, (0.01, 0.01), 6.454545454545454),
(False, "futures", "binance", "isolated", 3, 10, 1.0, (0.01, 0.01), 6.723905723905723),
(False, "futures", "binance", "isolated", 5, 10, 0.6, (0.01, 0.01), 8.063973063973064),
# Gate/okx, short
(True, "futures", "gate", "isolated", 5, 10, 1.0, 11.87413417771621),
(True, "futures", "gate", "isolated", 5, 10, 2.0, 11.87413417771621),
(True, "futures", "gate", "isolated", 3, 10, 1.0, 13.193482419684678),
(True, "futures", "gate", "isolated", 5, 8, 1.0, 9.499307342172967),
(True, "futures", "okx", "isolated", 3, 10, 1.0, 13.193482419684678),
(True, "futures", "gate", "isolated", 5, 10, 1.0, (0.01, 0.01), 11.87413417771621),
(True, "futures", "gate", "isolated", 5, 10, 2.0, (0.01, 0.01), 11.87413417771621),
(True, "futures", "gate", "isolated", 3, 10, 1.0, (0.01, 0.01), 13.193482419684678),
(True, "futures", "gate", "isolated", 5, 8, 1.0, (0.01, 0.01), 9.499307342172967),
(True, "futures", "okx", "isolated", 3, 10, 1.0, (0.01, 0.01), 13.193482419684678),
# Gate/okx, long
(False, "futures", "gate", "isolated", 5.0, 10.0, 1.0, 8.085708510208207),
(False, "futures", "gate", "isolated", 3.0, 10.0, 1.0, 6.738090425173506),
(False, "futures", "okx", "isolated", 3.0, 10.0, 1.0, 6.738090425173506),
(False, "futures", "gate", "isolated", 5.0, 10.0, 1.0, (0.01, 0.01), 8.085708510208207),
(False, "futures", "gate", "isolated", 3.0, 10.0, 1.0, (0.01, 0.01), 6.738090425173506),
(False, "futures", "okx", "isolated", 3.0, 10.0, 1.0, (0.01, 0.01), 6.738090425173506),
# bybit, long
(False, "futures", "bybit", "isolated", 1.0, 10.0, 1.0, 0.1),
(False, "futures", "bybit", "isolated", 3.0, 10.0, 1.0, 6.7666666),
(False, "futures", "bybit", "isolated", 5.0, 10.0, 1.0, 8.1),
(False, "futures", "bybit", "isolated", 10.0, 10.0, 1.0, 9.1),
(False, "futures", "bybit", "isolated", 1.0, 10.0, 1.0, (0.01, 0.01), 0.1),
(False, "futures", "bybit", "isolated", 3.0, 10.0, 1.0, (0.01, 0.01), 6.7666666),
(False, "futures", "bybit", "isolated", 5.0, 10.0, 1.0, (0.01, 0.01), 8.1),
(False, "futures", "bybit", "isolated", 10.0, 10.0, 1.0, (0.01, 0.01), 9.1),
# From the bybit example - without additional margin
(False, "futures", "bybit", "isolated", 50.0, 40000.0, 1.0, (0.005, None), 39400),
(False, "futures", "bybit", "isolated", 50.0, 20000.0, 1.0, (0.005, None), 19700),
# bybit, short
(True, "futures", "bybit", "isolated", 1.0, 10.0, 1.0, 19.9),
(True, "futures", "bybit", "isolated", 3.0, 10.0, 1.0, 13.233333),
(True, "futures", "bybit", "isolated", 5.0, 10.0, 1.0, 11.9),
(True, "futures", "bybit", "isolated", 10.0, 10.0, 1.0, 10.9),
(True, "futures", "bybit", "isolated", 1.0, 10.0, 1.0, (0.01, 0.01), 19.9),
(True, "futures", "bybit", "isolated", 3.0, 10.0, 1.0, (0.01, 0.01), 13.233333),
(True, "futures", "bybit", "isolated", 5.0, 10.0, 1.0, (0.01, 0.01), 11.9),
(True, "futures", "bybit", "isolated", 10.0, 10.0, 1.0, (0.01, 0.01), 10.9),
],
)
def test_get_liquidation_price(

@@ -6127,6 +6130,7 @@ def test_get_liquidation_price(
leverage,
open_rate,
amount,
mramt,
expected_liq,
liquidation_buffer,
):

@@ -6190,7 +6194,7 @@ def test_get_liquidation_price(
mocker.patch(f"{EXMS}.price_to_precision", lambda s, x, y, **kwargs: y)
exchange = get_patched_exchange(mocker, default_conf_usdt, exchange=exchange_name)

exchange.get_maintenance_ratio_and_amt = MagicMock(return_value=(0.01, 0.01))
exchange.get_maintenance_ratio_and_amt = MagicMock(return_value=mramt)
exchange.name = exchange_name
# default_conf_usdt.update({
# "dry_run": False,

@@ -1543,8 +1543,7 @@ def test_backtest_multi_pair(default_conf, fee, mocker, tres, pair, testdatadir)
assert len(evaluate_result_multi(results["results"], "5m", 3)) == 0

# Cached data correctly removed amounts
offset = 1 if tres == 0 else 0
removed_candles = len(data[pair]) - offset
removed_candles = len(data[pair]) - 1
assert len(backtesting.dataprovider.get_analyzed_dataframe(pair, "5m")[0]) == removed_candles
assert (
len(backtesting.dataprovider.get_analyzed_dataframe("NXT/BTC", "5m")[0])

@@ -1663,8 +1662,7 @@ def test_backtest_multi_pair_detail(
assert len(evaluate_result_multi(results["results"], "5m", 3)) == 0

# Cached data correctly removed amounts
offset = 1 if tres == 0 else 0
removed_candles = len(data[pair]) - offset
removed_candles = len(data[pair]) - 1
assert len(backtesting.dataprovider.get_analyzed_dataframe(pair, "5m")[0]) == removed_candles
assert (
len(backtesting.dataprovider.get_analyzed_dataframe("NXT/USDT", "5m")[0])

@@ -1793,7 +1791,7 @@ def test_backtest_multi_pair_detail_simplified(
assert len(evaluate_result_multi(results["results"], "1m", 3)) == 0

# # Cached data correctly removed amounts
offset = 1 if tres == 0 else 0
offset = 1
removed_candles = len(data[pair]) - offset
assert len(backtesting.dataprovider.get_analyzed_dataframe(pair, "1h")[0]) == removed_candles
assert (