Mirror of https://github.com/freqtrade/freqtrade.git (synced 2026-01-28 18:00:23 +00:00)
Merge branch 'develop' into feat/binance_trades_fast
.github/workflows/ci.yml (vendored, 2 lines changed)

@@ -24,7 +24,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ "ubuntu-20.04", "ubuntu-22.04", "ubuntu-24.04" ]
os: [ "ubuntu-22.04", "ubuntu-24.04" ]
python-version: ["3.10", "3.11", "3.12"]
steps:

@@ -9,7 +9,7 @@ repos:
# stages: [push]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: "v1.14.1"
rev: "v1.15.0"
hooks:
- id: mypy
exclude: build_helpers

@@ -19,11 +19,11 @@ repos:
- types-requests==2.32.0.20241016
- types-tabulate==0.9.0.20241207
- types-python-dateutil==2.9.0.20241206
- SQLAlchemy==2.0.37
- SQLAlchemy==2.0.38
# stages: [push]
- repo: https://github.com/pycqa/isort
rev: "5.13.2"
rev: "6.0.0"
hooks:
- id: isort
name: isort (python)

@@ -31,7 +31,7 @@ repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.9.2'
rev: 'v0.9.6'
hooks:
- id: ruff
- id: ruff-format

@@ -62,7 +62,7 @@ repos:
- id: strip-exif
- repo: https://github.com/codespell-project/codespell
rev: v2.3.0
rev: v2.4.1
hooks:
- id: codespell
additional_dependencies:

@@ -1,4 +1,4 @@
FROM python:3.12.7-slim-bookworm as base
FROM python:3.12.8-slim-bookworm as base
# Setup env
ENV LANG C.UTF-8

Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.

@@ -1,4 +1,4 @@
FROM python:3.11.10-slim-bookworm as base
FROM python:3.11.11-slim-bookworm as base
# Setup env
ENV LANG C.UTF-8

@@ -1,7 +1,7 @@
markdown==3.7
mkdocs==1.6.1
mkdocs-material==9.5.50
mkdocs-material==9.6.3
mdx_truly_sane_lists==1.3
pymdown-extensions==10.14.1
pymdown-extensions==10.14.3
jinja2==3.1.5
mike==2.1.3

@@ -1,6 +1,6 @@
"""Freqtrade bot"""
__version__ = "2025.1-dev"
__version__ = "2025.2-dev"
if "dev" in __version__:
from pathlib import Path

@@ -18,8 +18,7 @@ from freqtrade.enums import CandleType, TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import Exchange
from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist
from freqtrade.util import dt_now, dt_ts, format_ms_time
from freqtrade.util.datetime_helpers import format_ms_time_det
from freqtrade.util import dt_now, dt_ts, format_ms_time, format_ms_time_det
from freqtrade.util.migrations import migrate_data
from freqtrade.util.progress_tracker import CustomProgress, retrieve_progress_tracker

@@ -10,7 +10,6 @@ from freqtrade.exchange.bitmart import Bitmart
from freqtrade.exchange.bitpanda import Bitpanda
from freqtrade.exchange.bitvavo import Bitvavo
from freqtrade.exchange.bybit import Bybit
from freqtrade.exchange.coinbasepro import Coinbasepro
from freqtrade.exchange.cryptocom import Cryptocom
from freqtrade.exchange.exchange_utils import (
ROUND_DOWN,

@@ -33,7 +33,6 @@ class Binance(Exchange):
"stop_price_prop": "stopPrice",
"stoploss_order_types": {"limit": "stop_loss_limit"},
"order_time_in_force": ["GTC", "FOK", "IOC", "PO"],
"ohlcv_candle_limit": 1000,
"trades_pagination": "id",
"trades_pagination_arg": "fromId",
"trades_has_history": True,

@@ -41,6 +40,7 @@ class Binance(Exchange):
"ws_enabled": True,
}
_ft_has_futures: FtHas = {
"funding_fee_candle_limit": 1000,
"stoploss_order_types": {"limit": "stop", "market": "stop_market"},
"order_time_in_force": ["GTC", "FOK", "IOC"],
"tickers_have_price": False,

@@ -144,9 +144,10 @@ class Binance(Exchange):
:param candle_type: Any of the enum CandleType (must match trading mode!)
"""
if is_new_pair:
x = self.loop.run_until_complete(
self._async_get_candle_history(pair, timeframe, candle_type, 0)
)
with self._loop_lock:
x = self.loop.run_until_complete(
self._async_get_candle_history(pair, timeframe, candle_type, 0)
)
if x and x[3] and x[3][0] and x[3][0][0] > since_ms:
# Set starting date to first available candle.
since_ms = x[3][0][0]
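
This hunk and the ones below wrap `loop.run_until_complete()` in `self._loop_lock`, so the asyncio event loop shared between the bot thread and the data-download paths is only ever driven by one thread at a time. A minimal, hedged sketch of the pattern (class and coroutine names below are illustrative, not freqtrade's actual API):

import asyncio
import threading


class LoopRunner:
    """Owns one event loop and serialises access to it across threads."""

    def __init__(self) -> None:
        self.loop = asyncio.new_event_loop()
        self._loop_lock = threading.Lock()

    def run_sync(self, coro):
        # Two threads calling run_until_complete() on the same loop race and
        # raise RuntimeError; the lock makes the call safe from any thread.
        with self._loop_lock:
            return self.loop.run_until_complete(coro)


async def fetch_candles(pair: str) -> list[list[float]]:
    await asyncio.sleep(0)  # stand-in for an async exchange request
    return [[1_700_000_000_000, 1.0, 1.0, 1.0, 1.0, 0.0]]


runner = LoopRunner()
print(runner.run_sync(fetch_candles("BTC/USDT")))
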
@@ -205,16 +206,17 @@ class Binance(Exchange):
"""
Fastly fetch OHLCV data by leveraging https://data.binance.vision.
"""
df = self.loop.run_until_complete(
download_archive_ohlcv(
candle_type=candle_type,
pair=pair,
timeframe=timeframe,
since_ms=since_ms,
until_ms=until_ms,
markets=self.markets,
with self._loop_lock:
df = self.loop.run_until_complete(
download_archive_ohlcv(
candle_type=candle_type,
pair=pair,
timeframe=timeframe,
since_ms=since_ms,
until_ms=until_ms,
markets=self.markets,
)
)
)
# download the remaining data from rest API
if df.empty:

File diff suppressed because it is too large

@@ -31,7 +31,6 @@ class Bybit(Exchange):
unified_account = False
_ft_has: FtHas = {
"ohlcv_candle_limit": 1000,
"ohlcv_has_history": True,
"order_time_in_force": ["GTC", "FOK", "IOC", "PO"],
"ws_enabled": True,

@@ -1,24 +0,0 @@
"""CoinbasePro exchange subclass"""
import logging
from freqtrade.exchange import Exchange
from freqtrade.exchange.exchange_types import FtHas
logger = logging.getLogger(__name__)
class Coinbasepro(Exchange):
"""
CoinbasePro exchange class. Contains adjustments needed for Freqtrade to work
with this exchange.
Please note that this exchange is not included in the list of exchanges
officially supported by the Freqtrade development team. So some features
may still not work as expected.
"""
_ft_has: FtHas = {
"ohlcv_candle_limit": 300,
}

@@ -12,7 +12,7 @@ from copy import deepcopy
from datetime import datetime, timedelta, timezone
from math import floor, isnan
from threading import Lock
from typing import Any, Literal, TypeGuard
from typing import Any, Literal, TypeGuard, TypeVar
import ccxt
import ccxt.pro as ccxt_pro

@@ -113,6 +113,8 @@ from freqtrade.util.periodic_cache import PeriodicCache
logger = logging.getLogger(__name__)
T = TypeVar("T")
class Exchange:
# Parameters to add directly to buy/sell calls (like agreeing to trading agreement)

@@ -131,7 +133,6 @@ class Exchange:
"stoploss_order_types": {},
"order_time_in_force": ["GTC"],
"ohlcv_params": {},
"ohlcv_candle_limit": 500,
"ohlcv_has_history": True,  # Some exchanges (Kraken) don't provide history via ohlcv
"ohlcv_partial_candle": True,
"ohlcv_require_since": False,

@@ -276,6 +277,11 @@ class Exchange:
logger.info(f'Using Exchange "{self.name}"')
self.required_candle_call_count = 1
# Converts the interval provided in minutes in config to seconds
self.markets_refresh_interval: int = (
exchange_conf.get("markets_refresh_interval", 60) * 60 * 1000
)
if validate:
# Initial markets load
self.reload_markets(True, load_leverage_tiers=False)

@@ -285,11 +291,6 @@ class Exchange:
self._startup_candle_count, config.get("timeframe", "")
)
# Converts the interval provided in minutes in config to seconds
self.markets_refresh_interval: int = (
exchange_conf.get("markets_refresh_interval", 60) * 60 * 1000
)
if self.trading_mode != TradingMode.SPOT and load_leverage_tiers:
self.fill_leverage_tiers()
self.additional_exchange_init()

@@ -466,7 +467,12 @@ class Exchange:
:return: Candle limit as integer
"""
fallback_val = self._ft_has.get("ohlcv_candle_limit")
ccxt_val = self.features(
"spot" if candle_type == CandleType.SPOT else "futures", "fetchOHLCV", "limit", 500
)
if not isinstance(ccxt_val, float | int):
ccxt_val = 500
fallback_val = self._ft_has.get("ohlcv_candle_limit", ccxt_val)
if candle_type == CandleType.FUNDING_RATE:
fallback_val = self._ft_has.get("funding_fee_candle_limit", fallback_val)
return int(

@@ -642,7 +648,8 @@ class Exchange:
def _load_async_markets(self, reload: bool = False) -> dict[str, Any]:
try:
markets = self.loop.run_until_complete(self._api_reload_markets(reload=reload))
with self._loop_lock:
markets = self.loop.run_until_complete(self._api_reload_markets(reload=reload))
if isinstance(markets, Exception):
raise markets

@@ -887,6 +894,24 @@ class Exchange:
return self._ft_has["exchange_has_overrides"][endpoint]
return endpoint in self._api_async.has and self._api_async.has[endpoint]
def features(
self, market_type: Literal["spot", "futures"], endpoint, attribute, default: T
) -> T:
"""
Returns the exchange features for the given markettype
https://docs.ccxt.com/#/README?id=features
attributes are in a nested dict, with spot and swap.linear
e.g. spot.fetchOHLCV.limit
swap.linear.fetchOHLCV.limit
"""
feat = (
self._api_async.features.get("spot", {})
if market_type == "spot"
else self._api_async.features.get("swap", {}).get("linear", {})
)
return feat.get(endpoint, {}).get(attribute, default)
def get_precision_amount(self, pair: str) -> float | None:
"""
Returns the amount precision of the exchange.
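
The new `features()` helper reads ccxt's nested capability dict (spot values live under `spot.<endpoint>.<attribute>`, futures under `swap.linear.<endpoint>.<attribute>`), and `ohlcv_candle_limit()` now prefers that value over a hard-coded `_ft_has` entry. A simplified, standalone sketch of the same lookup (the `features` literal is illustrative test data, mirroring the unit test near the end of this diff):

# Illustrative ccxt-style features payload; see https://docs.ccxt.com/#/README?id=features
features = {
    "spot": {"fetchOHLCV": {"limit": 995}},
    "swap": {"linear": {"fetchOHLCV": {"limit": 997}}},
}


def feature(market_type: str, endpoint: str, attribute: str, default):
    # Spot capabilities sit at the top level, futures under swap.linear.
    feat = features.get("spot", {}) if market_type == "spot" else features.get("swap", {}).get("linear", {})
    return feat.get(endpoint, {}).get(attribute, default)


def ohlcv_candle_limit(ft_has: dict, market_type: str) -> int:
    # The ccxt-reported limit is the fallback; _ft_has only wins when set explicitly.
    ccxt_val = feature(market_type, "fetchOHLCV", "limit", 500)
    if not isinstance(ccxt_val, (int, float)):
        ccxt_val = 500
    return int(ft_has.get("ohlcv_candle_limit", ccxt_val))


print(ohlcv_candle_limit({}, "spot"))                           # 995 - taken from ccxt features
print(ohlcv_candle_limit({"ohlcv_candle_limit": 720}, "spot"))  # 720 - explicit exchange override
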
@@ -2318,15 +2343,16 @@ class Exchange:
:param until_ms: Timestamp in milliseconds to get history up to
:return: Dataframe with candle (OHLCV) data
"""
pair, _, _, data, _ = self.loop.run_until_complete(
self._async_get_historic_ohlcv(
pair=pair,
timeframe=timeframe,
since_ms=since_ms,
until_ms=until_ms,
candle_type=candle_type,
with self._loop_lock:
pair, _, _, data, _ = self.loop.run_until_complete(
self._async_get_historic_ohlcv(
pair=pair,
timeframe=timeframe,
since_ms=since_ms,
until_ms=until_ms,
candle_type=candle_type,
)
)
)
logger.debug(f"Downloaded data for {pair} from ccxt with length {len(data)}.")
return ohlcv_to_dataframe(data, timeframe, pair, fill_missing=False, drop_incomplete=True)

@@ -9,6 +9,8 @@ import ccxt
from freqtrade.constants import Config, PairWithTimeframe
from freqtrade.enums.candletype import CandleType
from freqtrade.exceptions import TemporaryError
from freqtrade.exchange.common import retrier
from freqtrade.exchange.exchange import timeframe_to_seconds
from freqtrade.exchange.exchange_types import OHLCVResponse
from freqtrade.util import dt_ts, format_ms_time, format_ms_time_det

@@ -82,14 +84,21 @@ class ExchangeWS:
Remove history for a pair/timeframe combination from ccxt cache
"""
self._ccxt_object.ohlcvs.get(paircomb[0], {}).pop(paircomb[1], None)
self.klines_last_refresh.pop(paircomb, None)
@retrier(retries=3)
def ohlcvs(self, pair: str, timeframe: str) -> list[list]:
"""
Returns a copy of the klines for a pair/timeframe combination
Note: this will only contain the data received from the websocket
so the data will build up over time.
"""
return deepcopy(self._ccxt_object.ohlcvs.get(pair, {}).get(timeframe, []))
try:
return deepcopy(self._ccxt_object.ohlcvs.get(pair, {}).get(timeframe, []))
except RuntimeError as e:
# Capture runtime errors and retry
# TemporaryError does not cause backoff - so we're essentially retrying immediately
raise TemporaryError(f"Error deepcopying: {e}") from e
def cleanup_expired(self) -> None:
"""
@@ -130,6 +139,15 @@ class ExchangeWS:
)
)
async def _unwatch_ohlcv(self, pair: str, timeframe: str, candle_type: CandleType) -> None:
try:
await self._ccxt_object.un_watch_ohlcv_for_symbols([[pair, timeframe]])
except ccxt.NotSupported as e:
logger.debug("un_watch_ohlcv_for_symbols not supported: %s", e)
pass
except Exception:
logger.exception("Exception in _unwatch_ohlcv")
def _continuous_stopped(
self, task: asyncio.Task, pair: str, timeframe: str, candle_type: CandleType
):

@@ -142,6 +160,10 @@ class ExchangeWS:
result = str(result1)
logger.info(f"{pair}, {timeframe}, {candle_type} - Task finished - {result}")
asyncio.run_coroutine_threadsafe(
self._unwatch_ohlcv(pair, timeframe, candle_type), loop=self._loop
)
self._klines_scheduled.discard((pair, timeframe, candle_type))
self._pop_history((pair, timeframe, candle_type))

@@ -30,7 +30,6 @@ class Gate(Exchange):
unified_account = False
_ft_has: FtHas = {
"ohlcv_candle_limit": 1000,
"order_time_in_force": ["GTC", "IOC"],
"stoploss_on_exchange": True,
"stoploss_order_types": {"limit": "limit"},

@@ -21,7 +21,6 @@ class Htx(Exchange):
"stop_price_param": "stopPrice",
"stop_price_prop": "stopPrice",
"stoploss_order_types": {"limit": "stop-limit"},
"ohlcv_candle_limit": 1000,
"l2_limit_range": [5, 10, 20],
"l2_limit_range_required": False,
"ohlcv_candle_limit_per_timeframe": {

@@ -22,7 +22,6 @@ class Hyperliquid(Exchange):
_ft_has: FtHas = {
"ohlcv_has_history": False,
"ohlcv_candle_limit": 5000,
"l2_limit_range": [20],
"trades_has_history": False,
"tickers_have_bid_ask": False,

@@ -26,7 +26,6 @@ class Kraken(Exchange):
"stop_price_prop": "stopLossPrice",
"stoploss_order_types": {"limit": "limit", "market": "market"},
"order_time_in_force": ["GTC", "IOC", "PO"],
"ohlcv_candle_limit": 720,
"ohlcv_has_history": False,
"trades_pagination": "id",
"trades_pagination_arg": "since",

@@ -28,7 +28,6 @@ class Kucoin(Exchange):
"l2_limit_range": [20, 100],
"l2_limit_range_required": False,
"order_time_in_force": ["GTC", "FOK", "IOC"],
"ohlcv_candle_limit": 1500,
}
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> dict:

@@ -6,26 +6,21 @@ from sklearn.utils.validation import has_fit_parameter, validate_data
class FreqaiMultiOutputRegressor(MultiOutputRegressor):
def fit(self, X, y, sample_weight=None, fit_params=None):
"""Fit the model to data, separately for each output variable.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
:param X: {array-like, sparse matrix} of shape (n_samples, n_features)
The input data.
y : {array-like, sparse matrix} of shape (n_samples, n_outputs)
:param y: {array-like, sparse matrix} of shape (n_samples, n_outputs)
Multi-output targets. An indicator matrix turns on multilabel
estimation.
sample_weight : array-like of shape (n_samples,), default=None
:param sample_weight: array-like of shape (n_samples,), default=None
Sample weights. If `None`, then samples are equally weighted.
Only supported if the underlying regressor supports sample
weights.
fit_params : A list of dicts for the fit_params
:param fit_params: A list of dicts for the fit_params
Parameters passed to the ``estimator.fit`` method of each step.
Each dict may contain same or different values (e.g. different
eval_sets or init_models)
.. versionadded:: 0.23
Returns
-------
self : object
Returns a fitted instance.
"""
if not hasattr(self.estimator, "fit"):

@@ -35,19 +35,20 @@ def update_liquidation_prices(
open_trades: list[Trade] = Trade.get_open_trades()
for t in open_trades:
# TODO: This should be done in a batch update
t.set_liquidation_price(
exchange.get_liquidation_price(
pair=t.pair,
open_rate=t.open_rate,
is_short=t.is_short,
amount=t.amount,
stake_amount=t.stake_amount,
leverage=t.leverage,
wallet_balance=total_wallet_stake,
open_trades=open_trades,
if t.has_open_position:
# TODO: This should be done in a batch update
t.set_liquidation_price(
exchange.get_liquidation_price(
pair=t.pair,
open_rate=t.open_rate,
is_short=t.is_short,
amount=t.amount,
stake_amount=t.stake_amount,
leverage=t.leverage,
wallet_balance=total_wallet_stake,
open_trades=open_trades,
)
)
)
elif trade:
trade.set_liquidation_price(
exchange.get_liquidation_price(

@@ -4,7 +4,6 @@ from logging import Handler
from rich._null_file import NullFile
from rich.console import Console
from rich.text import Text
from rich.traceback import Traceback
class FtRichHandler(Handler):

@@ -38,11 +37,6 @@ class FtRichHandler(Handler):
self._console.print(
Text() + log_time + gray_sep + name + gray_sep + log_level + gray_sep + msg
)
tb = None
if record.exc_info:
exc_type, exc_value, exc_traceback = record.exc_info
tb = Traceback.from_exception(exc_type, exc_value, exc_traceback, extra_lines=1)
self._console.print(tb)
except RecursionError:
raise

@@ -7,7 +7,7 @@ This module contains the backtesting logic
import logging
from collections import defaultdict
from copy import deepcopy
from datetime import datetime, timedelta, timezone
from datetime import datetime, timedelta
from typing import Any
from numpy import nan

@@ -63,7 +63,7 @@ from freqtrade.plugins.protectionmanager import ProtectionManager
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
from freqtrade.strategy.interface import IStrategy
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
from freqtrade.util import FtPrecise
from freqtrade.util import FtPrecise, dt_now
from freqtrade.util.migrations import migrate_data
from freqtrade.wallets import Wallets

@@ -1656,7 +1656,7 @@ class Backtesting:
self.progress.init_step(BacktestState.ANALYZE, 0)
strategy_name = strat.get_strategy_name()
logger.info(f"Running backtesting for Strategy {strategy_name}")
backtest_start_time = datetime.now(timezone.utc)
backtest_start_time = dt_now()
self._set_strategy(strat)
# need to reprocess data every time to populate signals

@@ -1683,7 +1683,7 @@ class Backtesting:
start_date=min_date,
end_date=max_date,
)
backtest_end_time = datetime.now(timezone.utc)
backtest_end_time = dt_now()
results.update(
{
"run_id": self.run_ids.get(strategy_name, ""),

@@ -1710,14 +1710,14 @@ class Backtesting:
def _get_min_cached_backtest_date(self):
min_backtest_date = None
backtest_cache_age = self.config.get("backtest_cache", constants.BACKTEST_CACHE_DEFAULT)
if self.timerange.stopts == 0 or self.timerange.stopdt > datetime.now(tz=timezone.utc):
if self.timerange.stopts == 0 or self.timerange.stopdt > dt_now():
logger.warning("Backtest result caching disabled due to use of open-ended timerange.")
elif backtest_cache_age == "day":
min_backtest_date = datetime.now(tz=timezone.utc) - timedelta(days=1)
min_backtest_date = dt_now() - timedelta(days=1)
elif backtest_cache_age == "week":
min_backtest_date = datetime.now(tz=timezone.utc) - timedelta(weeks=1)
min_backtest_date = dt_now() - timedelta(weeks=1)
elif backtest_cache_age == "month":
min_backtest_date = datetime.now(tz=timezone.utc) - timedelta(weeks=4)
min_backtest_date = dt_now() - timedelta(weeks=4)
return min_backtest_date
def load_prior_backtest(self):

@@ -6,7 +6,6 @@ This module contains the hyperopt logic
import logging
import random
import sys
from datetime import datetime
from math import ceil
from multiprocessing import Manager

@@ -15,7 +14,6 @@ from typing import Any
import rapidjson
from joblib import Parallel, cpu_count, delayed, wrap_non_picklable_objects
from joblib.externals import cloudpickle
from freqtrade.constants import FTHYPT_FILEVERSION, LAST_BT_RESULT_FN, Config
from freqtrade.enums import HyperoptState

@@ -110,17 +108,6 @@ class Hyperopt:
logger.info(f"Removing `{p}`.")
p.unlink()
def hyperopt_pickle_magic(self, bases) -> None:
"""
Hyperopt magic to allow strategy inheritance across files.
For this to properly work, we need to register the module of the imported class
to pickle as value.
"""
for modules in bases:
if modules.__name__ != "IStrategy":
cloudpickle.register_pickle_by_value(sys.modules[modules.__module__])
self.hyperopt_pickle_magic(modules.__bases__)
def _save_result(self, epoch: dict) -> None:
"""
Save hyperopt results to file

@@ -114,7 +114,7 @@ class HyperOptimizer:
def get_strategy_name(self) -> str:
return self.backtesting.strategy.get_strategy_name()
def hyperopt_pickle_magic(self, bases) -> None:
def hyperopt_pickle_magic(self, bases: tuple[type, ...]) -> None:
"""
Hyperopt magic to allow strategy inheritance across files.
For this to properly work, we need to register the module of the imported class

@@ -122,7 +122,8 @@ class HyperOptimizer:
"""
for modules in bases:
if modules.__name__ != "IStrategy":
cloudpickle.register_pickle_by_value(sys.modules[modules.__module__])
if mod := sys.modules.get(modules.__module__):
cloudpickle.register_pickle_by_value(mod)
self.hyperopt_pickle_magic(modules.__bases__)
def _get_params_dict(

@@ -9,7 +9,7 @@ from typing import Any
import freqtrade.exchange as exchanges
from freqtrade.constants import Config, ExchangeConfig
from freqtrade.exchange import MAP_EXCHANGE_CHILDCLASS, Exchange
from freqtrade.resolvers import IResolver
from freqtrade.resolvers.iresolver import IResolver
logger = logging.getLogger(__name__)

@@ -16,7 +16,7 @@ from freqtrade.configuration.config_validation import validate_migrated_strategy
from freqtrade.constants import REQUIRED_ORDERTIF, REQUIRED_ORDERTYPES, USERPATH_STRATEGIES, Config
from freqtrade.enums import TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.resolvers import IResolver
from freqtrade.resolvers.iresolver import IResolver
from freqtrade.strategy.interface import IStrategy

@@ -99,16 +99,18 @@ def __run_backtest_bg(btconfig: Config):
ApiBG.bt["data"], ApiBG.bt["bt"].all_results, min_date=min_date, max_date=max_date
)
if btconfig.get("export", "none") == "trades":
combined_res = combined_dataframes_with_rel_mean(ApiBG.bt["data"], min_date, max_date)
fn = store_backtest_results(
btconfig,
ApiBG.bt["bt"].results,
datetime.now().strftime("%Y-%m-%d_%H-%M-%S"),
market_change_data=combined_res,
)
ApiBG.bt["bt"].results["metadata"][strategy_name]["filename"] = str(fn.stem)
ApiBG.bt["bt"].results["metadata"][strategy_name]["strategy"] = strategy_name
if btconfig.get("export", "none") == "trades":
combined_res = combined_dataframes_with_rel_mean(
ApiBG.bt["data"], min_date, max_date
)
fn = store_backtest_results(
btconfig,
ApiBG.bt["bt"].results,
datetime.now().strftime("%Y-%m-%d_%H-%M-%S"),
market_change_data=combined_res,
)
ApiBG.bt["bt"].results["metadata"][strategy_name]["filename"] = str(fn.stem)
ApiBG.bt["bt"].results["metadata"][strategy_name]["strategy"] = strategy_name
logger.info("Backtest finished.")

freqtrade/rpc/api_server/api_pair_history.py (new file, 77 lines)
@@ -0,0 +1,77 @@
import logging
from copy import deepcopy
from fastapi import APIRouter, Depends, HTTPException
from freqtrade.configuration import validate_config_consistency
from freqtrade.rpc.api_server.api_pairlists import handleExchangePayload
from freqtrade.rpc.api_server.api_schemas import PairHistory, PairHistoryRequest
from freqtrade.rpc.api_server.deps import get_config, get_exchange
from freqtrade.rpc.rpc import RPC
logger = logging.getLogger(__name__)
router = APIRouter()
@router.get("/pair_history", response_model=PairHistory, tags=["candle data"])
def pair_history(
pair: str,
timeframe: str,
timerange: str,
strategy: str,
freqaimodel: str | None = None,
config=Depends(get_config),
exchange=Depends(get_exchange),
):
# The initial call to this endpoint can be slow, as it may need to initialize
# the exchange class.
config_loc = deepcopy(config)
config_loc.update(
{
"timeframe": timeframe,
"strategy": strategy,
"timerange": timerange,
"freqaimodel": freqaimodel if freqaimodel else config_loc.get("freqaimodel"),
}
)
validate_config_consistency(config_loc)
try:
return RPC._rpc_analysed_history_full(config_loc, pair, timeframe, exchange, None, False)
except Exception as e:
raise HTTPException(status_code=502, detail=str(e))
@router.post("/pair_history", response_model=PairHistory, tags=["candle data"])
def pair_history_filtered(payload: PairHistoryRequest, config=Depends(get_config)):
# The initial call to this endpoint can be slow, as it may need to initialize
# the exchange class.
config_loc = deepcopy(config)
config_loc.update(
{
"timeframe": payload.timeframe,
"strategy": payload.strategy,
"timerange": payload.timerange,
"freqaimodel": (
payload.freqaimodel if payload.freqaimodel else config_loc.get("freqaimodel")
),
}
)
handleExchangePayload(payload, config_loc)
exchange = get_exchange(config_loc)
validate_config_consistency(config_loc)
try:
return RPC._rpc_analysed_history_full(
config_loc,
payload.pair,
payload.timeframe,
exchange,
payload.columns,
payload.live_mode,
)
except Exception as e:
logger.exception("Error in pair_history_filtered")
raise HTTPException(status_code=502, detail=str(e))
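
The tests near the end of this diff exercise the POST variant of this endpoint in webserver mode; a hedged example of such a request from outside the codebase (URL, credentials, and strategy name are placeholders):

import requests

# Placeholder URL/credentials for a locally running freqtrade API server in webserver mode.
resp = requests.post(
    "http://127.0.0.1:8080/api/v1/pair_history",
    json={
        "pair": "BTC/USDT",
        "timeframe": "1h",
        "timerange": "20240101-",
        "strategy": "SampleStrategy",  # optional now that strategy may be omitted
        "columns": ["rsi"],
        "live_mode": True,             # fetch candles from the exchange instead of from disk
    },
    auth=("freqtrader", "password"),
    timeout=30,
)
print(resp.status_code, resp.json().get("length"))
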
@@ -524,10 +524,11 @@ class PairCandlesRequest(BaseModel):
columns: list[str] | None = None
class PairHistoryRequest(PairCandlesRequest):
class PairHistoryRequest(PairCandlesRequest, ExchangeModePayloadMixin):
timerange: str
strategy: str
strategy: str | None = None
freqaimodel: str | None = None
live_mode: bool = False
class PairHistory(BaseModel):

@@ -606,6 +607,24 @@ class BacktestMarketChange(BaseModel):
data: list[list[Any]]
class MarketRequest(ExchangeModePayloadMixin, BaseModel):
base: str | None = None
quote: str | None = None
class MarketModel(BaseModel):
symbol: str
base: str
quote: str
spot: bool
swap: bool
class MarketResponse(BaseModel):
markets: dict[str, MarketModel]
exchange_id: str
class SysInfo(BaseModel):
cpu_pct: list[float]
ram_pct: float

@@ -1,14 +1,16 @@
import logging
from copy import deepcopy
from typing import Annotated
from fastapi import APIRouter, Depends, Query
from fastapi.exceptions import HTTPException
from freqtrade import __version__
from freqtrade.data.history import get_datahandler
from freqtrade.enums import CandleType, State, TradingMode
from freqtrade.enums import CandleType, RunMode, State, TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.rpc import RPC
from freqtrade.rpc.api_server.api_pairlists import handleExchangePayload
from freqtrade.rpc.api_server.api_schemas import (
AvailablePairs,
Balances,

@@ -30,11 +32,12 @@ from freqtrade.rpc.api_server.api_schemas import (
Locks,
LocksPayload,
Logs,
MarketRequest,
MarketResponse,
MixTag,
OpenTradeSchema,
PairCandlesRequest,
PairHistory,
PairHistoryRequest,
PerformanceEntry,
Ping,
PlotConfig,

@@ -84,7 +87,8 @@ logger = logging.getLogger(__name__)
# 2.35: pair_candles and pair_history endpoints as Post variant
# 2.40: Add hyperopt-loss endpoint
# 2.41: Add download-data endpoint
API_VERSION = 2.41
# 2.42: Add /pair_history endpoint with live data
API_VERSION = 2.42
# Public API, requires no auth.
router_public = APIRouter()

@@ -342,58 +346,6 @@ def pair_candles_filtered(payload: PairCandlesRequest, rpc: RPC = Depends(get_rp
)
@router.get("/pair_history", response_model=PairHistory, tags=["candle data"])
def pair_history(
pair: str,
timeframe: str,
timerange: str,
strategy: str,
freqaimodel: str | None = None,
config=Depends(get_config),
exchange=Depends(get_exchange),
):
# The initial call to this endpoint can be slow, as it may need to initialize
# the exchange class.
config = deepcopy(config)
config.update(
{
"timeframe": timeframe,
"strategy": strategy,
"timerange": timerange,
"freqaimodel": freqaimodel if freqaimodel else config.get("freqaimodel"),
}
)
try:
return RPC._rpc_analysed_history_full(config, pair, timeframe, exchange, None)
except Exception as e:
raise HTTPException(status_code=502, detail=str(e))
@router.post("/pair_history", response_model=PairHistory, tags=["candle data"])
def pair_history_filtered(
payload: PairHistoryRequest, config=Depends(get_config), exchange=Depends(get_exchange)
):
# The initial call to this endpoint can be slow, as it may need to initialize
# the exchange class.
config = deepcopy(config)
config.update(
{
"timeframe": payload.timeframe,
"strategy": payload.strategy,
"timerange": payload.timerange,
"freqaimodel": (
payload.freqaimodel if payload.freqaimodel else config.get("freqaimodel")
),
}
)
try:
return RPC._rpc_analysed_history_full(
config, payload.pair, payload.timeframe, exchange, payload.columns
)
except Exception as e:
raise HTTPException(status_code=502, detail=str(e))
@router.get("/plot_config", response_model=PlotConfig, tags=["candle data"])
def plot_config(
strategy: str | None = None,

@@ -525,6 +477,29 @@ def list_available_pairs(
return result
@router.get("/markets", response_model=MarketResponse, tags=["candle data", "webserver"])
def markets(
query: Annotated[MarketRequest, Query()],
config=Depends(get_config),
rpc: RPC | None = Depends(get_rpc_optional),
):
if not rpc or config["runmode"] == RunMode.WEBSERVER:
# webserver mode
config_loc = deepcopy(config)
handleExchangePayload(query, config_loc)
exchange = get_exchange(config_loc)
else:
exchange = rpc._freqtrade.exchange
return {
"markets": exchange.get_markets(
base_currencies=[query.base] if query.base else None,
quote_currencies=[query.quote] if query.quote else None,
),
"exchange_id": exchange.id,
}
@router.get("/sysinfo", response_model=SysInfo, tags=["info"])
def sysinfo():
return RPC._rpc_sysinfo()
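
The new /markets endpoint can also be queried directly; a hedged example (placeholder URL and credentials), filtering by base currency as the tests below do:

import requests

# Placeholder connection details for a locally running freqtrade API server.
resp = requests.get(
    "http://127.0.0.1:8080/api/v1/markets",
    params={"base": "XRP"},  # optional filters: base, quote
    auth=("freqtrader", "password"),
    timeout=10,
)
payload = resp.json()
print(payload["exchange_id"])
for symbol, market in payload["markets"].items():
    print(symbol, market["spot"], market["swap"])
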
@@ -120,6 +120,7 @@ class ApiServer(RPCHandler):
from freqtrade.rpc.api_server.api_background_tasks import router as api_bg_tasks
from freqtrade.rpc.api_server.api_backtest import router as api_backtest
from freqtrade.rpc.api_server.api_download_data import router as api_download_data
from freqtrade.rpc.api_server.api_pair_history import router as api_pair_history
from freqtrade.rpc.api_server.api_pairlists import router as api_pairlists
from freqtrade.rpc.api_server.api_v1 import router as api_v1
from freqtrade.rpc.api_server.api_v1 import router_public as api_v1_public

@@ -145,6 +146,11 @@ class ApiServer(RPCHandler):
prefix="/api/v1",
dependencies=[Depends(http_basic_or_jwt_token), Depends(is_webserver_mode)],
)
app.include_router(
api_pair_history,
prefix="/api/v1",
dependencies=[Depends(http_basic_or_jwt_token), Depends(is_webserver_mode)],
)
app.include_router(
api_pairlists,
prefix="/api/v1",

@@ -31,7 +31,7 @@ from freqtrade.enums import (
TradingMode,
)
from freqtrade.exceptions import ExchangeError, PricingError
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_msecs
from freqtrade.exchange import Exchange, timeframe_to_minutes, timeframe_to_msecs
from freqtrade.exchange.exchange_utils import price_to_precision
from freqtrade.loggers import bufferHandler
from freqtrade.persistence import KeyStoreKeys, KeyValueStore, PairLocks, Trade

@@ -42,12 +42,13 @@ from freqtrade.rpc.rpc_types import RPCSendMsg
from freqtrade.util import (
decimals_per_coin,
dt_from_ts,
dt_humanize_delta,
dt_now,
dt_ts,
dt_ts_def,
format_date,
shorten_date,
)
from freqtrade.util.datetime_helpers import dt_humanize_delta
from freqtrade.wallets import PositionWallet, Wallet

@@ -1436,7 +1437,12 @@ class RPC:
@staticmethod
def _rpc_analysed_history_full(
config: Config, pair: str, timeframe: str, exchange, selected_cols: list[str] | None
config: Config,
pair: str,
timeframe: str,
exchange: Exchange,
selected_cols: list[str] | None,
live: bool,
) -> dict[str, Any]:
timerange_parsed = TimeRange.parse_timerange(config.get("timerange"))

@@ -1444,31 +1450,53 @@ class RPC:
from freqtrade.data.dataprovider import DataProvider
from freqtrade.resolvers.strategy_resolver import StrategyResolver
strategy = StrategyResolver.load_strategy(config)
startup_candles = strategy.startup_candle_count
strategy_name = ""
startup_candles = 0
if config.get("strategy"):
strategy = StrategyResolver.load_strategy(config)
startup_candles = strategy.startup_candle_count
strategy_name = strategy.get_strategy_name()
_data = load_data(
datadir=config["datadir"],
pairs=[pair],
timeframe=timeframe,
timerange=timerange_parsed,
data_format=config["dataformat_ohlcv"],
candle_type=config.get("candle_type_def", CandleType.SPOT),
startup_candles=startup_candles,
)
if pair not in _data:
raise RPCException(
f"No data for {pair}, {timeframe} in {config.get('timerange')} found."
if live:
data = exchange.get_historic_ohlcv(
pair=pair,
timeframe=timeframe,
since_ms=timerange_parsed.startts * 1000
if timerange_parsed.startts
else dt_ts(dt_now() - timedelta(days=30)),
is_new_pair=True,  # history is never available - so always treat as new pair
candle_type=config.get("candle_type_def", CandleType.SPOT),
until_ms=timerange_parsed.stopts,
)
else:
_data = load_data(
datadir=config["datadir"],
pairs=[pair],
timeframe=timeframe,
timerange=timerange_parsed,
data_format=config["dataformat_ohlcv"],
candle_type=config.get("candle_type_def", CandleType.SPOT),
startup_candles=startup_candles,
)
if pair not in _data:
raise RPCException(
f"No data for {pair}, {timeframe} in {config.get('timerange')} found."
)
data = _data[pair]
strategy.dp = DataProvider(config, exchange=exchange, pairlists=None)
strategy.ft_bot_start()
if config.get("strategy"):
strategy.dp = DataProvider(config, exchange=exchange, pairlists=None)
strategy.ft_bot_start()
df_analyzed = strategy.analyze_ticker(_data[pair], {"pair": pair})
df_analyzed = trim_dataframe(df_analyzed, timerange_parsed, startup_candles=startup_candles)
df_analyzed = strategy.analyze_ticker(data, {"pair": pair})
df_analyzed = trim_dataframe(
df_analyzed, timerange_parsed, startup_candles=startup_candles
)
else:
df_analyzed = data
return RPC._convert_dataframe_to_dict(
strategy.get_strategy_name(),
strategy_name,
pair,
timeframe,
df_analyzed.copy(),

@@ -1,7 +1,7 @@
from freqtrade_client.ft_rest_client import FtRestClient
__version__ = "2025.1-dev"
__version__ = "2025.2-dev"
if "dev" in __version__:
from pathlib import Path

@@ -23,10 +23,18 @@ PostDataT = dict[str, Any] | list[dict[str, Any]] | None
class FtRestClient:
def __init__(
self, serverurl, username=None, password=None, *, pool_connections=10, pool_maxsize=10
self,
serverurl,
username=None,
password=None,
*,
pool_connections=10,
pool_maxsize=10,
timeout=10,
):
self._serverurl = serverurl
self._session = requests.Session()
self._timeout = timeout
# allow configuration of pool
adapter = HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_maxsize)

@@ -50,7 +58,9 @@ class FtRestClient:
url = urlunparse((schema, netloc, path, par, query, fragment))
try:
resp = self._session.request(method, url, headers=hd, data=json.dumps(data))
resp = self._session.request(
method, url, headers=hd, timeout=self._timeout, data=json.dumps(data)
)
# return resp.text
return resp.json()
except RequestConnectionError:
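
With the new keyword-only `timeout` argument, client requests that previously could hang on a stalled server now fail after the configured number of seconds. A small usage sketch (server URL and credentials are placeholders):

from freqtrade_client.ft_rest_client import FtRestClient

# Placeholder connection details; timeout is the new keyword-only argument (in seconds).
client = FtRestClient(
    "http://127.0.0.1:8080",
    "freqtrader",
    "password",
    pool_connections=10,
    pool_maxsize=10,
    timeout=30,
)
print(client.version())  # assumes the client's usual helper methods, e.g. version()
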
@@ -7,22 +7,22 @@
-r docs/requirements-docs.txt
coveralls==4.0.1
ruff==0.9.3
mypy==1.14.1
ruff==0.9.5
mypy==1.15.0
pre-commit==4.1.0
pytest==8.3.4
pytest-asyncio==0.25.2
pytest-asyncio==0.25.3
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-random-order==1.1.1
pytest-timeout==2.3.1
pytest-xdist==3.6.1
isort==5.13.2
isort==6.0.0
# For datetime mocking
time-machine==2.16.0
# Convert jupyter notebooks to markdown documents
nbconvert==7.16.5
nbconvert==7.16.6
# mypy types
types-cachetools==5.5.0.20240820

@@ -3,9 +3,11 @@
# Required for freqai-rl
torch==2.2.2; sys_platform == 'darwin' and platform_machine == 'x86_64'
torch==2.5.1; sys_platform != 'darwin' or platform_machine != 'x86_64'
torch==2.6.0; sys_platform != 'darwin' or platform_machine != 'x86_64'
gymnasium==0.29.1
stable_baselines3==2.4.1
# SB3 >=2.5.0 depends on torch 2.3.0 - which implies it dropped support x86 macos
stable_baselines3==2.4.1; sys_platform == 'darwin' and platform_machine == 'x86_64'
stable_baselines3==2.5.0; sys_platform != 'darwin' or platform_machine != 'x86_64'
sb3_contrib>=2.2.1
# Progress bar for stable-baselines3 and sb3-contrib
tqdm==4.67.1

@@ -7,6 +7,6 @@ scikit-learn==1.6.1
joblib==1.4.2
catboost==1.2.7; 'arm' not in platform_machine
lightgbm==4.5.0
xgboost==2.1.3
xgboost==2.1.4
tensorboard==2.18.0
datasieve==0.1.7

@@ -1,4 +1,4 @@
# Include all requirements to run the bot.
-r requirements.txt
plotly==5.24.1
plotly==6.0.0

@@ -4,11 +4,11 @@ bottleneck==1.4.2
numexpr==2.10.2
pandas-ta==0.3.14b
ccxt==4.4.50
ccxt==4.4.58
cryptography==42.0.8; platform_machine == 'armv7l'
cryptography==44.0.0; platform_machine != 'armv7l'
aiohttp==3.10.11
SQLAlchemy==2.0.37
cryptography==44.0.1; platform_machine != 'armv7l'
aiohttp==3.9.5
SQLAlchemy==2.0.38
python-telegram-bot==21.10
# can't be hard-pinned due to telegram-bot pinning httpx with ~
httpx>=0.24.1

@@ -17,7 +17,7 @@ cachetools==5.5.1
requests==2.32.3
urllib3==2.3.0
jsonschema==4.23.0
TA-Lib==0.4.34
TA-Lib==0.4.38
technical==1.5.0
tabulate==0.9.0
pycoingecko==3.2.0

@@ -38,7 +38,7 @@ orjson==3.10.15
sdnotify==0.3.2
# API Server
fastapi==0.115.7
fastapi==0.115.8
pydantic==2.10.6
uvicorn==0.34.0
pyjwt==2.10.1

@@ -50,7 +50,7 @@ questionary==2.1.0
prompt-toolkit==3.0.50
# Extensions to datetime library
python-dateutil==2.9.0.post0
pytz==2024.2
pytz==2025.1
#Futures
schedule==1.2.2

@@ -1779,15 +1779,6 @@ def limit_buy_order_open():
}
@pytest.fixture(scope="function")
def limit_buy_order(limit_buy_order_open):
order = deepcopy(limit_buy_order_open)
order["status"] = "closed"
order["filled"] = order["amount"]
order["remaining"] = 0.0
return order
@pytest.fixture
def limit_buy_order_old():
return {

@@ -645,7 +645,7 @@ def test_reload_markets(default_conf, mocker, caplog, time_machine):
# Tried once, failed
lam_spy.reset_mock()
# When forceing (bot startup), it should retry 3 times.
# When forcing (bot startup), it should retry 3 times.
exchange.reload_markets(force=True)
assert lam_spy.call_count == 4
assert exchange.markets == updated_markets

@@ -4439,7 +4439,7 @@ def test_ohlcv_candle_limit(default_conf, mocker, exchange_name):
pytest.skip("Tested separately for okx")
exchange = get_patched_exchange(mocker, default_conf, exchange=exchange_name)
timeframes = ("1m", "5m", "1h")
expected = exchange._ft_has["ohlcv_candle_limit"]
expected = exchange._ft_has.get("ohlcv_candle_limit", 500)
for timeframe in timeframes:
# if 'ohlcv_candle_limit_per_timeframe' in exchange._ft_has:
# expected = exchange._ft_has['ohlcv_candle_limit_per_timeframe'][timeframe]

@@ -6262,3 +6262,26 @@ def test_price_to_precision_with_default_conf(default_conf, mocker):
prec_price = patched_ex.price_to_precision("XRP/USDT", 1.0000000101)
assert prec_price == 1.00000001
assert prec_price == 1.00000001
def test_exchange_features(default_conf, mocker):
conf = copy.deepcopy(default_conf)
exchange = get_patched_exchange(mocker, conf)
exchange._api_async.features = {
"spot": {
"fetchOHLCV": {
"limit": 995,
}
},
"swap": {
"linear": {
"fetchOHLCV": {
"limit": 997,
}
}
},
}
assert exchange.features("spot", "fetchOHLCV", "limit", 500) == 995
assert exchange.features("futures", "fetchOHLCV", "limit", 500) == 997
# Fall back to default
assert exchange.features("futures", "fetchOHLCV_else", "limit", 601) == 601

@@ -1,9 +1,12 @@
import asyncio
import logging
import threading
from datetime import timedelta
from time import sleep
from unittest.mock import AsyncMock, MagicMock
from ccxt import NotSupported
from freqtrade.enums import CandleType
from freqtrade.exchange.exchange_ws import ExchangeWS
from ft_client.test_client.test_rest_client import log_has_re

@@ -61,15 +64,18 @@ def patch_eventloop_threading(exchange):
pass
async def test_exchangews_ohlcv(mocker, time_machine):
async def test_exchangews_ohlcv(mocker, time_machine, caplog):
config = MagicMock()
ccxt_object = MagicMock()
caplog.set_level(logging.DEBUG)
async def sleeper(*args, **kwargs):
# pass
await asyncio.sleep(0.12)
return MagicMock()
ccxt_object.un_watch_ohlcv_for_symbols = AsyncMock(side_effect=NotSupported)
ccxt_object.watch_ohlcv = AsyncMock(side_effect=sleeper)
ccxt_object.close = AsyncMock()
time_machine.move_to("2024-11-01 01:00:02 +00:00")

@@ -101,11 +107,14 @@ async def test_exchangews_ohlcv(mocker, time_machine):
time_machine.shift(timedelta(minutes=5))
exchange_ws.schedule_ohlcv("ETH/BTC", "1m", CandleType.SPOT)
await asyncio.sleep(1)
assert log_has_re("un_watch_ohlcv_for_symbols not supported: ", caplog)
# XRP/BTC should be cleaned up.
assert exchange_ws._klines_watching == {
("ETH/BTC", "1m", CandleType.SPOT),
}
# Cleanup happened.
ccxt_object.un_watch_ohlcv_for_symbols = AsyncMock(side_effect=ValueError)
exchange_ws.schedule_ohlcv("ETH/BTC", "1m", CandleType.SPOT)
assert exchange_ws._klines_watching == {
("ETH/BTC", "1m", CandleType.SPOT),

@@ -117,6 +126,7 @@ async def test_exchangews_ohlcv(mocker, time_machine):
finally:
# Cleanup
exchange_ws.cleanup()
assert log_has_re("Exception in _unwatch_ohlcv", caplog)
async def test_exchangews_get_ohlcv(mocker, caplog):

@@ -21,6 +21,7 @@ EXCHANGES = {
"use_ci_proxy": True,
"hasQuoteVolume": True,
"timeframe": "1h",
"candle_count": 1000,
"futures": True,
"futures_pair": "BTC/USDT:USDT",
"hasQuoteVolumeFutures": True,

@@ -96,6 +97,7 @@ EXCHANGES = {
"stake_currency": "USDT",
"hasQuoteVolume": True,
"timeframe": "1h",
"candle_count": 1000,
"futures": False,
"skip_ws_tests": True,
"sample_order": [

@@ -136,6 +138,7 @@ EXCHANGES = {
"stake_currency": "USD",
"hasQuoteVolume": True,
"timeframe": "1h",
"candle_count": 720,
"leverage_tiers_public": False,
"leverage_in_spot_market": True,
"trades_lookback_hours": 12,

@@ -162,6 +165,7 @@ EXCHANGES = {
"stake_currency": "USDT",
"hasQuoteVolume": True,
"timeframe": "1h",
"candle_count": 1500,
"leverage_tiers_public": False,
"leverage_in_spot_market": True,
"sample_order": [

@@ -229,6 +233,7 @@ EXCHANGES = {
"stake_currency": "USDT",
"hasQuoteVolume": True,
"timeframe": "1h",
"candle_count": 1000,
"futures": True,
"futures_pair": "BTC/USDT:USDT",
"hasQuoteVolumeFutures": True,

@@ -345,6 +350,7 @@ EXCHANGES = {
"stake_currency": "USDT",
"hasQuoteVolume": True,
"timeframe": "1h",
"candle_count": 300,
"futures": True,
"futures_pair": "BTC/USDT:USDT",
"hasQuoteVolumeFutures": False,

@@ -358,6 +364,7 @@ EXCHANGES = {
"hasQuoteVolume": True,
"use_ci_proxy": True,
"timeframe": "1h",
"candle_count": 1000,
"futures_pair": "BTC/USDT:USDT",
"futures": True,
"orderbook_max_entries": 50,

@@ -398,6 +405,7 @@ EXCHANGES = {
"stake_currency": "USDT",
"hasQuoteVolume": True,
"timeframe": "1h",
"candle_count": 200,
"orderbook_max_entries": 50,
},
"htx": {

@@ -405,13 +413,14 @@ EXCHANGES = {
"stake_currency": "BTC",
"hasQuoteVolume": True,
"timeframe": "1h",
"futures": False,
"candle_count": 1000,
},
"bitvavo": {
"pair": "BTC/EUR",
"stake_currency": "EUR",
"hasQuoteVolume": True,
"timeframe": "1h",
"candle_count": 1440,
"leverage_tiers_public": False,
"leverage_in_spot_market": False,
},

@@ -420,6 +429,7 @@ EXCHANGES = {
"stake_currency": "USDT",
"hasQuoteVolume": True,
"timeframe": "1h",
"candle_count": 1000,
"futures": False,
"sample_order": [
{

@@ -482,6 +492,7 @@ EXCHANGES = {
"hasQuoteVolume": False,
"timeframe": "1h",
"futures": True,
"candle_count": 5000,
"orderbook_max_entries": 20,
"futures_pair": "BTC/USDC:USDC",
"hasQuoteVolumeFutures": True,

@@ -48,6 +48,22 @@ class TestCCXTExchange:
}
)
def test_ohlcv_limit(self, exchange: EXCHANGE_FIXTURE_TYPE):
exch, exchangename = exchange
expected_count = EXCHANGES[exchangename].get("candle_count")
if not expected_count:
pytest.skip("No expected candle count for exchange")
assert exch.ohlcv_candle_limit("1m", CandleType.SPOT) == expected_count
def test_ohlcv_limit_futures(self, exchange_futures: EXCHANGE_FIXTURE_TYPE):
exch, exchangename = exchange_futures
expected_count = EXCHANGES[exchangename].get("candle_count")
if not expected_count:
pytest.skip("No expected candle count for exchange")
assert exch.ohlcv_candle_limit("1m", CandleType.SPOT) == expected_count
def test_load_markets_futures(self, exchange_futures: EXCHANGE_FIXTURE_TYPE):
exchange, exchangename = exchange_futures
pair = EXCHANGES[exchangename]["pair"]

@@ -107,7 +107,7 @@ def test_volume_change_pair_list_init_wrong_lookback_period(mocker, rpl_config):
with pytest.raises(
OperationalException,
match=r"ChangeFilter requires lookback_period to not exceed"
r" exchange max request size \(1000\)",
r" exchange max request size \(\d+\)",
):
get_patched_freqtradebot(mocker, rpl_config)

@@ -36,6 +36,7 @@ from tests.conftest import (
EXMS,
create_mock_trades,
create_mock_trades_usdt,
generate_test_data,
get_mock_coro,
get_patched_freqtradebot,
log_has,

@@ -220,16 +221,16 @@ def test_api_ws_auth(botclient):
bad_token = "bad-ws_token"
with pytest.raises(WebSocketDisconnect):
with client.websocket_connect(url(bad_token)) as websocket:
websocket.receive()
with client.websocket_connect(url(bad_token)):
pass
good_token = _TEST_WS_TOKEN
with client.websocket_connect(url(good_token)) as websocket:
with client.websocket_connect(url(good_token)):
pass
jwt_secret = ftbot.config["api_server"].get("jwt_secret_key", "super-secret")
jwt_token = create_token({"identity": {"u": "Freqtrade"}}, jwt_secret)
with client.websocket_connect(url(jwt_token)) as websocket:
with client.websocket_connect(url(jwt_token)):
pass

@@ -1914,6 +1915,15 @@ def test_api_pair_history(botclient, tmp_path, mocker):
timeframe = "5m"
lfm = mocker.patch("freqtrade.strategy.interface.IStrategy.load_freqAI_model")
# Wrong mode
rc = client_get(
client,
f"{BASE_URI}/pair_history?timeframe={timeframe}"
f"&timerange=20180111-20180112&strategy={CURRENT_TEST_STRATEGY}",
)
assert_response(rc, 503)
_ftbot.config["runmode"] = RunMode.WEBSERVER
# No pair
rc = client_get(
client,

@@ -2025,6 +2035,87 @@ def test_api_pair_history(botclient, tmp_path, mocker):
assert_response(rc, 502)
assert rc.json()["detail"] == ("No data for UNITTEST/BTC, 5m in 20200111-20200112 found.")
# No strategy
rc = client_post(
client,
f"{BASE_URI}/pair_history",
data={
"pair": "UNITTEST/BTC",
"timeframe": timeframe,
"timerange": "20180111-20180112",
# "strategy": CURRENT_TEST_STRATEGY,
"columns": ["rsi", "fastd", "fastk"],
},
)
assert_response(rc, 200)
result = rc.json()
assert result["length"] == 289
assert len(result["data"]) == result["length"]
assert "columns" in result
assert "data" in result
# Result without strategy won't have enter_long assigned.
assert "enter_long" not in result["columns"]
assert result["columns"] == ["date", "open", "high", "low", "close", "volume", "__date_ts"]
def test_api_pair_history_live_mode(botclient, tmp_path, mocker):
_ftbot, client = botclient
_ftbot.config["user_data_dir"] = tmp_path
_ftbot.config["runmode"] = RunMode.WEBSERVER
mocker.patch("freqtrade.strategy.interface.IStrategy.load_freqAI_model")
# no strategy, live data
gho = mocker.patch(
"freqtrade.exchange.binance.Binance.get_historic_ohlcv",
return_value=generate_test_data("1h", 100),
)
rc = client_post(
client,
f"{BASE_URI}/pair_history",
data={
"pair": "UNITTEST/BTC",
"timeframe": "1h",
"timerange": "20240101-",
# "strategy": CURRENT_TEST_STRATEGY,
"columns": ["rsi", "fastd", "fastk"],
"live_mode": True,
},
)
assert_response(rc, 200)
result = rc.json()
# 100 candles - as in the generate_test_data call above
assert result["length"] == 100
assert len(result["data"]) == result["length"]
assert result["columns"] == ["date", "open", "high", "low", "close", "volume", "__date_ts"]
assert gho.call_count == 1
gho.reset_mock()
rc = client_post(
client,
f"{BASE_URI}/pair_history",
data={
"pair": "UNITTEST/BTC",
"timeframe": "1h",
"timerange": "20240101-",
"strategy": CURRENT_TEST_STRATEGY,
"columns": ["rsi", "fastd", "fastk"],
"live_mode": True,
},
)
assert_response(rc, 200)
result = rc.json()
# 80 candles - as in the generate_test_data call above - 20 startup candles
assert result["length"] == 100 - 20
assert len(result["data"]) == result["length"]
assert "rsi" in result["columns"]
assert "enter_long" in result["columns"]
assert "fastd" in result["columns"]
assert "date" in result["columns"]
assert gho.call_count == 1
def test_api_plot_config(botclient, mocker, tmp_path):
ftbot, client = botclient

@@ -2849,7 +2940,7 @@ def test_api_ws_send_msg(default_conf, mocker, caplog):
ApiServer.shutdown()
def test_api_download_data(botclient, mocker, tmp_path, caplog):
def test_api_download_data(botclient, mocker, tmp_path):
ftbot, client = botclient
rc = client_post(client, f"{BASE_URI}/download_data", data={})

@@ -2918,3 +3009,55 @@ def test_api_download_data(botclient, mocker, tmp_path, caplog):
assert response["job_category"] == "download_data"
assert response["status"] == "failed"
assert response["error"] == "Download error"
def test_api_markets_live(botclient):
ftbot, client = botclient
rc = client_get(client, f"{BASE_URI}/markets")
assert_response(rc, 200)
response = rc.json()
assert "markets" in response
assert len(response["markets"]) >= 0
assert response["markets"]["XRP/USDT"] == {
"base": "XRP",
"quote": "USDT",
"symbol": "XRP/USDT",
"spot": True,
"swap": False,
}
assert "BTC/USDT" in response["markets"]
assert "XRP/BTC" in response["markets"]
rc = client_get(
client,
f"{BASE_URI}/markets?base=XRP",
)
assert_response(rc, 200)
response = rc.json()
assert "XRP/USDT" in response["markets"]
assert "XRP/BTC" in response["markets"]
assert "BTC/USDT" not in response["markets"]
def test_api_markets_webserver(botclient):
# Ensure webserver exchanges are reset
ApiBG.exchanges = {}
ftbot, client = botclient
# Test in webserver mode
ftbot.config["runmode"] = RunMode.WEBSERVER
rc = client_get(client, f"{BASE_URI}/markets?exchange=binance")
assert_response(rc, 200)
response = rc.json()
assert "markets" in response
assert len(response["markets"]) >= 0
assert response["exchange_id"] == "binance"
rc = client_get(client, f"{BASE_URI}/markets?exchange=hyperliquid")
assert_response(rc, 200)
assert "hyperliquid_spot" in ApiBG.exchanges
assert "binance_spot" in ApiBG.exchanges