diff --git a/docs/data-download.md b/docs/data-download.md
index dbd7998c3..9bfc1e685 100644
--- a/docs/data-download.md
+++ b/docs/data-download.md
@@ -29,6 +29,7 @@ usage: freqtrade download-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
                                [--erase]
                                [--data-format-ohlcv {json,jsongz,hdf5}]
                                [--data-format-trades {json,jsongz,hdf5}]
+                               [--trading-mode {spot,margin,futures}]
 
 optional arguments:
   -h, --help            show this help message and exit
@@ -59,6 +60,8 @@ optional arguments:
   --data-format-trades {json,jsongz,hdf5}
                         Storage format for downloaded trades data. (default:
                         `jsongz`).
+  --trading-mode {spot,margin,futures}
+                        Select Trading mode
 
 Common arguments:
   -v, --verbose         Verbose mode (-vv for more, -vvv to get all messages).
@@ -193,11 +196,14 @@ usage: freqtrade convert-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
                               {json,jsongz,hdf5} --format-to
                               {json,jsongz,hdf5} [--erase]
                               [-t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...]]
+                              [--exchange EXCHANGE]
+                              [--trading-mode {spot,margin,futures}]
+                              [--candle-types {spot,futures,mark,index,premiumIndex,funding_rate} [{spot,futures,mark,index,premiumIndex,funding_rate} ...]]
 
 optional arguments:
   -h, --help            show this help message and exit
   -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
-                        Show profits for only these pairs. Pairs are space-
+                        Limit command to these pairs. Pairs are space-
                         separated.
   --format-from {json,jsongz,hdf5}
                         Source format for data conversion.
@@ -208,6 +214,12 @@ optional arguments:
   -t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...], --timeframes {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...]
                         Specify which tickers to download. Space-separated
                         list. Default: `1m 5m`.
+  --exchange EXCHANGE   Exchange name (default: `bittrex`). Only valid if no
+                        config is provided.
+  --trading-mode {spot,margin,futures}
+                        Select Trading mode
+  --candle-types {spot,futures,mark,index,premiumIndex,funding_rate} [{spot,futures,mark,index,premiumIndex,funding_rate} ...]
+                        Select candle type to use
 
 Common arguments:
   -v, --verbose         Verbose mode (-vv for more, -vvv to get all messages).
@@ -224,6 +236,7 @@ Common arguments:
                         Path to directory with historical backtesting data.
   --userdir PATH, --user-data-dir PATH
                         Path to userdata directory.
+```
 ```
 
 ##### Example converting data
@@ -347,6 +360,7 @@ usage: freqtrade list-data [-h] [-v] [--logfile FILE] [-V] [-c PATH] [-d PATH]
                            [--userdir PATH] [--exchange EXCHANGE]
                            [--data-format-ohlcv {json,jsongz,hdf5}]
                            [-p PAIRS [PAIRS ...]]
+                           [--trading-mode {spot,margin,futures}]
 
 optional arguments:
   -h, --help            show this help message and exit
@@ -356,8 +370,10 @@ optional arguments:
                         Storage format for downloaded candle (OHLCV) data.
                         (default: `json`).
   -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
-                        Show profits for only these pairs. Pairs are space-
+                        Limit command to these pairs. Pairs are space-
                         separated.
+  --trading-mode {spot,margin,futures}
+                        Select Trading mode
 
 Common arguments:
   -v, --verbose         Verbose mode (-vv for more, -vvv to get all messages).
diff --git a/freqtrade/commands/arguments.py b/freqtrade/commands/arguments.py index 025fee66c..4ddd16410 100644 --- a/freqtrade/commands/arguments.py +++ b/freqtrade/commands/arguments.py @@ -61,15 +61,17 @@ ARGS_BUILD_CONFIG = ["config"] ARGS_BUILD_STRATEGY = ["user_data_dir", "strategy", "template"] ARGS_CONVERT_DATA = ["pairs", "format_from", "format_to", "erase"] -ARGS_CONVERT_DATA_OHLCV = ARGS_CONVERT_DATA + ["timeframes"] + +ARGS_CONVERT_DATA_OHLCV = ARGS_CONVERT_DATA + ["timeframes", "exchange", "trading_mode", + "candle_types"] ARGS_CONVERT_TRADES = ["pairs", "timeframes", "exchange", "dataformat_ohlcv", "dataformat_trades"] -ARGS_LIST_DATA = ["exchange", "dataformat_ohlcv", "pairs"] +ARGS_LIST_DATA = ["exchange", "dataformat_ohlcv", "pairs", "trading_mode"] ARGS_DOWNLOAD_DATA = ["pairs", "pairs_file", "days", "new_pairs_days", "include_inactive", "timerange", "download_trades", "exchange", "timeframes", - "erase", "dataformat_ohlcv", "dataformat_trades"] + "erase", "dataformat_ohlcv", "dataformat_trades", "trading_mode"] ARGS_PLOT_DATAFRAME = ["pairs", "indicators1", "indicators2", "plot_limit", "db_url", "trade_source", "export", "exportfilename", diff --git a/freqtrade/commands/cli_options.py b/freqtrade/commands/cli_options.py index 7d1d2edd1..33d751f54 100644 --- a/freqtrade/commands/cli_options.py +++ b/freqtrade/commands/cli_options.py @@ -5,6 +5,7 @@ from argparse import SUPPRESS, ArgumentTypeError from freqtrade import __version__, constants from freqtrade.constants import HYPEROPT_LOSS_BUILTIN +from freqtrade.enums import CandleType def check_int_positive(value: str) -> int: @@ -353,6 +354,12 @@ AVAILABLE_CLI_OPTIONS = { help='Select Trading mode', choices=constants.TRADING_MODES, ), + "candle_types": Arg( + '--candle-types', + help='Select candle type to use', + choices=[c.value for c in CandleType], + nargs='+', + ), # Script options "pairs": Arg( '-p', '--pairs', diff --git a/freqtrade/commands/data_commands.py b/freqtrade/commands/data_commands.py index 5dc5fe7ea..0c6f48088 100644 --- a/freqtrade/commands/data_commands.py +++ b/freqtrade/commands/data_commands.py @@ -8,7 +8,7 @@ from freqtrade.configuration import TimeRange, setup_utils_configuration from freqtrade.data.converter import convert_ohlcv_format, convert_trades_format from freqtrade.data.history import (convert_trades_to_ohlcv, refresh_backtest_ohlcv_data, refresh_backtest_trades_data) -from freqtrade.enums import RunMode +from freqtrade.enums import CandleType, RunMode from freqtrade.exceptions import OperationalException from freqtrade.exchange import timeframe_to_minutes from freqtrade.exchange.exchange import market_is_active @@ -64,6 +64,8 @@ def start_download_data(args: Dict[str, Any]) -> None: try: if config.get('download_trades'): + if config.get('trading_mode') == 'futures': + raise OperationalException("Trade download not supported for futures.") pairs_not_available = refresh_backtest_trades_data( exchange, pairs=expanded_pairs, datadir=config['datadir'], timerange=timerange, new_pairs_days=config['new_pairs_days'], @@ -81,7 +83,9 @@ def start_download_data(args: Dict[str, Any]) -> None: exchange, pairs=expanded_pairs, timeframes=config['timeframes'], datadir=config['datadir'], timerange=timerange, new_pairs_days=config['new_pairs_days'], - erase=bool(config.get('erase')), data_format=config['dataformat_ohlcv']) + erase=bool(config.get('erase')), data_format=config['dataformat_ohlcv'], + trading_mode=config.get('trading_mode', 'spot'), + ) except KeyboardInterrupt: sys.exit("SIGINT received, 
aborting ...") @@ -133,9 +137,11 @@ def start_convert_data(args: Dict[str, Any], ohlcv: bool = True) -> None: """ config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE) if ohlcv: - convert_ohlcv_format(config, - convert_from=args['format_from'], convert_to=args['format_to'], - erase=args['erase']) + candle_types = [CandleType.from_string(ct) for ct in config.get('candle_types', ['spot'])] + for candle_type in candle_types: + convert_ohlcv_format(config, + convert_from=args['format_from'], convert_to=args['format_to'], + erase=args['erase'], candle_type=candle_type) else: convert_trades_format(config, convert_from=args['format_from'], convert_to=args['format_to'], @@ -154,17 +160,24 @@ def start_list_data(args: Dict[str, Any]) -> None: from freqtrade.data.history.idatahandler import get_datahandler dhc = get_datahandler(config['datadir'], config['dataformat_ohlcv']) - paircombs = dhc.ohlcv_get_available_data(config['datadir']) + # TODO-lev: trading-mode should be parsed at config level, and available as Enum in the config. + paircombs = dhc.ohlcv_get_available_data(config['datadir'], config.get('trading_mode', 'spot')) if args['pairs']: paircombs = [comb for comb in paircombs if comb[0] in args['pairs']] print(f"Found {len(paircombs)} pair / timeframe combinations.") groupedpair = defaultdict(list) - for pair, timeframe in sorted(paircombs, key=lambda x: (x[0], timeframe_to_minutes(x[1]))): - groupedpair[pair].append(timeframe) + for pair, timeframe, candle_type in sorted( + paircombs, + key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2]) + ): + groupedpair[(pair, candle_type)].append(timeframe) if groupedpair: - print(tabulate([(pair, ', '.join(timeframes)) for pair, timeframes in groupedpair.items()], - headers=("Pair", "Timeframe"), - tablefmt='psql', stralign='right')) + print(tabulate([ + (pair, ', '.join(timeframes), candle_type) + for (pair, candle_type), timeframes in groupedpair.items() + ], + headers=("Pair", "Timeframe", "Type"), + tablefmt='psql', stralign='right')) diff --git a/freqtrade/configuration/configuration.py b/freqtrade/configuration/configuration.py index 67617d84f..48bd7bdb3 100644 --- a/freqtrade/configuration/configuration.py +++ b/freqtrade/configuration/configuration.py @@ -13,7 +13,7 @@ from freqtrade.configuration.deprecated_settings import process_temporary_deprec from freqtrade.configuration.directory_operations import create_datadir, create_userdata_dir from freqtrade.configuration.environment_vars import enironment_vars_to_dict from freqtrade.configuration.load_config import load_config_file, load_file -from freqtrade.enums import NON_UTIL_MODES, TRADING_MODES, RunMode +from freqtrade.enums import NON_UTIL_MODES, TRADING_MODES, CandleType, RunMode from freqtrade.exceptions import OperationalException from freqtrade.loggers import setup_logging from freqtrade.misc import deep_merge_dicts, parse_db_uri_for_logging @@ -433,6 +433,10 @@ class Configuration: logstring='Detected --new-pairs-days: {}') self._args_to_config(config, argname='trading_mode', logstring='Detected --trading-mode: {}') + config['candle_type_def'] = CandleType.get_default(config.get('trading_mode', 'spot')) + + self._args_to_config(config, argname='candle_types', + logstring='Detected --candle-types: {}') def _process_runmode(self, config: Dict[str, Any]) -> None: diff --git a/freqtrade/constants.py b/freqtrade/constants.py index 717178c77..53f2c0ddf 100644 --- a/freqtrade/constants.py +++ b/freqtrade/constants.py @@ -5,6 +5,8 @@ bot constants """ from typing import List, 
Tuple +from freqtrade.enums import CandleType + DEFAULT_CONFIG = 'config.json' DEFAULT_EXCHANGE = 'bittrex' @@ -475,7 +477,7 @@ CANCEL_REASON = { } # List of pairs with their timeframes -PairWithTimeframe = Tuple[str, str] +PairWithTimeframe = Tuple[str, str, CandleType] ListPairsWithTimeframes = List[PairWithTimeframe] # Type for trades list diff --git a/freqtrade/data/converter.py b/freqtrade/data/converter.py index d592b4990..84c57be41 100644 --- a/freqtrade/data/converter.py +++ b/freqtrade/data/converter.py @@ -11,6 +11,7 @@ import pandas as pd from pandas import DataFrame, to_datetime from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TradeList +from freqtrade.enums import CandleType logger = logging.getLogger(__name__) @@ -261,13 +262,20 @@ def convert_trades_format(config: Dict[str, Any], convert_from: str, convert_to: src.trades_purge(pair=pair) -def convert_ohlcv_format(config: Dict[str, Any], convert_from: str, convert_to: str, erase: bool): +def convert_ohlcv_format( + config: Dict[str, Any], + convert_from: str, + convert_to: str, + erase: bool, + candle_type: CandleType +): """ Convert OHLCV from one format to another :param config: Config dictionary :param convert_from: Source format :param convert_to: Target format :param erase: Erase source data (does not apply if source and target format are identical) + :param candle_type: Any of the enum CandleType (must match trading mode!) """ from freqtrade.data.history.idatahandler import get_datahandler src = get_datahandler(config['datadir'], convert_from) @@ -279,8 +287,11 @@ def convert_ohlcv_format(config: Dict[str, Any], convert_from: str, convert_to: config['pairs'] = [] # Check timeframes or fall back to timeframe. for timeframe in timeframes: - config['pairs'].extend(src.ohlcv_get_pairs(config['datadir'], - timeframe)) + config['pairs'].extend(src.ohlcv_get_pairs( + config['datadir'], + timeframe, + candle_type=candle_type + )) logger.info(f"Converting candle (OHLCV) data for {config['pairs']}") for timeframe in timeframes: @@ -289,10 +300,16 @@ def convert_ohlcv_format(config: Dict[str, Any], convert_from: str, convert_to: timerange=None, fill_missing=False, drop_incomplete=False, - startup_candles=0) - logger.info(f"Converting {len(data)} candles for {pair}") + startup_candles=0, + candle_type=candle_type) + logger.info(f"Converting {len(data)} {candle_type} candles for {pair}") if len(data) > 0: - trg.ohlcv_store(pair=pair, timeframe=timeframe, data=data) + trg.ohlcv_store( + pair=pair, + timeframe=timeframe, + data=data, + candle_type=candle_type + ) if erase and convert_from != convert_to: logger.info(f"Deleting source data for {pair} / {timeframe}") - src.ohlcv_purge(pair=pair, timeframe=timeframe) + src.ohlcv_purge(pair=pair, timeframe=timeframe, candle_type=candle_type) diff --git a/freqtrade/data/dataprovider.py b/freqtrade/data/dataprovider.py index b197c159f..12b02f744 100644 --- a/freqtrade/data/dataprovider.py +++ b/freqtrade/data/dataprovider.py @@ -13,7 +13,7 @@ from pandas import DataFrame from freqtrade.configuration import TimeRange from freqtrade.constants import ListPairsWithTimeframes, PairWithTimeframe from freqtrade.data.history import load_pair_history -from freqtrade.enums import RunMode +from freqtrade.enums import CandleType, RunMode from freqtrade.exceptions import ExchangeError, OperationalException from freqtrade.exchange import Exchange, timeframe_to_seconds @@ -41,7 +41,13 @@ class DataProvider: """ self.__slice_index = limit_index - def _set_cached_df(self, 
pair: str, timeframe: str, dataframe: DataFrame) -> None: + def _set_cached_df( + self, + pair: str, + timeframe: str, + dataframe: DataFrame, + candle_type: CandleType + ) -> None: """ Store cached Dataframe. Using private method as this should never be used by a user @@ -49,8 +55,10 @@ class DataProvider: :param pair: pair to get the data for :param timeframe: Timeframe to get data for :param dataframe: analyzed dataframe + :param candle_type: Any of the enum CandleType (must match trading mode!) """ - self.__cached_pairs[(pair, timeframe)] = (dataframe, datetime.now(timezone.utc)) + self.__cached_pairs[(pair, timeframe, candle_type)] = ( + dataframe, datetime.now(timezone.utc)) def add_pairlisthandler(self, pairlists) -> None: """ @@ -58,13 +66,20 @@ class DataProvider: """ self._pairlists = pairlists - def historic_ohlcv(self, pair: str, timeframe: str = None) -> DataFrame: + def historic_ohlcv( + self, + pair: str, + timeframe: str = None, + candle_type: str = '' + ) -> DataFrame: """ Get stored historical candle (OHLCV) data :param pair: pair to get the data for :param timeframe: timeframe to get data for + :param candle_type: '', mark, index, premiumIndex, or funding_rate """ - saved_pair = (pair, str(timeframe)) + candleType = CandleType.from_string(candle_type) + saved_pair = (pair, str(timeframe), candleType) if saved_pair not in self.__cached_pairs_backtesting: timerange = TimeRange.parse_timerange(None if self._config.get( 'timerange') is None else str(self._config.get('timerange'))) @@ -77,26 +92,36 @@ class DataProvider: timeframe=timeframe or self._config['timeframe'], datadir=self._config['datadir'], timerange=timerange, - data_format=self._config.get('dataformat_ohlcv', 'json') + data_format=self._config.get('dataformat_ohlcv', 'json'), + candle_type=candleType, + ) return self.__cached_pairs_backtesting[saved_pair].copy() - def get_pair_dataframe(self, pair: str, timeframe: str = None) -> DataFrame: + def get_pair_dataframe( + self, + pair: str, + timeframe: str = None, + candle_type: str = '' + ) -> DataFrame: """ Return pair candle (OHLCV) data, either live or cached historical -- depending on the runmode. + Only combinations in the pairlist or which have been specified as informative pairs + will be available. :param pair: pair to get the data for :param timeframe: timeframe to get data for :return: Dataframe for this pair + :param candle_type: '', mark, index, premiumIndex, or funding_rate """ if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE): # Get live OHLCV data. - data = self.ohlcv(pair=pair, timeframe=timeframe) + data = self.ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type) else: # Get historical OHLCV data (cached on disk). - data = self.historic_ohlcv(pair=pair, timeframe=timeframe) + data = self.historic_ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type) if len(data) == 0: - logger.warning(f"No data found for ({pair}, {timeframe}).") + logger.warning(f"No data found for ({pair}, {timeframe}, {candle_type}).") return data def get_analyzed_dataframe(self, pair: str, timeframe: str) -> Tuple[DataFrame, datetime]: @@ -109,7 +134,7 @@ class DataProvider: combination. Returns empty dataframe and Epoch 0 (1970-01-01) if no dataframe was cached. 
""" - pair_key = (pair, timeframe) + pair_key = (pair, timeframe, CandleType.SPOT) if pair_key in self.__cached_pairs: if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE): df, date = self.__cached_pairs[pair_key] @@ -177,20 +202,29 @@ class DataProvider: raise OperationalException(NO_EXCHANGE_EXCEPTION) return list(self._exchange._klines.keys()) - def ohlcv(self, pair: str, timeframe: str = None, copy: bool = True) -> DataFrame: + def ohlcv( + self, + pair: str, + timeframe: str = None, + copy: bool = True, + candle_type: str = '' + ) -> DataFrame: """ Get candle (OHLCV) data for the given pair as DataFrame Please use the `available_pairs` method to verify which pairs are currently cached. :param pair: pair to get the data for :param timeframe: Timeframe to get data for + :param candle_type: '', mark, index, premiumIndex, or funding_rate :param copy: copy dataframe before returning if True. Use False only for read-only operations (where the dataframe is not modified) """ if self._exchange is None: raise OperationalException(NO_EXCHANGE_EXCEPTION) if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE): - return self._exchange.klines((pair, timeframe or self._config['timeframe']), - copy=copy) + return self._exchange.klines( + (pair, timeframe or self._config['timeframe'], CandleType.from_string(candle_type)), + copy=copy + ) else: return DataFrame() diff --git a/freqtrade/data/history/hdf5datahandler.py b/freqtrade/data/history/hdf5datahandler.py index 49fac99ea..6483cfb21 100644 --- a/freqtrade/data/history/hdf5datahandler.py +++ b/freqtrade/data/history/hdf5datahandler.py @@ -9,6 +9,7 @@ import pandas as pd from freqtrade.configuration import TimeRange from freqtrade.constants import (DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, ListPairsWithTimeframes, TradeList) +from freqtrade.enums import CandleType from .idatahandler import IDataHandler @@ -21,44 +22,61 @@ class HDF5DataHandler(IDataHandler): _columns = DEFAULT_DATAFRAME_COLUMNS @classmethod - def ohlcv_get_available_data(cls, datadir: Path) -> ListPairsWithTimeframes: + def ohlcv_get_available_data(cls, datadir: Path, trading_mode: str) -> ListPairsWithTimeframes: """ Returns a list of all pairs with ohlcv data available in this datadir :param datadir: Directory to search for ohlcv files + :param trading_mode: trading-mode to be used :return: List of Tuples of (pair, timeframe) """ - _tmp = [re.search(r'^([a-zA-Z_]+)\-(\d+\S+)(?=.h5)', p.name) - for p in datadir.glob("*.h5")] - return [(match[1].replace('_', '/'), match[2]) for match in _tmp - if match and len(match.groups()) > 1] + if trading_mode == 'futures': + datadir = datadir.joinpath('futures') + _tmp = [ + re.search( + cls._OHLCV_REGEX, p.name + ) for p in datadir.glob("*.h5") + ] + return [ + ( + cls.rebuild_pair_from_filename(match[1]), + match[2], + CandleType.from_string(match[3]) + ) for match in _tmp if match and len(match.groups()) > 1] @classmethod - def ohlcv_get_pairs(cls, datadir: Path, timeframe: str) -> List[str]: + def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> List[str]: """ Returns a list of all pairs with ohlcv data available in this datadir for the specified timeframe :param datadir: Directory to search for ohlcv files :param timeframe: Timeframe to search pairs for + :param candle_type: Any of the enum CandleType (must match trading mode!) 
:return: List of Pairs """ + candle = "" + if candle_type != CandleType.SPOT: + datadir = datadir.joinpath('futures') + candle = f"-{candle_type}" - _tmp = [re.search(r'^(\S+)(?=\-' + timeframe + '.h5)', p.name) - for p in datadir.glob(f"*{timeframe}.h5")] + _tmp = [re.search(r'^(\S+)(?=\-' + timeframe + candle + '.h5)', p.name) + for p in datadir.glob(f"*{timeframe}{candle}.h5")] # Check if regex found something and only return these results - return [match[0].replace('_', '/') for match in _tmp if match] + return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match] - def ohlcv_store(self, pair: str, timeframe: str, data: pd.DataFrame) -> None: + def ohlcv_store( + self, pair: str, timeframe: str, data: pd.DataFrame, candle_type: CandleType) -> None: """ Store data in hdf5 file. :param pair: Pair - used to generate filename :param timeframe: Timeframe - used to generate filename :param data: Dataframe containing OHLCV data + :param candle_type: Any of the enum CandleType (must match trading mode!) :return: None """ key = self._pair_ohlcv_key(pair, timeframe) _data = data.copy() - filename = self._pair_data_filename(self._datadir, pair, timeframe) + filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type) _data.loc[:, self._columns].to_hdf( filename, key, mode='a', complevel=9, complib='blosc', @@ -66,7 +84,8 @@ class HDF5DataHandler(IDataHandler): ) def _ohlcv_load(self, pair: str, timeframe: str, - timerange: Optional[TimeRange] = None) -> pd.DataFrame: + timerange: Optional[TimeRange], candle_type: CandleType + ) -> pd.DataFrame: """ Internal method used to load data for one pair from disk. Implements the loading and conversion to a Pandas dataframe. @@ -76,10 +95,16 @@ class HDF5DataHandler(IDataHandler): :param timerange: Limit data to be loaded to this timerange. Optionally implemented by subclasses to avoid loading all data where possible. + :param candle_type: Any of the enum CandleType (must match trading mode!) :return: DataFrame with ohlcv data, or empty DataFrame """ key = self._pair_ohlcv_key(pair, timeframe) - filename = self._pair_data_filename(self._datadir, pair, timeframe) + filename = self._pair_data_filename( + self._datadir, + pair, + timeframe, + candle_type=candle_type + ) if not filename.exists(): return pd.DataFrame(columns=self._columns) @@ -98,12 +123,19 @@ class HDF5DataHandler(IDataHandler): 'low': 'float', 'close': 'float', 'volume': 'float'}) return pairdata - def ohlcv_append(self, pair: str, timeframe: str, data: pd.DataFrame) -> None: + def ohlcv_append( + self, + pair: str, + timeframe: str, + data: pd.DataFrame, + candle_type: CandleType + ) -> None: """ Append data to existing data structures :param pair: Pair :param timeframe: Timeframe this ohlcv data is for :param data: Data to append. + :param candle_type: Any of the enum CandleType (must match trading mode!) """ raise NotImplementedError() @@ -117,7 +149,7 @@ class HDF5DataHandler(IDataHandler): _tmp = [re.search(r'^(\S+)(?=\-trades.h5)', p.name) for p in datadir.glob("*trades.h5")] # Check if regex found something and only return these results to avoid exceptions. 
- return [match[0].replace('_', '/') for match in _tmp if match] + return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match] def trades_store(self, pair: str, data: TradeList) -> None: """ diff --git a/freqtrade/data/history/history_utils.py b/freqtrade/data/history/history_utils.py index e6b8db322..64297c7e5 100644 --- a/freqtrade/data/history/history_utils.py +++ b/freqtrade/data/history/history_utils.py @@ -12,6 +12,7 @@ from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS from freqtrade.data.converter import (clean_ohlcv_dataframe, ohlcv_to_dataframe, trades_remove_duplicates, trades_to_ohlcv) from freqtrade.data.history.idatahandler import IDataHandler, get_datahandler +from freqtrade.enums import CandleType from freqtrade.exceptions import OperationalException from freqtrade.exchange import Exchange from freqtrade.misc import format_ms_time @@ -29,6 +30,7 @@ def load_pair_history(pair: str, startup_candles: int = 0, data_format: str = None, data_handler: IDataHandler = None, + candle_type: CandleType = CandleType.SPOT ) -> DataFrame: """ Load cached ohlcv history for the given pair. @@ -43,6 +45,7 @@ def load_pair_history(pair: str, :param startup_candles: Additional candles to load at the start of the period :param data_handler: Initialized data-handler to use. Will be initialized from data_format if not set + :param candle_type: Any of the enum CandleType (must match trading mode!) :return: DataFrame with ohlcv data, or empty DataFrame """ data_handler = get_datahandler(datadir, data_format, data_handler) @@ -53,6 +56,7 @@ def load_pair_history(pair: str, fill_missing=fill_up_missing, drop_incomplete=drop_incomplete, startup_candles=startup_candles, + candle_type=candle_type ) @@ -64,6 +68,7 @@ def load_data(datadir: Path, startup_candles: int = 0, fail_without_data: bool = False, data_format: str = 'json', + candle_type: CandleType = CandleType.SPOT ) -> Dict[str, DataFrame]: """ Load ohlcv history data for a list of pairs. @@ -76,6 +81,7 @@ def load_data(datadir: Path, :param startup_candles: Additional candles to load at the start of the period :param fail_without_data: Raise OperationalException if no data is found. :param data_format: Data format which should be used. Defaults to json + :param candle_type: Any of the enum CandleType (must match trading mode!) :return: dict(:) """ result: Dict[str, DataFrame] = {} @@ -89,7 +95,8 @@ def load_data(datadir: Path, datadir=datadir, timerange=timerange, fill_up_missing=fill_up_missing, startup_candles=startup_candles, - data_handler=data_handler + data_handler=data_handler, + candle_type=candle_type ) if not hist.empty: result[pair] = hist @@ -105,6 +112,7 @@ def refresh_data(datadir: Path, exchange: Exchange, data_format: str = None, timerange: Optional[TimeRange] = None, + candle_type: CandleType = CandleType.SPOT ) -> None: """ Refresh ohlcv history data for a list of pairs. @@ -115,17 +123,24 @@ def refresh_data(datadir: Path, :param exchange: Exchange object :param data_format: dataformat to use :param timerange: Limit data to be loaded to this timerange + :param candle_type: Any of the enum CandleType (must match trading mode!) 
""" data_handler = get_datahandler(datadir, data_format) for idx, pair in enumerate(pairs): process = f'{idx}/{len(pairs)}' _download_pair_history(pair=pair, process=process, timeframe=timeframe, datadir=datadir, - timerange=timerange, exchange=exchange, data_handler=data_handler) + timerange=timerange, exchange=exchange, data_handler=data_handler, + candle_type=candle_type) -def _load_cached_data_for_updating(pair: str, timeframe: str, timerange: Optional[TimeRange], - data_handler: IDataHandler) -> Tuple[DataFrame, Optional[int]]: +def _load_cached_data_for_updating( + pair: str, + timeframe: str, + timerange: Optional[TimeRange], + data_handler: IDataHandler, + candle_type: CandleType = CandleType.SPOT +) -> Tuple[DataFrame, Optional[int]]: """ Load cached data to download more data. If timerange is passed in, checks whether data from an before the stored data will be @@ -142,7 +157,8 @@ def _load_cached_data_for_updating(pair: str, timeframe: str, timerange: Optiona # Intentionally don't pass timerange in - since we need to load the full dataset. data = data_handler.ohlcv_load(pair, timeframe=timeframe, timerange=None, fill_missing=False, - drop_incomplete=True, warn_no_data=False) + drop_incomplete=True, warn_no_data=False, + candle_type=candle_type) if not data.empty: if start and start < data.iloc[0]['date']: # Earlier data than existing data requested, redownload all @@ -161,7 +177,9 @@ def _download_pair_history(pair: str, *, process: str = '', new_pairs_days: int = 30, data_handler: IDataHandler = None, - timerange: Optional[TimeRange] = None) -> bool: + timerange: Optional[TimeRange] = None, + candle_type: CandleType = CandleType.SPOT + ) -> bool: """ Download latest candles from the exchange for the pair and timeframe passed in parameters The data is downloaded starting from the last correct data that @@ -173,6 +191,7 @@ def _download_pair_history(pair: str, *, :param pair: pair to download :param timeframe: Timeframe (e.g "5m") :param timerange: range of time to download + :param candle_type: Any of the enum CandleType (must match trading mode!) :return: bool with success state """ data_handler = get_datahandler(datadir, data_handler=data_handler) @@ -185,7 +204,8 @@ def _download_pair_history(pair: str, *, # data, since_ms = _load_cached_data_for_updating_old(datadir, pair, timeframe, timerange) data, since_ms = _load_cached_data_for_updating(pair, timeframe, timerange, - data_handler=data_handler) + data_handler=data_handler, + candle_type=candle_type) logger.debug("Current Start: %s", f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None') @@ -198,7 +218,8 @@ def _download_pair_history(pair: str, *, since_ms=since_ms if since_ms else arrow.utcnow().shift( days=-new_pairs_days).int_timestamp * 1000, - is_new_pair=data.empty + is_new_pair=data.empty, + candle_type=candle_type, ) # TODO: Maybe move parsing to exchange class (?) 
new_dataframe = ohlcv_to_dataframe(new_data, timeframe, pair, @@ -216,7 +237,7 @@ def _download_pair_history(pair: str, *, logger.debug("New End: %s", f"{data.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None') - data_handler.ohlcv_store(pair, timeframe, data=data) + data_handler.ohlcv_store(pair, timeframe, data=data, candle_type=candle_type) return True except Exception: @@ -227,9 +248,11 @@ def _download_pair_history(pair: str, *, def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes: List[str], - datadir: Path, timerange: Optional[TimeRange] = None, + datadir: Path, trading_mode: str, + timerange: Optional[TimeRange] = None, new_pairs_days: int = 30, erase: bool = False, - data_format: str = None) -> List[str]: + data_format: str = None, + ) -> List[str]: """ Refresh stored ohlcv data for backtesting and hyperopt operations. Used by freqtrade download-data subcommand. @@ -237,6 +260,7 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes """ pairs_not_available = [] data_handler = get_datahandler(datadir, data_format) + candle_type = CandleType.get_default(trading_mode) for idx, pair in enumerate(pairs, start=1): if pair not in exchange.markets: pairs_not_available.append(pair) @@ -245,16 +269,32 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes for timeframe in timeframes: if erase: - if data_handler.ohlcv_purge(pair, timeframe): - logger.info( - f'Deleting existing data for pair {pair}, interval {timeframe}.') + if data_handler.ohlcv_purge(pair, timeframe, candle_type=candle_type): + logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.') logger.info(f'Downloading pair {pair}, interval {timeframe}.') process = f'{idx}/{len(pairs)}' _download_pair_history(pair=pair, process=process, datadir=datadir, exchange=exchange, timerange=timerange, data_handler=data_handler, - timeframe=str(timeframe), new_pairs_days=new_pairs_days) + timeframe=str(timeframe), new_pairs_days=new_pairs_days, + candle_type=candle_type) + if trading_mode == 'futures': + # Predefined candletype (and timeframe) depending on exchange + # Downloads what is necessary to backtest based on futures data. + timeframe = exchange._ft_has['mark_ohlcv_timeframe'] + candle_type = CandleType.from_string(exchange._ft_has['mark_ohlcv_price']) + + # TODO: this could be in most parts to the above. 
+ if erase: + if data_handler.ohlcv_purge(pair, timeframe, candle_type=candle_type): + logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.') + _download_pair_history(pair=pair, process=process, + datadir=datadir, exchange=exchange, + timerange=timerange, data_handler=data_handler, + timeframe=str(timeframe), new_pairs_days=new_pairs_days, + candle_type=candle_type) + return pairs_not_available @@ -353,10 +393,16 @@ def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir: return pairs_not_available -def convert_trades_to_ohlcv(pairs: List[str], timeframes: List[str], - datadir: Path, timerange: TimeRange, erase: bool = False, - data_format_ohlcv: str = 'json', - data_format_trades: str = 'jsongz') -> None: +def convert_trades_to_ohlcv( + pairs: List[str], + timeframes: List[str], + datadir: Path, + timerange: TimeRange, + erase: bool = False, + data_format_ohlcv: str = 'json', + data_format_trades: str = 'jsongz', + candle_type: CandleType = CandleType.SPOT +) -> None: """ Convert stored trades data to ohlcv data """ @@ -367,12 +413,12 @@ def convert_trades_to_ohlcv(pairs: List[str], timeframes: List[str], trades = data_handler_trades.trades_load(pair) for timeframe in timeframes: if erase: - if data_handler_ohlcv.ohlcv_purge(pair, timeframe): + if data_handler_ohlcv.ohlcv_purge(pair, timeframe, candle_type=candle_type): logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.') try: ohlcv = trades_to_ohlcv(trades, timeframe) # Store ohlcv - data_handler_ohlcv.ohlcv_store(pair, timeframe, data=ohlcv) + data_handler_ohlcv.ohlcv_store(pair, timeframe, data=ohlcv, candle_type=candle_type) except ValueError: logger.exception(f'Could not convert {pair} to OHLCV.') diff --git a/freqtrade/data/history/idatahandler.py b/freqtrade/data/history/idatahandler.py index 578d0b5bf..2d0e187b8 100644 --- a/freqtrade/data/history/idatahandler.py +++ b/freqtrade/data/history/idatahandler.py @@ -4,6 +4,7 @@ It's subclasses handle and storing data from disk. 
""" import logging +import re from abc import ABC, abstractclassmethod, abstractmethod from copy import deepcopy from datetime import datetime, timezone @@ -16,6 +17,7 @@ from freqtrade import misc from freqtrade.configuration import TimeRange from freqtrade.constants import ListPairsWithTimeframes, TradeList from freqtrade.data.converter import clean_ohlcv_dataframe, trades_remove_duplicates, trim_dataframe +from freqtrade.enums import CandleType from freqtrade.exchange import timeframe_to_seconds @@ -24,6 +26,8 @@ logger = logging.getLogger(__name__) class IDataHandler(ABC): + _OHLCV_REGEX = r'^([a-zA-Z_-]+)\-(\d+\S)\-?([a-zA-Z_]*)?(?=\.)' + def __init__(self, datadir: Path) -> None: self._datadir = datadir @@ -35,36 +39,40 @@ class IDataHandler(ABC): raise NotImplementedError() @abstractclassmethod - def ohlcv_get_available_data(cls, datadir: Path) -> ListPairsWithTimeframes: + def ohlcv_get_available_data(cls, datadir: Path, trading_mode: str) -> ListPairsWithTimeframes: """ Returns a list of all pairs with ohlcv data available in this datadir :param datadir: Directory to search for ohlcv files + :param trading_mode: trading-mode to be used :return: List of Tuples of (pair, timeframe) """ @abstractclassmethod - def ohlcv_get_pairs(cls, datadir: Path, timeframe: str) -> List[str]: + def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> List[str]: """ Returns a list of all pairs with ohlcv data available in this datadir for the specified timeframe :param datadir: Directory to search for ohlcv files :param timeframe: Timeframe to search pairs for + :param candle_type: Any of the enum CandleType (must match trading mode!) :return: List of Pairs """ @abstractmethod - def ohlcv_store(self, pair: str, timeframe: str, data: DataFrame) -> None: + def ohlcv_store( + self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType) -> None: """ Store ohlcv data. :param pair: Pair - used to generate filename :param timeframe: Timeframe - used to generate filename :param data: Dataframe containing OHLCV data + :param candle_type: Any of the enum CandleType (must match trading mode!) :return: None """ @abstractmethod - def _ohlcv_load(self, pair: str, timeframe: str, - timerange: Optional[TimeRange] = None, + def _ohlcv_load(self, pair: str, timeframe: str, timerange: Optional[TimeRange], + candle_type: CandleType ) -> DataFrame: """ Internal method used to load data for one pair from disk. @@ -75,29 +83,38 @@ class IDataHandler(ABC): :param timerange: Limit data to be loaded to this timerange. Optionally implemented by subclasses to avoid loading all data where possible. + :param candle_type: Any of the enum CandleType (must match trading mode!) :return: DataFrame with ohlcv data, or empty DataFrame """ - def ohlcv_purge(self, pair: str, timeframe: str) -> bool: + def ohlcv_purge(self, pair: str, timeframe: str, candle_type: CandleType) -> bool: """ Remove data for this pair :param pair: Delete data for this pair. :param timeframe: Timeframe (e.g. "5m") + :param candle_type: Any of the enum CandleType (must match trading mode!) :return: True when deleted, false if file did not exist. 
""" - filename = self._pair_data_filename(self._datadir, pair, timeframe) + filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type) if filename.exists(): filename.unlink() return True return False @abstractmethod - def ohlcv_append(self, pair: str, timeframe: str, data: DataFrame) -> None: + def ohlcv_append( + self, + pair: str, + timeframe: str, + data: DataFrame, + candle_type: CandleType + ) -> None: """ Append data to existing data structures :param pair: Pair :param timeframe: Timeframe this ohlcv data is for :param data: Data to append. + :param candle_type: Any of the enum CandleType (must match trading mode!) """ @abstractclassmethod @@ -158,9 +175,29 @@ class IDataHandler(ABC): return trades_remove_duplicates(self._trades_load(pair, timerange=timerange)) @classmethod - def _pair_data_filename(cls, datadir: Path, pair: str, timeframe: str) -> Path: + def create_dir_if_needed(cls, datadir: Path): + """ + Creates datadir if necessary + should only create directories for "futures" mode at the moment. + """ + if not datadir.parent.is_dir(): + datadir.parent.mkdir() + + @classmethod + def _pair_data_filename( + cls, + datadir: Path, + pair: str, + timeframe: str, + candle_type: CandleType + ) -> Path: pair_s = misc.pair_to_filename(pair) - filename = datadir.joinpath(f'{pair_s}-{timeframe}.{cls._get_file_extension()}') + candle = "" + if candle_type != CandleType.SPOT: + datadir = datadir.joinpath('futures') + candle = f"-{candle_type}" + filename = datadir.joinpath( + f'{pair_s}-{timeframe}{candle}.{cls._get_file_extension()}') return filename @classmethod @@ -169,12 +206,23 @@ class IDataHandler(ABC): filename = datadir.joinpath(f'{pair_s}-trades.{cls._get_file_extension()}') return filename + @staticmethod + def rebuild_pair_from_filename(pair: str) -> str: + """ + Rebuild pair name from filename + Assumes a asset name of max. 7 length to also support BTC-PERP and BTC-PERP:USD names. + """ + res = re.sub(r'^(([A-Za-z]{1,10})|^([A-Za-z\-]{1,6}))(_)', r'\g<1>/', pair, 1) + res = re.sub('_', ':', res, 1) + return res + def ohlcv_load(self, pair, timeframe: str, + candle_type: CandleType, timerange: Optional[TimeRange] = None, fill_missing: bool = True, drop_incomplete: bool = True, startup_candles: int = 0, - warn_no_data: bool = True + warn_no_data: bool = True, ) -> DataFrame: """ Load cached candle (OHLCV) data for the given pair. @@ -186,6 +234,7 @@ class IDataHandler(ABC): :param drop_incomplete: Drop last candle assuming it may be incomplete. :param startup_candles: Additional candles to load at the start of the period :param warn_no_data: Log a warning message when no data is found + :param candle_type: Any of the enum CandleType (must match trading mode!) 
:return: DataFrame with ohlcv data, or empty DataFrame """ # Fix startup period @@ -193,8 +242,12 @@ class IDataHandler(ABC): if startup_candles > 0 and timerange_startup: timerange_startup.subtract_start(timeframe_to_seconds(timeframe) * startup_candles) - pairdf = self._ohlcv_load(pair, timeframe, - timerange=timerange_startup) + pairdf = self._ohlcv_load( + pair, + timeframe, + timerange=timerange_startup, + candle_type=candle_type + ) if self._check_empty_df(pairdf, pair, timeframe, warn_no_data): return pairdf else: diff --git a/freqtrade/data/history/jsondatahandler.py b/freqtrade/data/history/jsondatahandler.py index ccefc8356..82ab1abbf 100644 --- a/freqtrade/data/history/jsondatahandler.py +++ b/freqtrade/data/history/jsondatahandler.py @@ -10,6 +10,7 @@ from freqtrade import misc from freqtrade.configuration import TimeRange from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, ListPairsWithTimeframes, TradeList from freqtrade.data.converter import trades_dict_to_list +from freqtrade.enums import CandleType from .idatahandler import IDataHandler @@ -23,33 +24,48 @@ class JsonDataHandler(IDataHandler): _columns = DEFAULT_DATAFRAME_COLUMNS @classmethod - def ohlcv_get_available_data(cls, datadir: Path) -> ListPairsWithTimeframes: + def ohlcv_get_available_data(cls, datadir: Path, trading_mode: str) -> ListPairsWithTimeframes: """ Returns a list of all pairs with ohlcv data available in this datadir :param datadir: Directory to search for ohlcv files + :param trading_mode: trading-mode to be used :return: List of Tuples of (pair, timeframe) """ - _tmp = [re.search(r'^([a-zA-Z_]+)\-(\d+\S+)(?=.json)', p.name) - for p in datadir.glob(f"*.{cls._get_file_extension()}")] - return [(match[1].replace('_', '/'), match[2]) for match in _tmp - if match and len(match.groups()) > 1] + if trading_mode == 'futures': + datadir = datadir.joinpath('futures') + _tmp = [ + re.search( + cls._OHLCV_REGEX, p.name + ) for p in datadir.glob(f"*.{cls._get_file_extension()}")] + return [ + ( + cls.rebuild_pair_from_filename(match[1]), + match[2], + CandleType.from_string(match[3]) + ) for match in _tmp if match and len(match.groups()) > 1] @classmethod - def ohlcv_get_pairs(cls, datadir: Path, timeframe: str) -> List[str]: + def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> List[str]: """ Returns a list of all pairs with ohlcv data available in this datadir for the specified timeframe :param datadir: Directory to search for ohlcv files :param timeframe: Timeframe to search pairs for + :param candle_type: Any of the enum CandleType (must match trading mode!) :return: List of Pairs """ + candle = "" + if candle_type != CandleType.SPOT: + datadir = datadir.joinpath('futures') + candle = f"-{candle_type}" - _tmp = [re.search(r'^(\S+)(?=\-' + timeframe + '.json)', p.name) - for p in datadir.glob(f"*{timeframe}.{cls._get_file_extension()}")] + _tmp = [re.search(r'^(\S+)(?=\-' + timeframe + candle + '.json)', p.name) + for p in datadir.glob(f"*{timeframe}{candle}.{cls._get_file_extension()}")] # Check if regex found something and only return these results - return [match[0].replace('_', '/') for match in _tmp if match] + return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match] - def ohlcv_store(self, pair: str, timeframe: str, data: DataFrame) -> None: + def ohlcv_store( + self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType) -> None: """ Store data in json format "values". 
format looks as follows: @@ -57,9 +73,11 @@ class JsonDataHandler(IDataHandler): :param pair: Pair - used to generate filename :param timeframe: Timeframe - used to generate filename :param data: Dataframe containing OHLCV data + :param candle_type: Any of the enum CandleType (must match trading mode!) :return: None """ - filename = self._pair_data_filename(self._datadir, pair, timeframe) + filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type) + self.create_dir_if_needed(filename) _data = data.copy() # Convert date to int _data['date'] = _data['date'].view(np.int64) // 1000 // 1000 @@ -70,7 +88,7 @@ class JsonDataHandler(IDataHandler): compression='gzip' if self._use_zip else None) def _ohlcv_load(self, pair: str, timeframe: str, - timerange: Optional[TimeRange] = None, + timerange: Optional[TimeRange], candle_type: CandleType ) -> DataFrame: """ Internal method used to load data for one pair from disk. @@ -81,9 +99,10 @@ class JsonDataHandler(IDataHandler): :param timerange: Limit data to be loaded to this timerange. Optionally implemented by subclasses to avoid loading all data where possible. + :param candle_type: Any of the enum CandleType (must match trading mode!) :return: DataFrame with ohlcv data, or empty DataFrame """ - filename = self._pair_data_filename(self._datadir, pair, timeframe) + filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type=candle_type) if not filename.exists(): return DataFrame(columns=self._columns) try: @@ -100,25 +119,19 @@ class JsonDataHandler(IDataHandler): infer_datetime_format=True) return pairdata - def ohlcv_purge(self, pair: str, timeframe: str) -> bool: - """ - Remove data for this pair - :param pair: Delete data for this pair. - :param timeframe: Timeframe (e.g. "5m") - :return: True when deleted, false if file did not exist. - """ - filename = self._pair_data_filename(self._datadir, pair, timeframe) - if filename.exists(): - filename.unlink() - return True - return False - - def ohlcv_append(self, pair: str, timeframe: str, data: DataFrame) -> None: + def ohlcv_append( + self, + pair: str, + timeframe: str, + data: DataFrame, + candle_type: CandleType + ) -> None: """ Append data to existing data structures :param pair: Pair :param timeframe: Timeframe this ohlcv data is for :param data: Data to append. + :param candle_type: Any of the enum CandleType (must match trading mode!) """ raise NotImplementedError() @@ -132,7 +145,7 @@ class JsonDataHandler(IDataHandler): _tmp = [re.search(r'^(\S+)(?=\-trades.json)', p.name) for p in datadir.glob(f"*trades.{cls._get_file_extension()}")] # Check if regex found something and only return these results to avoid exceptions. 
-        return [match[0].replace('_', '/') for match in _tmp if match]
+        return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match]
 
     def trades_store(self, pair: str, data: TradeList) -> None:
         """
diff --git a/freqtrade/edge/edge_positioning.py b/freqtrade/edge/edge_positioning.py
index e08b3df2f..b2f4534f1 100644
--- a/freqtrade/edge/edge_positioning.py
+++ b/freqtrade/edge/edge_positioning.py
@@ -119,8 +119,8 @@ class Edge:
             )
             # Download informative pairs too
             res = defaultdict(list)
-            for p, t in self.strategy.gather_informative_pairs():
-                res[t].append(p)
+            for pair, timeframe, _ in self.strategy.gather_informative_pairs():
+                res[timeframe].append(pair)
             for timeframe, inf_pairs in res.items():
                 timerange_startup = deepcopy(self._timerange)
                 timerange_startup.subtract_start(timeframe_to_seconds(
diff --git a/freqtrade/enums/__init__.py b/freqtrade/enums/__init__.py
index e9d166258..f2fee4792 100644
--- a/freqtrade/enums/__init__.py
+++ b/freqtrade/enums/__init__.py
@@ -1,5 +1,6 @@
 # flake8: noqa: F401
 from freqtrade.enums.backteststate import BacktestState
+from freqtrade.enums.candletype import CandleType
 from freqtrade.enums.collateral import Collateral
 from freqtrade.enums.rpcmessagetype import RPCMessageType
 from freqtrade.enums.runmode import NON_UTIL_MODES, OPTIMIZE_MODES, TRADING_MODES, RunMode
diff --git a/freqtrade/enums/candletype.py b/freqtrade/enums/candletype.py
new file mode 100644
index 000000000..0188650f6
--- /dev/null
+++ b/freqtrade/enums/candletype.py
@@ -0,0 +1,25 @@
+from enum import Enum
+
+
+class CandleType(str, Enum):
+    """Enum to distinguish candle types"""
+    SPOT = "spot"
+    FUTURES = "futures"
+    MARK = "mark"
+    INDEX = "index"
+    PREMIUMINDEX = "premiumIndex"
+    # TODO-lev: not sure this belongs here, as the datatype is really different
+    FUNDING_RATE = "funding_rate"
+
+    @staticmethod
+    def from_string(value: str) -> 'CandleType':
+        if not value:
+            # Default to spot
+            return CandleType.SPOT
+        return CandleType(value)
+
+    @staticmethod
+    def get_default(trading_mode: str) -> 'CandleType':
+        if trading_mode == 'futures':
+            return CandleType.FUTURES
+        return CandleType.SPOT
diff --git a/freqtrade/exchange/binance.py b/freqtrade/exchange/binance.py
index eb0ab5cc8..10fc7ab65 100644
--- a/freqtrade/exchange/binance.py
+++ b/freqtrade/exchange/binance.py
@@ -8,7 +8,7 @@ from typing import Dict, List, Optional, Tuple
 import arrow
 import ccxt
 
-from freqtrade.enums import Collateral, TradingMode
+from freqtrade.enums import CandleType, Collateral, TradingMode
 from freqtrade.exceptions import (DDosProtection, InsufficientFundsError, InvalidOrderException,
                                   OperationalException, TemporaryError)
 from freqtrade.exchange import Exchange
@@ -197,23 +197,30 @@ class Binance(Exchange):
             raise OperationalException(e) from e
 
     async def _async_get_historic_ohlcv(self, pair: str, timeframe: str,
-                                        since_ms: int, is_new_pair: bool = False,
-                                        raise_: bool = False
-                                        ) -> Tuple[str, str, List]:
+                                        since_ms: int, candle_type: CandleType,
+                                        is_new_pair: bool = False, raise_: bool = False,
+                                        ) -> Tuple[str, str, str, List]:
         """
         Overwrite to introduce "fast new pair" functionality by detecting the pair's listing date
         Does not work for other exchanges, which don't return the earliest data when called with "0"
+        :param candle_type: Any of the enum CandleType (must match trading mode!)
""" if is_new_pair: - x = await self._async_get_candle_history(pair, timeframe, 0) - if x and x[2] and x[2][0] and x[2][0][0] > since_ms: + x = await self._async_get_candle_history(pair, timeframe, candle_type, 0) + if x and x[3] and x[3][0] and x[3][0][0] > since_ms: # Set starting date to first available candle. - since_ms = x[2][0][0] + since_ms = x[3][0][0] logger.info(f"Candle-data for {pair} available starting with " f"{arrow.get(since_ms // 1000).isoformat()}.") + return await super()._async_get_historic_ohlcv( - pair=pair, timeframe=timeframe, since_ms=since_ms, is_new_pair=is_new_pair, - raise_=raise_) + pair=pair, + timeframe=timeframe, + since_ms=since_ms, + is_new_pair=is_new_pair, + raise_=raise_, + candle_type=candle_type + ) def funding_fee_cutoff(self, open_date: datetime): """ diff --git a/freqtrade/exchange/common.py b/freqtrade/exchange/common.py index fc21c0f02..3beb253df 100644 --- a/freqtrade/exchange/common.py +++ b/freqtrade/exchange/common.py @@ -43,10 +43,14 @@ EXCHANGE_HAS_REQUIRED = [ EXCHANGE_HAS_OPTIONAL = [ # Private 'fetchMyTrades', # Trades for order - fee detection + # 'setLeverage', # Margin/Futures trading + # 'setMarginMode', # Margin/Futures trading + # 'fetchFundingHistory', # Futures trading # Public 'fetchOrderBook', 'fetchL2OrderBook', 'fetchTicker', # OR for pricing 'fetchTickers', # For volumepairlist? 'fetchTrades', # Downloading trades data + # 'fetchFundingRateHistory', # Futures trading ] diff --git a/freqtrade/exchange/exchange.py b/freqtrade/exchange/exchange.py index 40600bfaa..6aa15f550 100644 --- a/freqtrade/exchange/exchange.py +++ b/freqtrade/exchange/exchange.py @@ -20,9 +20,9 @@ from ccxt.base.decimal_to_precision import (ROUND_DOWN, ROUND_UP, TICK_SIZE, TRU from pandas import DataFrame from freqtrade.constants import (DEFAULT_AMOUNT_RESERVE_PERCENT, NON_OPEN_EXCHANGE_STATES, - ListPairsWithTimeframes) + ListPairsWithTimeframes, PairWithTimeframe) from freqtrade.data.converter import ohlcv_to_dataframe, trades_dict_to_list -from freqtrade.enums import Collateral, TradingMode +from freqtrade.enums import CandleType, Collateral, TradingMode from freqtrade.exceptions import (DDosProtection, ExchangeError, InsufficientFundsError, InvalidOrderException, OperationalException, PricingError, RetryableOrderError, TemporaryError) @@ -70,6 +70,7 @@ class Exchange: "l2_limit_range": None, "l2_limit_range_required": True, # Allow Empty L2 limit (kucoin) "mark_ohlcv_price": "mark", + "mark_ohlcv_timeframe": "8h", "ccxt_futures_name": "swap" } _ft_has: Dict = {} @@ -92,7 +93,7 @@ class Exchange: self._config.update(config) # Holds last candle refreshed time of each pair - self._pairs_last_refresh_time: Dict[Tuple[str, str], int] = {} + self._pairs_last_refresh_time: Dict[PairWithTimeframe, int] = {} # Timestamp of last markets refresh self._last_markets_refresh: int = 0 @@ -105,7 +106,7 @@ class Exchange: self._buy_rate_cache: TTLCache = TTLCache(maxsize=100, ttl=1800) # Holds candles - self._klines: Dict[Tuple[str, str], DataFrame] = {} + self._klines: Dict[PairWithTimeframe, DataFrame] = {} # Holds all open sell orders for dry_run self._dry_run_open_orders: Dict[str, Any] = {} @@ -359,7 +360,7 @@ class Exchange: or (self.trading_mode == TradingMode.FUTURES and self.market_is_future(market)) ) - def klines(self, pair_interval: Tuple[str, str], copy: bool = True) -> DataFrame: + def klines(self, pair_interval: PairWithTimeframe, copy: bool = True) -> DataFrame: if pair_interval in self._klines: return self._klines[pair_interval].copy() if copy else 
self._klines[pair_interval] else: @@ -1314,7 +1315,8 @@ class Exchange: # Historic data def get_historic_ohlcv(self, pair: str, timeframe: str, - since_ms: int, is_new_pair: bool = False) -> List: + since_ms: int, candle_type: CandleType, + is_new_pair: bool = False) -> List: """ Get candle history using asyncio and returns the list of candles. Handles all async work for this. @@ -1322,34 +1324,38 @@ class Exchange: :param pair: Pair to download :param timeframe: Timeframe to get data for :param since_ms: Timestamp in milliseconds to get history from + :param candle_type: '', mark, index, premiumIndex, or funding_rate :return: List with candle (OHLCV) data """ - pair, timeframe, data = asyncio.get_event_loop().run_until_complete( + pair, _, _, data = asyncio.get_event_loop().run_until_complete( self._async_get_historic_ohlcv(pair=pair, timeframe=timeframe, - since_ms=since_ms, is_new_pair=is_new_pair)) + since_ms=since_ms, is_new_pair=is_new_pair, + candle_type=candle_type)) logger.info(f"Downloaded data for {pair} with length {len(data)}.") return data def get_historic_ohlcv_as_df(self, pair: str, timeframe: str, - since_ms: int) -> DataFrame: + since_ms: int, candle_type: CandleType) -> DataFrame: """ Minimal wrapper around get_historic_ohlcv - converting the result into a dataframe :param pair: Pair to download :param timeframe: Timeframe to get data for :param since_ms: Timestamp in milliseconds to get history from + :param candle_type: Any of the enum CandleType (must match trading mode!) :return: OHLCV DataFrame """ - ticks = self.get_historic_ohlcv(pair, timeframe, since_ms=since_ms) + ticks = self.get_historic_ohlcv(pair, timeframe, since_ms=since_ms, candle_type=candle_type) return ohlcv_to_dataframe(ticks, timeframe, pair=pair, fill_missing=True, drop_incomplete=self._ohlcv_partial_candle) async def _async_get_historic_ohlcv(self, pair: str, timeframe: str, - since_ms: int, is_new_pair: bool = False, - raise_: bool = False - ) -> Tuple[str, str, List]: + since_ms: int, candle_type: CandleType, + is_new_pair: bool = False, raise_: bool = False, + ) -> Tuple[str, str, str, List]: """ Download historic ohlcv :param is_new_pair: used by binance subclass to allow "fast" new pair downloading + :param candle_type: Any of the enum CandleType (must match trading mode!) """ one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(timeframe) @@ -1359,7 +1365,7 @@ class Exchange: arrow.utcnow().shift(seconds=one_call // 1000).humanize(only_distance=True) ) input_coroutines = [self._async_get_candle_history( - pair, timeframe, since) for since in + pair, timeframe, candle_type, since) for since in range(since_ms, arrow.utcnow().int_timestamp * 1000, one_call)] data: List = [] @@ -1375,16 +1381,16 @@ class Exchange: continue else: # Deconstruct tuple if it's not an exception - p, _, new_data = res - if p == pair: + p, _, c, new_data = res + if p == pair and c == candle_type: data.extend(new_data) # Sort data again after extending the result - above calls return in "async order" data = sorted(data, key=lambda x: x[0]) - return pair, timeframe, data + return pair, timeframe, candle_type, data def refresh_latest_ohlcv(self, pair_list: ListPairsWithTimeframes, *, since_ms: Optional[int] = None, cache: bool = True - ) -> Dict[Tuple[str, str], DataFrame]: + ) -> Dict[PairWithTimeframe, DataFrame]: """ Refresh in-memory OHLCV asynchronously and set `_klines` with the result Loops asynchronously over pair_list and downloads all pairs async (semi-parallel). 
@@ -1399,9 +1405,9 @@ class Exchange: input_coroutines = [] cached_pairs = [] # Gather coroutines to run - for pair, timeframe in set(pair_list): - if ((pair, timeframe) not in self._klines or not cache - or self._now_is_time_to_refresh(pair, timeframe)): + for pair, timeframe, candle_type in set(pair_list): + if ((pair, timeframe, candle_type) not in self._klines or not cache + or self._now_is_time_to_refresh(pair, timeframe, candle_type)): if not since_ms and self.required_candle_call_count > 1: # Multiple calls for one pair - to get more history one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(timeframe) @@ -1411,17 +1417,17 @@ class Exchange: if since_ms: input_coroutines.append(self._async_get_historic_ohlcv( - pair, timeframe, since_ms=since_ms, raise_=True)) + pair, timeframe, since_ms=since_ms, raise_=True, candle_type=candle_type)) else: # One call ... "regular" refresh input_coroutines.append(self._async_get_candle_history( - pair, timeframe, since_ms=since_ms)) + pair, timeframe, since_ms=since_ms, candle_type=candle_type)) else: logger.debug( - "Using cached candle (OHLCV) data for pair %s, timeframe %s ...", - pair, timeframe + "Using cached candle (OHLCV) data for pair %s, timeframe %s, candleType %s ...", + pair, timeframe, candle_type ) - cached_pairs.append((pair, timeframe)) + cached_pairs.append((pair, timeframe, candle_type)) results_df = {} # Chunk requests into batches of 100 to avoid overwelming ccxt Throttling @@ -1429,42 +1435,53 @@ class Exchange: results = asyncio.get_event_loop().run_until_complete( asyncio.gather(*input_coro, return_exceptions=True)) - # handle caching for res in results: if isinstance(res, Exception): logger.warning(f"Async code raised an exception: {repr(res)}") continue - # Deconstruct tuple (has 3 elements) - pair, timeframe, ticks = res + # Deconstruct tuple (has 4 elements) + pair, timeframe, c_type, ticks = res # keeping last candle time as last refreshed time of the pair if ticks: - self._pairs_last_refresh_time[(pair, timeframe)] = ticks[-1][0] // 1000 + self._pairs_last_refresh_time[(pair, timeframe, c_type)] = ticks[-1][0] // 1000 # keeping parsed dataframe in cache ohlcv_df = ohlcv_to_dataframe( ticks, timeframe, pair=pair, fill_missing=True, drop_incomplete=self._ohlcv_partial_candle) - results_df[(pair, timeframe)] = ohlcv_df + results_df[(pair, timeframe, c_type)] = ohlcv_df if cache: - self._klines[(pair, timeframe)] = ohlcv_df - + self._klines[(pair, timeframe, c_type)] = ohlcv_df # Return cached klines - for pair, timeframe in cached_pairs: - results_df[(pair, timeframe)] = self.klines((pair, timeframe), copy=False) + for pair, timeframe, c_type in cached_pairs: + results_df[(pair, timeframe, c_type)] = self.klines( + (pair, timeframe, c_type), + copy=False + ) return results_df - def _now_is_time_to_refresh(self, pair: str, timeframe: str) -> bool: + def _now_is_time_to_refresh(self, pair: str, timeframe: str, candle_type: CandleType) -> bool: # Timeframe in seconds interval_in_sec = timeframe_to_seconds(timeframe) - return not ((self._pairs_last_refresh_time.get((pair, timeframe), 0) - + interval_in_sec) >= arrow.utcnow().int_timestamp) + return not ( + (self._pairs_last_refresh_time.get( + (pair, timeframe, candle_type), + 0 + ) + interval_in_sec) >= arrow.utcnow().int_timestamp + ) @retrier_async - async def _async_get_candle_history(self, pair: str, timeframe: str, - since_ms: Optional[int] = None) -> Tuple[str, str, List]: + async def _async_get_candle_history( + self, + pair: str, + timeframe: str, + 
candle_type: CandleType, + since_ms: Optional[int] = None, + ) -> Tuple[str, str, str, List]: """ Asynchronously get candle history data using fetch_ohlcv + :param candle_type: '', mark, index, premiumIndex, or funding_rate returns tuple: (pair, timeframe, ohlcv_list) """ try: @@ -1474,7 +1491,9 @@ class Exchange: "Fetching pair %s, interval %s, since %s %s...", pair, timeframe, since_ms, s ) - params = self._ft_has.get('ohlcv_params', {}) + params = deepcopy(self._ft_has.get('ohlcv_params', {})) + if candle_type != CandleType.SPOT: + params.update({'price': candle_type}) data = await self._api_async.fetch_ohlcv(pair, timeframe=timeframe, since=since_ms, limit=self.ohlcv_candle_limit(timeframe), @@ -1489,9 +1508,9 @@ class Exchange: data = sorted(data, key=lambda x: x[0]) except IndexError: logger.exception("Error loading %s. Result was %s.", pair, data) - return pair, timeframe, [] + return pair, timeframe, candle_type, [] logger.debug("Done fetching pair %s, interval %s ...", pair, timeframe) - return pair, timeframe, data + return pair, timeframe, candle_type, data except ccxt.NotSupported as e: raise OperationalException( diff --git a/freqtrade/exchange/ftx.py b/freqtrade/exchange/ftx.py index fc7bc682e..36a08239d 100644 --- a/freqtrade/exchange/ftx.py +++ b/freqtrade/exchange/ftx.py @@ -21,6 +21,7 @@ class Ftx(Exchange): "stoploss_on_exchange": True, "ohlcv_candle_limit": 1500, "mark_ohlcv_price": "index", + "mark_ohlcv_timeframe": "1h", "ccxt_futures_name": "future" } diff --git a/freqtrade/exchange/kraken.py b/freqtrade/exchange/kraken.py index 42d817222..40944d15b 100644 --- a/freqtrade/exchange/kraken.py +++ b/freqtrade/exchange/kraken.py @@ -22,6 +22,7 @@ class Kraken(Exchange): "ohlcv_candle_limit": 720, "trades_pagination": "id", "trades_pagination_arg": "since", + "mark_ohlcv_timeframe": "4h", } _supported_trading_mode_collateral_pairs: List[Tuple[TradingMode, Collateral]] = [ diff --git a/freqtrade/misc.py b/freqtrade/misc.py index 6f439866b..7c83c22bd 100644 --- a/freqtrade/misc.py +++ b/freqtrade/misc.py @@ -109,7 +109,7 @@ def file_load_json(file): def pair_to_filename(pair: str) -> str: - for ch in ['/', '-', ' ', '.', '@', '$', '+', ':']: + for ch in ['/', ' ', '.', '@', '$', '+', ':']: pair = pair.replace(ch, '_') return pair diff --git a/freqtrade/optimize/backtesting.py b/freqtrade/optimize/backtesting.py index 6c5a44da0..43401be46 100644 --- a/freqtrade/optimize/backtesting.py +++ b/freqtrade/optimize/backtesting.py @@ -17,8 +17,7 @@ from freqtrade.data import history from freqtrade.data.btanalysis import trade_list_to_dataframe from freqtrade.data.converter import trim_dataframe, trim_dataframes from freqtrade.data.dataprovider import DataProvider -from freqtrade.enums import BacktestState, SellType -from freqtrade.enums.tradingmode import TradingMode +from freqtrade.enums import BacktestState, CandleType, SellType, TradingMode from freqtrade.exceptions import DependencyException, OperationalException from freqtrade.exchange import timeframe_to_minutes, timeframe_to_seconds from freqtrade.mixins import LoggingMixin @@ -290,7 +289,8 @@ class Backtesting: df_analyzed.loc[:, col] = 0 if col not in ('enter_tag', 'exit_tag') else None # Update dataprovider cache - self.dataprovider._set_cached_df(pair, self.timeframe, df_analyzed) + self.dataprovider._set_cached_df(pair, self.timeframe, df_analyzed, CandleType.SPOT) + # TODO-lev: Candle-type should be conditional, either "spot" or futures df_analyzed = df_analyzed.drop(df_analyzed.head(1).index) diff --git 
a/freqtrade/plugins/pairlist/AgeFilter.py b/freqtrade/plugins/pairlist/AgeFilter.py index 5627d82ce..f5507d0a6 100644 --- a/freqtrade/plugins/pairlist/AgeFilter.py +++ b/freqtrade/plugins/pairlist/AgeFilter.py @@ -9,6 +9,7 @@ import arrow from pandas import DataFrame from freqtrade.configuration import PeriodicCache +from freqtrade.constants import ListPairsWithTimeframes from freqtrade.exceptions import OperationalException from freqtrade.misc import plural from freqtrade.plugins.pairlist.IPairList import IPairList @@ -71,8 +72,8 @@ class AgeFilter(IPairList): :param tickers: Tickers (from exchange.get_tickers()). May be cached. :return: new allowlist """ - needed_pairs = [ - (p, '1d') for p in pairlist + needed_pairs: ListPairsWithTimeframes = [ + (p, '1d', self._config['candle_type_def']) for p in pairlist if p not in self._symbolsChecked and p not in self._symbolsCheckFailed] if not needed_pairs: # Remove pairs that have been removed before @@ -88,7 +89,8 @@ class AgeFilter(IPairList): candles = self._exchange.refresh_latest_ohlcv(needed_pairs, since_ms=since_ms, cache=False) if self._enabled: for p in deepcopy(pairlist): - daily_candles = candles[(p, '1d')] if (p, '1d') in candles else None + daily_candles = candles[(p, '1d', self._config['candle_type_def'])] if ( + p, '1d', self._config['candle_type_def']) in candles else None if not self._validate_pair_loc(p, daily_candles): pairlist.remove(p) self.log_once(f"Validated {len(pairlist)} pairs.", logger.info) diff --git a/freqtrade/plugins/pairlist/ShuffleFilter.py b/freqtrade/plugins/pairlist/ShuffleFilter.py index 55cf9938f..663bba49b 100644 --- a/freqtrade/plugins/pairlist/ShuffleFilter.py +++ b/freqtrade/plugins/pairlist/ShuffleFilter.py @@ -5,7 +5,7 @@ import logging import random from typing import Any, Dict, List -from freqtrade.enums.runmode import RunMode +from freqtrade.enums import RunMode from freqtrade.plugins.pairlist.IPairList import IPairList diff --git a/freqtrade/plugins/pairlist/VolatilityFilter.py b/freqtrade/plugins/pairlist/VolatilityFilter.py index 9383e5d06..55340fa14 100644 --- a/freqtrade/plugins/pairlist/VolatilityFilter.py +++ b/freqtrade/plugins/pairlist/VolatilityFilter.py @@ -11,6 +11,7 @@ import numpy as np from cachetools.ttl import TTLCache from pandas import DataFrame +from freqtrade.constants import ListPairsWithTimeframes from freqtrade.exceptions import OperationalException from freqtrade.misc import plural from freqtrade.plugins.pairlist.IPairList import IPairList @@ -33,6 +34,7 @@ class VolatilityFilter(IPairList): self._min_volatility = pairlistconfig.get('min_volatility', 0) self._max_volatility = pairlistconfig.get('max_volatility', sys.maxsize) self._refresh_period = pairlistconfig.get('refresh_period', 1440) + self._def_candletype = self._config['candle_type_def'] self._pair_cache: TTLCache = TTLCache(maxsize=1000, ttl=self._refresh_period) @@ -67,7 +69,8 @@ class VolatilityFilter(IPairList): :param tickers: Tickers (from exchange.get_tickers()). May be cached. 
:return: new allowlist """ - needed_pairs = [(p, '1d') for p in pairlist if p not in self._pair_cache] + needed_pairs: ListPairsWithTimeframes = [ + (p, '1d', self._def_candletype) for p in pairlist if p not in self._pair_cache] since_ms = (arrow.utcnow() .floor('day') @@ -81,7 +84,8 @@ class VolatilityFilter(IPairList): if self._enabled: for p in deepcopy(pairlist): - daily_candles = candles[(p, '1d')] if (p, '1d') in candles else None + daily_candles = candles[(p, '1d', self._def_candletype)] if ( + p, '1d', self._def_candletype) in candles else None if not self._validate_pair_loc(p, daily_candles): pairlist.remove(p) return pairlist diff --git a/freqtrade/plugins/pairlist/VolumePairList.py b/freqtrade/plugins/pairlist/VolumePairList.py index 0ffc8a8c8..ca9771516 100644 --- a/freqtrade/plugins/pairlist/VolumePairList.py +++ b/freqtrade/plugins/pairlist/VolumePairList.py @@ -10,6 +10,7 @@ from typing import Any, Dict, List import arrow from cachetools.ttl import TTLCache +from freqtrade.constants import ListPairsWithTimeframes from freqtrade.exceptions import OperationalException from freqtrade.exchange import timeframe_to_minutes from freqtrade.misc import format_ms_time @@ -43,6 +44,7 @@ class VolumePairList(IPairList): self._lookback_days = self._pairlistconfig.get('lookback_days', 0) self._lookback_timeframe = self._pairlistconfig.get('lookback_timeframe', '1d') self._lookback_period = self._pairlistconfig.get('lookback_period', 0) + self._def_candletype = self._config['candle_type_def'] if (self._lookback_days > 0) & (self._lookback_period > 0): raise OperationalException( @@ -159,11 +161,10 @@ class VolumePairList(IPairList): self.log_once(f"Using volume range of {self._lookback_period} candles, timeframe: " f"{self._lookback_timeframe}, starting from {format_ms_time(since_ms)} " f"till {format_ms_time(to_ms)}", logger.info) - needed_pairs = [ - (p, self._lookback_timeframe) for p in - [ - s['symbol'] for s in filtered_tickers - ] if p not in self._pair_cache + needed_pairs: ListPairsWithTimeframes = [ + (p, self._lookback_timeframe, self._def_candletype) for p in + [s['symbol'] for s in filtered_tickers] + if p not in self._pair_cache ] # Get all candles @@ -174,8 +175,10 @@ class VolumePairList(IPairList): ) for i, p in enumerate(filtered_tickers): pair_candles = candles[ - (p['symbol'], self._lookback_timeframe) - ] if (p['symbol'], self._lookback_timeframe) in candles else None + (p['symbol'], self._lookback_timeframe, self._def_candletype) + ] if ( + p['symbol'], self._lookback_timeframe, self._def_candletype + ) in candles else None # in case of candle data calculate typical price and quoteVolume for candle if pair_candles is not None and not pair_candles.empty: pair_candles['typical_price'] = (pair_candles['high'] + pair_candles['low'] diff --git a/freqtrade/plugins/pairlist/rangestabilityfilter.py b/freqtrade/plugins/pairlist/rangestabilityfilter.py index 3e5a002ff..96a59808e 100644 --- a/freqtrade/plugins/pairlist/rangestabilityfilter.py +++ b/freqtrade/plugins/pairlist/rangestabilityfilter.py @@ -9,6 +9,7 @@ import arrow from cachetools.ttl import TTLCache from pandas import DataFrame +from freqtrade.constants import ListPairsWithTimeframes from freqtrade.exceptions import OperationalException from freqtrade.misc import plural from freqtrade.plugins.pairlist.IPairList import IPairList @@ -28,6 +29,7 @@ class RangeStabilityFilter(IPairList): self._min_rate_of_change = pairlistconfig.get('min_rate_of_change', 0.01) self._max_rate_of_change = 
pairlistconfig.get('max_rate_of_change', None) self._refresh_period = pairlistconfig.get('refresh_period', 1440) + self._def_candletype = self._config['candle_type_def'] self._pair_cache: TTLCache = TTLCache(maxsize=1000, ttl=self._refresh_period) @@ -65,7 +67,8 @@ class RangeStabilityFilter(IPairList): :param tickers: Tickers (from exchange.get_tickers()). May be cached. :return: new allowlist """ - needed_pairs = [(p, '1d') for p in pairlist if p not in self._pair_cache] + needed_pairs: ListPairsWithTimeframes = [ + (p, '1d', self._def_candletype) for p in pairlist if p not in self._pair_cache] since_ms = (arrow.utcnow() .floor('day') @@ -79,7 +82,8 @@ class RangeStabilityFilter(IPairList): if self._enabled: for p in deepcopy(pairlist): - daily_candles = candles[(p, '1d')] if (p, '1d') in candles else None + daily_candles = candles[(p, '1d', self._def_candletype)] if ( + p, '1d', self._def_candletype) in candles else None if not self._validate_pair_loc(p, daily_candles): pairlist.remove(p) return pairlist diff --git a/freqtrade/plugins/pairlistmanager.py b/freqtrade/plugins/pairlistmanager.py index 93b5e90e2..5ae9a7e35 100644 --- a/freqtrade/plugins/pairlistmanager.py +++ b/freqtrade/plugins/pairlistmanager.py @@ -8,6 +8,7 @@ from typing import Dict, List from cachetools import TTLCache, cached from freqtrade.constants import ListPairsWithTimeframes +from freqtrade.enums import CandleType from freqtrade.exceptions import OperationalException from freqtrade.plugins.pairlist.IPairList import IPairList from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist @@ -138,4 +139,4 @@ class PairListManager(): """ Create list of pair tuples with (pair, timeframe) """ - return [(pair, timeframe or self._config['timeframe']) for pair in pairs] + return [(pair, timeframe or self._config['timeframe'], CandleType.SPOT) for pair in pairs] diff --git a/freqtrade/rpc/api_server/api_v1.py b/freqtrade/rpc/api_server/api_v1.py index 65b6941e2..644e24655 100644 --- a/freqtrade/rpc/api_server/api_v1.py +++ b/freqtrade/rpc/api_server/api_v1.py @@ -9,6 +9,7 @@ from fastapi.exceptions import HTTPException from freqtrade import __version__ from freqtrade.constants import USERPATH_STRATEGIES from freqtrade.data.history import get_datahandler +from freqtrade.enums import CandleType from freqtrade.exceptions import OperationalException from freqtrade.rpc import RPC from freqtrade.rpc.api_server.api_schemas import (AvailablePairs, Balances, BlacklistPayload, @@ -250,16 +251,22 @@ def get_strategy(strategy: str, config=Depends(get_config)): @router.get('/available_pairs', response_model=AvailablePairs, tags=['candle data']) def list_available_pairs(timeframe: Optional[str] = None, stake_currency: Optional[str] = None, - config=Depends(get_config)): + candletype: Optional[CandleType] = None, config=Depends(get_config)): dh = get_datahandler(config['datadir'], config.get('dataformat_ohlcv', None)) - - pair_interval = dh.ohlcv_get_available_data(config['datadir']) + trading_mode = config.get('trading_mode', 'spot') + pair_interval = dh.ohlcv_get_available_data(config['datadir'], trading_mode) if timeframe: pair_interval = [pair for pair in pair_interval if pair[1] == timeframe] if stake_currency: pair_interval = [pair for pair in pair_interval if pair[0].endswith(stake_currency)] + if candletype: + pair_interval = [pair for pair in pair_interval if pair[2] == candletype] + else: + candle_type = CandleType.get_default(trading_mode) + pair_interval = [pair for pair in pair_interval if pair[2] == candle_type] + 
pair_interval = sorted(pair_interval, key=lambda x: x[0]) pairs = list({x[0] for x in pair_interval}) diff --git a/freqtrade/strategy/informative_decorator.py b/freqtrade/strategy/informative_decorator.py index 722e7a128..986b457a2 100644 --- a/freqtrade/strategy/informative_decorator.py +++ b/freqtrade/strategy/informative_decorator.py @@ -2,6 +2,7 @@ from typing import Any, Callable, NamedTuple, Optional, Union from pandas import DataFrame +from freqtrade.enums import CandleType from freqtrade.exceptions import OperationalException from freqtrade.strategy.strategy_helper import merge_informative_pair @@ -14,6 +15,7 @@ class InformativeData(NamedTuple): timeframe: str fmt: Union[str, Callable[[Any], str], None] ffill: bool + candle_type: CandleType def informative(timeframe: str, asset: str = '', @@ -46,6 +48,7 @@ def informative(timeframe: str, asset: str = '', * {column} - name of dataframe column. * {timeframe} - timeframe of informative dataframe. :param ffill: ffill dataframe after merging informative pair. + :param candle_type: '', mark, index, premiumIndex, or funding_rate """ _asset = asset _timeframe = timeframe @@ -54,7 +57,9 @@ def informative(timeframe: str, asset: str = '', def decorator(fn: PopulateIndicators): informative_pairs = getattr(fn, '_ft_informative', []) - informative_pairs.append(InformativeData(_asset, _timeframe, _fmt, _ffill)) + # TODO-lev: Add candle_type to InformativeData + informative_pairs.append(InformativeData(_asset, _timeframe, _fmt, _ffill, + CandleType.SPOT)) setattr(fn, '_ft_informative', informative_pairs) return fn return decorator diff --git a/freqtrade/strategy/interface.py b/freqtrade/strategy/interface.py index 36bf09f5f..25b7404f7 100644 --- a/freqtrade/strategy/interface.py +++ b/freqtrade/strategy/interface.py @@ -13,7 +13,7 @@ from pandas import DataFrame from freqtrade.constants import ListPairsWithTimeframes from freqtrade.data.dataprovider import DataProvider -from freqtrade.enums import SellType, SignalDirection, SignalTagType, SignalType +from freqtrade.enums import CandleType, SellType, SignalDirection, SignalTagType, SignalType from freqtrade.exceptions import OperationalException, StrategyError from freqtrade.exchange import timeframe_to_minutes, timeframe_to_seconds from freqtrade.exchange.exchange import timeframe_to_next_date @@ -422,16 +422,25 @@ class IStrategy(ABC, HyperStrategyMixin): Internal method which gathers all informative pairs (user or automatically defined). 
""" informative_pairs = self.informative_pairs() + # Compatibility code for 2 tuple informative pairs + informative_pairs = [ + (p[0], p[1], CandleType.from_string(p[2]) if len( + p) > 2 else self.config.get('candle_type_def', CandleType.SPOT)) + for p in informative_pairs] for inf_data, _ in self._ft_informative: if inf_data.asset: - pair_tf = (_format_pair_name(self.config, inf_data.asset), inf_data.timeframe) + pair_tf = ( + _format_pair_name(self.config, inf_data.asset), + inf_data.timeframe, + inf_data.candle_type + ) informative_pairs.append(pair_tf) else: if not self.dp: raise OperationalException('@informative decorator with unspecified asset ' 'requires DataProvider instance.') for pair in self.dp.current_whitelist(): - informative_pairs.append((pair, inf_data.timeframe)) + informative_pairs.append((pair, inf_data.timeframe, inf_data.candle_type)) return list(set(informative_pairs)) def get_strategy_name(self) -> str: @@ -522,7 +531,9 @@ class IStrategy(ABC, HyperStrategyMixin): dataframe = self.analyze_ticker(dataframe, metadata) self._last_candle_seen_per_pair[pair] = dataframe.iloc[-1]['date'] if self.dp: - self.dp._set_cached_df(pair, self.timeframe, dataframe) + self.dp._set_cached_df( + pair, self.timeframe, dataframe, + candle_type=self.config.get('candle_type_def', CandleType.SPOT)) else: logger.debug("Skipping TA Analysis for already analyzed candle") dataframe[SignalType.ENTER_LONG.value] = 0 diff --git a/tests/commands/test_commands.py b/tests/commands/test_commands.py index 55fc4463d..2b5504324 100644 --- a/tests/commands/test_commands.py +++ b/tests/commands/test_commands.py @@ -814,6 +814,18 @@ def test_download_data_trades(mocker, caplog): assert dl_mock.call_args[1]['timerange'].starttype == "date" assert dl_mock.call_count == 1 assert convert_mock.call_count == 1 + args = [ + "download-data", + "--exchange", "kraken", + "--pairs", "ETH/BTC", "XRP/BTC", + "--days", "20", + "--trading-mode", "futures", + "--dl-trades" + ] + with pytest.raises(OperationalException, + match="Trade download not supported for futures."): + + start_download_data(get_args(args)) def test_start_convert_trades(mocker, caplog): @@ -1327,8 +1339,8 @@ def test_start_list_data(testdatadir, capsys): start_list_data(pargs) captured = capsys.readouterr() assert "Found 17 pair / timeframe combinations." in captured.out - assert "\n| Pair | Timeframe |\n" in captured.out - assert "\n| UNITTEST/BTC | 1m, 5m, 8m, 30m |\n" in captured.out + assert "\n| Pair | Timeframe | Type |\n" in captured.out + assert "\n| UNITTEST/BTC | 1m, 5m, 8m, 30m | spot |\n" in captured.out args = [ "list-data", @@ -1343,9 +1355,27 @@ def test_start_list_data(testdatadir, capsys): start_list_data(pargs) captured = capsys.readouterr() assert "Found 2 pair / timeframe combinations." in captured.out - assert "\n| Pair | Timeframe |\n" in captured.out + assert "\n| Pair | Timeframe | Type |\n" in captured.out assert "UNITTEST/BTC" not in captured.out - assert "\n| XRP/ETH | 1m, 5m |\n" in captured.out + assert "\n| XRP/ETH | 1m, 5m | spot |\n" in captured.out + + args = [ + "list-data", + "--data-format-ohlcv", + "json", + "--trading-mode", "futures", + "--datadir", + str(testdatadir), + ] + pargs = get_args(args) + pargs['config'] = None + start_list_data(pargs) + captured = capsys.readouterr() + + assert "Found 3 pair / timeframe combinations." 
in captured.out + assert "\n| Pair | Timeframe | Type |\n" in captured.out + assert "\n| XRP/USDT | 1h | futures |\n" in captured.out + assert "\n| XRP/USDT | 1h | mark |\n" in captured.out @pytest.mark.usefixtures("init_persistence") diff --git a/tests/conftest.py b/tests/conftest.py index 6a85f5de2..0b625ab68 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -18,8 +18,7 @@ from freqtrade import constants from freqtrade.commands import Arguments from freqtrade.data.converter import ohlcv_to_dataframe from freqtrade.edge import PairInfo -from freqtrade.enums import Collateral, RunMode, TradingMode -from freqtrade.enums.signaltype import SignalDirection +from freqtrade.enums import CandleType, Collateral, RunMode, SignalDirection, TradingMode from freqtrade.exchange import Exchange from freqtrade.freqtradebot import FreqtradeBot from freqtrade.persistence import LocalTrade, Trade, init_db @@ -459,6 +458,7 @@ def get_default_conf(testdatadir): "disableparamexport": True, "internals": {}, "export": "none", + "candle_type_def": CandleType.SPOT, } return configuration @@ -2386,7 +2386,7 @@ def market_buy_order_usdt_doublefee(market_buy_order_usdt): 'amount': 25.0, 'cost': 50.25, 'fee': {'cost': 0.00025125, 'currency': 'BNB'} - }, { + }, { 'timestamp': None, 'datetime': None, 'symbol': 'ETH/USDT', @@ -2399,7 +2399,7 @@ def market_buy_order_usdt_doublefee(market_buy_order_usdt): 'amount': 5, 'cost': 10, 'fee': {'cost': 0.0100306, 'currency': 'USDT'} - }] + }] return order diff --git a/tests/data/test_converter.py b/tests/data/test_converter.py index 6c95a9f18..c6b0059a2 100644 --- a/tests/data/test_converter.py +++ b/tests/data/test_converter.py @@ -12,6 +12,8 @@ from freqtrade.data.converter import (convert_ohlcv_format, convert_trades_forma trades_to_ohlcv, trim_dataframe) from freqtrade.data.history import (get_timerange, load_data, load_pair_history, validate_backtest_data) +from freqtrade.data.history.idatahandler import IDataHandler +from freqtrade.enums import CandleType from tests.conftest import log_has, log_has_re from tests.data.test_history import _clean_test_file @@ -75,7 +77,8 @@ def test_ohlcv_fill_up_missing_data(testdatadir, caplog): def test_ohlcv_fill_up_missing_data2(caplog): timeframe = '5m' - ticks = [[ + ticks = [ + [ 1511686200000, # 8:50:00 8.794e-05, # open 8.948e-05, # high @@ -106,7 +109,7 @@ def test_ohlcv_fill_up_missing_data2(caplog): 8.895e-05, 8.817e-05, 123551 - ] + ] ] # Generate test-data without filling missing @@ -133,7 +136,8 @@ def test_ohlcv_fill_up_missing_data2(caplog): def test_ohlcv_drop_incomplete(caplog): timeframe = '1d' - ticks = [[ + ticks = [ + [ 1559750400000, # 2019-06-04 8.794e-05, # open 8.948e-05, # high @@ -164,7 +168,7 @@ def test_ohlcv_drop_incomplete(caplog): 8.895e-05, 8.817e-05, 123551 - ] + ] ] caplog.set_level(logging.DEBUG) data = ohlcv_to_dataframe(ticks, timeframe, pair="UNITTEST/BTC", @@ -287,42 +291,56 @@ def test_convert_trades_format(default_conf, testdatadir, tmpdir): file['new'].unlink() -def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir): +@pytest.mark.parametrize('file_base,candletype', [ + (['XRP_ETH-5m', 'XRP_ETH-1m'], CandleType.SPOT), + (['UNITTEST_USDT-1h-mark', 'XRP_USDT-1h-mark'], CandleType.MARK), + (['XRP_USDT-1h-futures'], CandleType.FUTURES), +]) +def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir, file_base, candletype): tmpdir1 = Path(tmpdir) + prependix = '' if candletype == CandleType.SPOT else 'futures/' + files_orig = [] + files_temp = [] + files_new = [] + for file in 
file_base: + file_orig = testdatadir / f"{prependix}{file}.json" + file_temp = tmpdir1 / f"{prependix}{file}.json" + file_new = tmpdir1 / f"{prependix}{file}.json.gz" + IDataHandler.create_dir_if_needed(file_temp) + copyfile(file_orig, file_temp) - file1_orig = testdatadir / "XRP_ETH-5m.json" - file1 = tmpdir1 / "XRP_ETH-5m.json" - file1_new = tmpdir1 / "XRP_ETH-5m.json.gz" - file2_orig = testdatadir / "XRP_ETH-1m.json" - file2 = tmpdir1 / "XRP_ETH-1m.json" - file2_new = tmpdir1 / "XRP_ETH-1m.json.gz" - - copyfile(file1_orig, file1) - copyfile(file2_orig, file2) + files_orig.append(file_orig) + files_temp.append(file_temp) + files_new.append(file_new) default_conf['datadir'] = tmpdir1 - default_conf['pairs'] = ['XRP_ETH'] - default_conf['timeframes'] = ['1m', '5m'] + default_conf['pairs'] = ['XRP_ETH', 'XRP_USDT', 'UNITTEST_USDT'] + default_conf['timeframes'] = ['1m', '5m', '1h'] - assert not file1_new.exists() - assert not file2_new.exists() + assert not file_new.exists() - convert_ohlcv_format(default_conf, convert_from='json', - convert_to='jsongz', erase=False) - - assert file1_new.exists() - assert file2_new.exists() - assert file1.exists() - assert file2.exists() + convert_ohlcv_format( + default_conf, + convert_from='json', + convert_to='jsongz', + erase=False, + candle_type=candletype + ) + for file in (files_temp + files_new): + assert file.exists() # Remove original files - file1.unlink() - file2.unlink() + for file in (files_temp): + file.unlink() # Convert back - convert_ohlcv_format(default_conf, convert_from='jsongz', - convert_to='json', erase=True) - - assert file1.exists() - assert file2.exists() - assert not file1_new.exists() - assert not file2_new.exists() + convert_ohlcv_format( + default_conf, + convert_from='jsongz', + convert_to='json', + erase=True, + candle_type=candletype + ) + for file in (files_temp): + assert file.exists() + for file in (files_new): + assert not file.exists() diff --git a/tests/data/test_dataprovider.py b/tests/data/test_dataprovider.py index 0f42068c1..93f82de5d 100644 --- a/tests/data/test_dataprovider.py +++ b/tests/data/test_dataprovider.py @@ -5,40 +5,49 @@ import pytest from pandas import DataFrame from freqtrade.data.dataprovider import DataProvider -from freqtrade.enums import RunMode +from freqtrade.enums import CandleType, RunMode from freqtrade.exceptions import ExchangeError, OperationalException from freqtrade.plugins.pairlistmanager import PairListManager from tests.conftest import get_patched_exchange -def test_ohlcv(mocker, default_conf, ohlcv_history): +@pytest.mark.parametrize('candle_type', [ + 'mark', + '', +]) +def test_dp_ohlcv(mocker, default_conf, ohlcv_history, candle_type): default_conf["runmode"] = RunMode.DRY_RUN timeframe = default_conf["timeframe"] exchange = get_patched_exchange(mocker, default_conf) - exchange._klines[("XRP/BTC", timeframe)] = ohlcv_history - exchange._klines[("UNITTEST/BTC", timeframe)] = ohlcv_history + candletype = CandleType.from_string(candle_type) + exchange._klines[("XRP/BTC", timeframe, candletype)] = ohlcv_history + exchange._klines[("UNITTEST/BTC", timeframe, candletype)] = ohlcv_history dp = DataProvider(default_conf, exchange) assert dp.runmode == RunMode.DRY_RUN - assert ohlcv_history.equals(dp.ohlcv("UNITTEST/BTC", timeframe)) - assert isinstance(dp.ohlcv("UNITTEST/BTC", timeframe), DataFrame) - assert dp.ohlcv("UNITTEST/BTC", timeframe) is not ohlcv_history - assert dp.ohlcv("UNITTEST/BTC", timeframe, copy=False) is ohlcv_history - assert not dp.ohlcv("UNITTEST/BTC", 
timeframe).empty - assert dp.ohlcv("NONESENSE/AAA", timeframe).empty + assert ohlcv_history.equals(dp.ohlcv("UNITTEST/BTC", timeframe, candle_type=candletype)) + assert isinstance(dp.ohlcv("UNITTEST/BTC", timeframe, candle_type=candletype), DataFrame) + assert dp.ohlcv("UNITTEST/BTC", timeframe, candle_type=candletype) is not ohlcv_history + assert dp.ohlcv("UNITTEST/BTC", timeframe, copy=False, candle_type=candletype) is ohlcv_history + assert not dp.ohlcv("UNITTEST/BTC", timeframe, candle_type=candletype).empty + assert dp.ohlcv("NONESENSE/AAA", timeframe, candle_type=candletype).empty # Test with and without parameter - assert dp.ohlcv("UNITTEST/BTC", timeframe).equals(dp.ohlcv("UNITTEST/BTC")) + assert dp.ohlcv( + "UNITTEST/BTC", + timeframe, + candle_type=candletype + ).equals(dp.ohlcv("UNITTEST/BTC", candle_type=candle_type)) default_conf["runmode"] = RunMode.LIVE dp = DataProvider(default_conf, exchange) assert dp.runmode == RunMode.LIVE - assert isinstance(dp.ohlcv("UNITTEST/BTC", timeframe), DataFrame) + assert isinstance(dp.ohlcv("UNITTEST/BTC", timeframe, candle_type=candle_type), DataFrame) default_conf["runmode"] = RunMode.BACKTEST dp = DataProvider(default_conf, exchange) assert dp.runmode == RunMode.BACKTEST - assert dp.ohlcv("UNITTEST/BTC", timeframe).empty + assert dp.ohlcv("UNITTEST/BTC", timeframe, candle_type=candle_type).empty def test_historic_ohlcv(mocker, default_conf, ohlcv_history): @@ -77,37 +86,50 @@ def test_historic_ohlcv_dataformat(mocker, default_conf, ohlcv_history): jsonloadmock.assert_not_called() -def test_get_pair_dataframe(mocker, default_conf, ohlcv_history): +@pytest.mark.parametrize('candle_type', [ + 'mark', + 'futures', + '', +]) +def test_get_pair_dataframe(mocker, default_conf, ohlcv_history, candle_type): default_conf["runmode"] = RunMode.DRY_RUN timeframe = default_conf["timeframe"] exchange = get_patched_exchange(mocker, default_conf) - exchange._klines[("XRP/BTC", timeframe)] = ohlcv_history - exchange._klines[("UNITTEST/BTC", timeframe)] = ohlcv_history + candletype = CandleType.from_string(candle_type) + exchange._klines[("XRP/BTC", timeframe, candletype)] = ohlcv_history + exchange._klines[("UNITTEST/BTC", timeframe, candletype)] = ohlcv_history dp = DataProvider(default_conf, exchange) assert dp.runmode == RunMode.DRY_RUN - assert ohlcv_history.equals(dp.get_pair_dataframe("UNITTEST/BTC", timeframe)) - assert isinstance(dp.get_pair_dataframe("UNITTEST/BTC", timeframe), DataFrame) - assert dp.get_pair_dataframe("UNITTEST/BTC", timeframe) is not ohlcv_history - assert not dp.get_pair_dataframe("UNITTEST/BTC", timeframe).empty - assert dp.get_pair_dataframe("NONESENSE/AAA", timeframe).empty + assert ohlcv_history.equals(dp.get_pair_dataframe( + "UNITTEST/BTC", timeframe, candle_type=candle_type)) + assert ohlcv_history.equals(dp.get_pair_dataframe( + "UNITTEST/BTC", timeframe, candle_type=candletype)) + assert isinstance(dp.get_pair_dataframe( + "UNITTEST/BTC", timeframe, candle_type=candle_type), DataFrame) + assert dp.get_pair_dataframe("UNITTEST/BTC", timeframe, + candle_type=candle_type) is not ohlcv_history + assert not dp.get_pair_dataframe("UNITTEST/BTC", timeframe, candle_type=candle_type).empty + assert dp.get_pair_dataframe("NONESENSE/AAA", timeframe, candle_type=candle_type).empty # Test with and without parameter - assert dp.get_pair_dataframe("UNITTEST/BTC", timeframe)\ - .equals(dp.get_pair_dataframe("UNITTEST/BTC")) + assert dp.get_pair_dataframe("UNITTEST/BTC", timeframe, candle_type=candle_type)\ + 
.equals(dp.get_pair_dataframe("UNITTEST/BTC", candle_type=candle_type)) default_conf["runmode"] = RunMode.LIVE dp = DataProvider(default_conf, exchange) assert dp.runmode == RunMode.LIVE - assert isinstance(dp.get_pair_dataframe("UNITTEST/BTC", timeframe), DataFrame) - assert dp.get_pair_dataframe("NONESENSE/AAA", timeframe).empty + assert isinstance(dp.get_pair_dataframe( + "UNITTEST/BTC", timeframe, candle_type=candle_type), DataFrame) + assert dp.get_pair_dataframe("NONESENSE/AAA", timeframe, candle_type=candle_type).empty historymock = MagicMock(return_value=ohlcv_history) mocker.patch("freqtrade.data.dataprovider.load_pair_history", historymock) default_conf["runmode"] = RunMode.BACKTEST dp = DataProvider(default_conf, exchange) assert dp.runmode == RunMode.BACKTEST - assert isinstance(dp.get_pair_dataframe("UNITTEST/BTC", timeframe), DataFrame) + assert isinstance(dp.get_pair_dataframe( + "UNITTEST/BTC", timeframe, candle_type=candle_type), DataFrame) # assert dp.get_pair_dataframe("NONESENSE/AAA", timeframe).empty @@ -230,8 +252,8 @@ def test_get_analyzed_dataframe(mocker, default_conf, ohlcv_history): exchange = get_patched_exchange(mocker, default_conf) dp = DataProvider(default_conf, exchange) - dp._set_cached_df("XRP/BTC", timeframe, ohlcv_history) - dp._set_cached_df("UNITTEST/BTC", timeframe, ohlcv_history) + dp._set_cached_df("XRP/BTC", timeframe, ohlcv_history, CandleType.SPOT) + dp._set_cached_df("UNITTEST/BTC", timeframe, ohlcv_history, CandleType.SPOT) assert dp.runmode == RunMode.DRY_RUN dataframe, time = dp.get_analyzed_dataframe("UNITTEST/BTC", timeframe) @@ -276,7 +298,7 @@ def test_no_exchange_mode(default_conf): dp.refresh([()]) with pytest.raises(OperationalException, match=message): - dp.ohlcv('XRP/USDT', '5m') + dp.ohlcv('XRP/USDT', '5m', '') with pytest.raises(OperationalException, match=message): dp.market('XRP/USDT') diff --git a/tests/data/test_history.py b/tests/data/test_history.py index 73ceabbbf..678a0b31b 100644 --- a/tests/data/test_history.py +++ b/tests/data/test_history.py @@ -1,6 +1,7 @@ # pragma pylint: disable=missing-docstring, protected-access, C0103 import json +import re import uuid from pathlib import Path from shutil import copyfile @@ -23,6 +24,7 @@ from freqtrade.data.history.history_utils import (_download_pair_history, _downl validate_backtest_data) from freqtrade.data.history.idatahandler import IDataHandler, get_datahandler, get_datahandlerclass from freqtrade.data.history.jsondatahandler import JsonDataHandler, JsonGzDataHandler +from freqtrade.enums import CandleType from freqtrade.exchange import timeframe_to_minutes from freqtrade.misc import file_dump_json from freqtrade.resolvers import StrategyResolver @@ -95,6 +97,17 @@ def test_load_data_1min_timeframe(ohlcv_history, mocker, caplog, testdatadir) -> ) +def test_load_data_mark(ohlcv_history, mocker, caplog, testdatadir) -> None: + mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=ohlcv_history) + file = testdatadir / 'futures/UNITTEST_USDT-1h-mark.json' + load_data(datadir=testdatadir, timeframe='1h', pairs=['UNITTEST/BTC'], candle_type='mark') + assert file.is_file() + assert not log_has( + 'Download history data for pair: "UNITTEST/USDT", interval: 1m ' + 'and store in None.', caplog + ) + + def test_load_data_startup_candles(mocker, caplog, default_conf, testdatadir) -> None: ltfmock = mocker.patch( 'freqtrade.data.history.jsondatahandler.JsonDataHandler._ohlcv_load', @@ -110,8 +123,9 @@ def test_load_data_startup_candles(mocker, caplog, 
default_conf, testdatadir) -> assert ltfmock.call_args_list[0][1]['timerange'].startts == timerange.startts - 20 * 60 +@pytest.mark.parametrize('candle_type', ['mark', '']) def test_load_data_with_new_pair_1min(ohlcv_history_list, mocker, caplog, - default_conf, tmpdir) -> None: + default_conf, tmpdir, candle_type) -> None: """ Test load_pair_history() with 1 min timeframe """ @@ -121,7 +135,7 @@ def test_load_data_with_new_pair_1min(ohlcv_history_list, mocker, caplog, file = tmpdir1 / 'MEME_BTC-1m.json' # do not download a new pair if refresh_pairs isn't set - load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC') + load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC', candle_type=candle_type) assert not file.is_file() assert log_has( 'No history data for pair: "MEME/BTC", timeframe: 1m. ' @@ -131,7 +145,7 @@ def test_load_data_with_new_pair_1min(ohlcv_history_list, mocker, caplog, # download a new pair if refresh_pairs is set refresh_data(datadir=tmpdir1, timeframe='1m', pairs=['MEME/BTC'], exchange=exchange) - load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC') + load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC', candle_type=candle_type) assert file.is_file() assert log_has_re( r'Download history data for pair: "MEME/BTC" \(0/1\), timeframe: 1m ' @@ -143,19 +157,31 @@ def test_testdata_path(testdatadir) -> None: assert str(Path('tests') / 'testdata') in str(testdatadir) -@pytest.mark.parametrize("pair,expected_result", [ - ("ETH/BTC", 'freqtrade/hello/world/ETH_BTC-5m.json'), - ("Fabric Token/ETH", 'freqtrade/hello/world/Fabric_Token_ETH-5m.json'), - ("ETHH20", 'freqtrade/hello/world/ETHH20-5m.json'), - (".XBTBON2H", 'freqtrade/hello/world/_XBTBON2H-5m.json'), - ("ETHUSD.d", 'freqtrade/hello/world/ETHUSD_d-5m.json'), - ("ACC_OLD/BTC", 'freqtrade/hello/world/ACC_OLD_BTC-5m.json'), +@pytest.mark.parametrize("pair,expected_result,candle_type", [ + ("ETH/BTC", 'freqtrade/hello/world/ETH_BTC-5m.json', ""), + ("Fabric Token/ETH", 'freqtrade/hello/world/Fabric_Token_ETH-5m.json', ""), + ("ETHH20", 'freqtrade/hello/world/ETHH20-5m.json', ""), + (".XBTBON2H", 'freqtrade/hello/world/_XBTBON2H-5m.json', ""), + ("ETHUSD.d", 'freqtrade/hello/world/ETHUSD_d-5m.json', ""), + ("ACC_OLD/BTC", 'freqtrade/hello/world/ACC_OLD_BTC-5m.json', ""), + ("ETH/BTC", 'freqtrade/hello/world/futures/ETH_BTC-5m-mark.json', "mark"), + ("ACC_OLD/BTC", 'freqtrade/hello/world/futures/ACC_OLD_BTC-5m-index.json', "index"), ]) -def test_json_pair_data_filename(pair, expected_result): - fn = JsonDataHandler._pair_data_filename(Path('freqtrade/hello/world'), pair, '5m') +def test_json_pair_data_filename(pair, expected_result, candle_type): + fn = JsonDataHandler._pair_data_filename( + Path('freqtrade/hello/world'), + pair, + '5m', + CandleType.from_string(candle_type) + ) assert isinstance(fn, Path) assert fn == Path(expected_result) - fn = JsonGzDataHandler._pair_data_filename(Path('freqtrade/hello/world'), pair, '5m') + fn = JsonGzDataHandler._pair_data_filename( + Path('freqtrade/hello/world'), + pair, + '5m', + candle_type=CandleType.from_string(candle_type) + ) assert isinstance(fn, Path) assert fn == Path(expected_result + '.gz') @@ -229,24 +255,38 @@ def test_load_cached_data_for_updating(mocker, testdatadir) -> None: assert start_ts is None -def test_download_pair_history(ohlcv_history_list, mocker, default_conf, tmpdir) -> None: +@pytest.mark.parametrize('candle_type,subdir,file_tail', [ + ('mark', 'futures/', '-mark'), + ('spot', '', ''), +]) +def 
test_download_pair_history( + ohlcv_history_list, + mocker, + default_conf, + tmpdir, + candle_type, + subdir, + file_tail +) -> None: mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=ohlcv_history_list) exchange = get_patched_exchange(mocker, default_conf) tmpdir1 = Path(tmpdir) - file1_1 = tmpdir1 / 'MEME_BTC-1m.json' - file1_5 = tmpdir1 / 'MEME_BTC-5m.json' - file2_1 = tmpdir1 / 'CFI_BTC-1m.json' - file2_5 = tmpdir1 / 'CFI_BTC-5m.json' + file1_1 = tmpdir1 / f'{subdir}MEME_BTC-1m{file_tail}.json' + file1_5 = tmpdir1 / f'{subdir}MEME_BTC-5m{file_tail}.json' + file2_1 = tmpdir1 / f'{subdir}CFI_BTC-1m{file_tail}.json' + file2_5 = tmpdir1 / f'{subdir}CFI_BTC-5m{file_tail}.json' assert not file1_1.is_file() assert not file2_1.is_file() assert _download_pair_history(datadir=tmpdir1, exchange=exchange, pair='MEME/BTC', - timeframe='1m') + timeframe='1m', + candle_type=candle_type) assert _download_pair_history(datadir=tmpdir1, exchange=exchange, pair='CFI/BTC', - timeframe='1m') + timeframe='1m', + candle_type=candle_type) assert not exchange._pairs_last_refresh_time assert file1_1.is_file() assert file2_1.is_file() @@ -260,10 +300,12 @@ def test_download_pair_history(ohlcv_history_list, mocker, default_conf, tmpdir) assert _download_pair_history(datadir=tmpdir1, exchange=exchange, pair='MEME/BTC', - timeframe='5m') + timeframe='5m', + candle_type=candle_type) assert _download_pair_history(datadir=tmpdir1, exchange=exchange, pair='CFI/BTC', - timeframe='5m') + timeframe='5m', + candle_type=candle_type) assert not exchange._pairs_last_refresh_time assert file1_5.is_file() assert file2_5.is_file() @@ -283,7 +325,9 @@ def test_download_pair_history2(mocker, default_conf, testdatadir) -> None: timeframe='1m') _download_pair_history(datadir=testdatadir, exchange=exchange, pair="UNITTEST/BTC", timeframe='3m') - assert json_dump_mock.call_count == 2 + _download_pair_history(datadir=testdatadir, exchange=exchange, pair="UNITTEST/USDT", + timeframe='1h', candle_type='mark') + assert json_dump_mock.call_count == 3 def test_download_backtesting_data_exception(mocker, caplog, default_conf, tmpdir) -> None: @@ -443,7 +487,13 @@ def test_validate_backtest_data(default_conf, mocker, caplog, testdatadir) -> No assert len(caplog.record_tuples) == 0 -def test_refresh_backtest_ohlcv_data(mocker, default_conf, markets, caplog, testdatadir): +@pytest.mark.parametrize('trademode,callcount', [ + ('spot', 4), + ('margin', 4), + ('futures', 6), +]) +def test_refresh_backtest_ohlcv_data( + mocker, default_conf, markets, caplog, testdatadir, trademode, callcount): dl_mock = mocker.patch('freqtrade.data.history.history_utils._download_pair_history', MagicMock()) mocker.patch( @@ -456,10 +506,11 @@ def test_refresh_backtest_ohlcv_data(mocker, default_conf, markets, caplog, test timerange = TimeRange.parse_timerange("20190101-20190102") refresh_backtest_ohlcv_data(exchange=ex, pairs=["ETH/BTC", "XRP/BTC"], timeframes=["1m", "5m"], datadir=testdatadir, - timerange=timerange, erase=True + timerange=timerange, erase=True, + trading_mode=trademode ) - assert dl_mock.call_count == 4 + assert dl_mock.call_count == callcount assert dl_mock.call_args[1]['timerange'].starttype == 'date' assert log_has("Downloading pair ETH/BTC, interval 1m.", caplog) @@ -477,7 +528,8 @@ def test_download_data_no_markets(mocker, default_conf, caplog, testdatadir): unav_pairs = refresh_backtest_ohlcv_data(exchange=ex, pairs=["BTT/BTC", "LTC/USDT"], timeframes=["1m", "5m"], datadir=testdatadir, - timerange=timerange, 
erase=False + timerange=timerange, erase=False, + trading_mode='spot' ) assert dl_mock.call_count == 0 @@ -605,33 +657,99 @@ def test_convert_trades_to_ohlcv(testdatadir, tmpdir, caplog): def test_datahandler_ohlcv_get_pairs(testdatadir): - pairs = JsonDataHandler.ohlcv_get_pairs(testdatadir, '5m') + pairs = JsonDataHandler.ohlcv_get_pairs(testdatadir, '5m', candle_type=CandleType.SPOT) # Convert to set to avoid failures due to sorting assert set(pairs) == {'UNITTEST/BTC', 'XLM/BTC', 'ETH/BTC', 'TRX/BTC', 'LTC/BTC', 'XMR/BTC', 'ZEC/BTC', 'ADA/BTC', 'ETC/BTC', 'NXT/BTC', 'DASH/BTC', 'XRP/ETH'} - pairs = JsonGzDataHandler.ohlcv_get_pairs(testdatadir, '8m') + pairs = JsonGzDataHandler.ohlcv_get_pairs(testdatadir, '8m', candle_type=CandleType.SPOT) assert set(pairs) == {'UNITTEST/BTC'} - pairs = HDF5DataHandler.ohlcv_get_pairs(testdatadir, '5m') + pairs = HDF5DataHandler.ohlcv_get_pairs(testdatadir, '5m', candle_type=CandleType.SPOT) assert set(pairs) == {'UNITTEST/BTC'} + pairs = JsonDataHandler.ohlcv_get_pairs(testdatadir, '1h', candle_type=CandleType.MARK) + assert set(pairs) == {'UNITTEST/USDT', 'XRP/USDT'} + + pairs = JsonGzDataHandler.ohlcv_get_pairs(testdatadir, '1h', candle_type=CandleType.FUTURES) + assert set(pairs) == {'XRP/USDT'} + + pairs = HDF5DataHandler.ohlcv_get_pairs(testdatadir, '1h', candle_type=CandleType.MARK) + assert set(pairs) == {'UNITTEST/USDT'} + + +@pytest.mark.parametrize('filename,pair,timeframe,candletype', [ + ('XMR_BTC-5m.json', 'XMR_BTC', '5m', ''), + ('XMR_USDT-1h.h5', 'XMR_USDT', '1h', ''), + ('BTC-PERP-1h.h5', 'BTC-PERP', '1h', ''), + ('BTC_USDT-2h.jsongz', 'BTC_USDT', '2h', ''), + ('BTC_USDT-2h-mark.jsongz', 'BTC_USDT', '2h', 'mark'), + ('XMR_USDT-1h-mark.h5', 'XMR_USDT', '1h', 'mark'), + ('XMR_USDT-1h-random.h5', 'XMR_USDT', '1h', 'random'), + ('BTC-PERP-1h-index.h5', 'BTC-PERP', '1h', 'index'), + ('XMR_USDT_USDT-1h-mark.h5', 'XMR_USDT_USDT', '1h', 'mark'), +]) +def test_datahandler_ohlcv_regex(filename, pair, timeframe, candletype): + regex = JsonDataHandler._OHLCV_REGEX + + match = re.search(regex, filename) + assert len(match.groups()) > 1 + assert match[1] == pair + assert match[2] == timeframe + assert match[3] == candletype + + +@pytest.mark.parametrize('input,expected', [ + ('XMR_USDT', 'XMR/USDT'), + ('BTC_USDT', 'BTC/USDT'), + ('USDT_BUSD', 'USDT/BUSD'), + ('BTC_USDT_USDT', 'BTC/USDT:USDT'), # Futures + ('XRP_USDT_USDT', 'XRP/USDT:USDT'), # futures + ('BTC-PERP', 'BTC-PERP'), + ('BTC-PERP_USDT', 'BTC-PERP:USDT'), # potential FTX case + ('UNITTEST_USDT', 'UNITTEST/USDT'), +]) +def test_rebuild_pair_from_filename(input, expected): + + assert IDataHandler.rebuild_pair_from_filename(input) == expected + def test_datahandler_ohlcv_get_available_data(testdatadir): - paircombs = JsonDataHandler.ohlcv_get_available_data(testdatadir) + paircombs = JsonDataHandler.ohlcv_get_available_data(testdatadir, 'spot') # Convert to set to avoid failures due to sorting - assert set(paircombs) == {('UNITTEST/BTC', '5m'), ('ETH/BTC', '5m'), ('XLM/BTC', '5m'), - ('TRX/BTC', '5m'), ('LTC/BTC', '5m'), ('XMR/BTC', '5m'), - ('ZEC/BTC', '5m'), ('UNITTEST/BTC', '1m'), ('ADA/BTC', '5m'), - ('ETC/BTC', '5m'), ('NXT/BTC', '5m'), ('DASH/BTC', '5m'), - ('XRP/ETH', '1m'), ('XRP/ETH', '5m'), ('UNITTEST/BTC', '30m'), - ('UNITTEST/BTC', '8m'), ('NOPAIR/XXX', '4m')} + assert set(paircombs) == { + ('UNITTEST/BTC', '5m', CandleType.SPOT), + ('ETH/BTC', '5m', CandleType.SPOT), + ('XLM/BTC', '5m', CandleType.SPOT), + ('TRX/BTC', '5m', CandleType.SPOT), + ('LTC/BTC', '5m', 
CandleType.SPOT), + ('XMR/BTC', '5m', CandleType.SPOT), + ('ZEC/BTC', '5m', CandleType.SPOT), + ('UNITTEST/BTC', '1m', CandleType.SPOT), + ('ADA/BTC', '5m', CandleType.SPOT), + ('ETC/BTC', '5m', CandleType.SPOT), + ('NXT/BTC', '5m', CandleType.SPOT), + ('DASH/BTC', '5m', CandleType.SPOT), + ('XRP/ETH', '1m', CandleType.SPOT), + ('XRP/ETH', '5m', CandleType.SPOT), + ('UNITTEST/BTC', '30m', CandleType.SPOT), + ('UNITTEST/BTC', '8m', CandleType.SPOT), + ('NOPAIR/XXX', '4m', CandleType.SPOT), + } - paircombs = JsonGzDataHandler.ohlcv_get_available_data(testdatadir) - assert set(paircombs) == {('UNITTEST/BTC', '8m')} - paircombs = HDF5DataHandler.ohlcv_get_available_data(testdatadir) - assert set(paircombs) == {('UNITTEST/BTC', '5m')} + paircombs = JsonDataHandler.ohlcv_get_available_data(testdatadir, 'futures') + # Convert to set to avoid failures due to sorting + assert set(paircombs) == { + ('UNITTEST/USDT', '1h', 'mark'), + ('XRP/USDT', '1h', 'futures'), + ('XRP/USDT', '1h', 'mark'), + } + + paircombs = JsonGzDataHandler.ohlcv_get_available_data(testdatadir, 'spot') + assert set(paircombs) == {('UNITTEST/BTC', '8m', CandleType.SPOT)} + paircombs = HDF5DataHandler.ohlcv_get_available_data(testdatadir, 'spot') + assert set(paircombs) == {('UNITTEST/BTC', '5m', CandleType.SPOT)} def test_jsondatahandler_trades_get_pairs(testdatadir): @@ -644,21 +762,29 @@ def test_jsondatahandler_ohlcv_purge(mocker, testdatadir): mocker.patch.object(Path, "exists", MagicMock(return_value=False)) unlinkmock = mocker.patch.object(Path, "unlink", MagicMock()) dh = JsonGzDataHandler(testdatadir) - assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m') + assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '') + assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark') assert unlinkmock.call_count == 0 mocker.patch.object(Path, "exists", MagicMock(return_value=True)) - assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m') - assert unlinkmock.call_count == 1 + assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '') + assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark') + assert unlinkmock.call_count == 2 def test_jsondatahandler_ohlcv_load(testdatadir, caplog): dh = JsonDataHandler(testdatadir) - df = dh.ohlcv_load('XRP/ETH', '5m') + df = dh.ohlcv_load('XRP/ETH', '5m', 'spot') assert len(df) == 711 + df_mark = dh.ohlcv_load('UNITTEST/USDT', '1h', candle_type="mark") + assert len(df_mark) == 99 + + df_no_mark = dh.ohlcv_load('UNITTEST/USDT', '1h', 'spot') + assert len(df_no_mark) == 0 + # Failure case (empty array) - df1 = dh.ohlcv_load('NOPAIR/XXX', '4m') + df1 = dh.ohlcv_load('NOPAIR/XXX', '4m', 'spot') assert len(df1) == 0 assert log_has("Could not load data for NOPAIR/XXX.", caplog) assert df.columns.equals(df1.columns) @@ -691,7 +817,9 @@ def test_jsondatahandler_trades_purge(mocker, testdatadir): def test_datahandler_ohlcv_append(datahandler, testdatadir, ): dh = get_datahandler(testdatadir, datahandler) with pytest.raises(NotImplementedError): - dh.ohlcv_append('UNITTEST/ETH', '5m', DataFrame()) + dh.ohlcv_append('UNITTEST/ETH', '5m', DataFrame(), CandleType.SPOT) + with pytest.raises(NotImplementedError): + dh.ohlcv_append('UNITTEST/ETH', '5m', DataFrame(), CandleType.MARK) @pytest.mark.parametrize('datahandler', AVAILABLE_DATAHANDLERS) @@ -773,35 +901,52 @@ def test_hdf5datahandler_trades_purge(mocker, testdatadir): assert unlinkmock.call_count == 1 -def test_hdf5datahandler_ohlcv_load_and_resave(testdatadir, tmpdir): 
+@pytest.mark.parametrize('pair,timeframe,candle_type,candle_append,startdt,enddt', [ + # Data goes from 2018-01-10 - 2018-01-30 + ('UNITTEST/BTC', '5m', 'spot', '', '2018-01-15', '2018-01-19'), + # Mark data goes from to 2021-11-15 2021-11-19 + ('UNITTEST/USDT', '1h', 'mark', '-mark', '2021-11-16', '2021-11-18'), +]) +def test_hdf5datahandler_ohlcv_load_and_resave( + testdatadir, + tmpdir, + pair, + timeframe, + candle_type, + candle_append, + startdt, enddt +): tmpdir1 = Path(tmpdir) + tmpdir2 = tmpdir1 + if candle_type not in ('', 'spot'): + tmpdir2 = tmpdir1 / 'futures' + tmpdir2.mkdir() dh = HDF5DataHandler(testdatadir) - ohlcv = dh.ohlcv_load('UNITTEST/BTC', '5m') + ohlcv = dh._ohlcv_load(pair, timeframe, None, candle_type=candle_type) assert isinstance(ohlcv, DataFrame) assert len(ohlcv) > 0 - file = tmpdir1 / 'UNITTEST_NEW-5m.h5' + file = tmpdir2 / f"UNITTEST_NEW-{timeframe}{candle_append}.h5" assert not file.is_file() dh1 = HDF5DataHandler(tmpdir1) - dh1.ohlcv_store('UNITTEST/NEW', '5m', ohlcv) + dh1.ohlcv_store('UNITTEST/NEW', timeframe, ohlcv, candle_type=candle_type) assert file.is_file() - assert not ohlcv[ohlcv['date'] < '2018-01-15'].empty + assert not ohlcv[ohlcv['date'] < startdt].empty - # Data gores from 2018-01-10 - 2018-01-30 - timerange = TimeRange.parse_timerange('20180115-20180119') + timerange = TimeRange.parse_timerange(f"{startdt.replace('-', '')}-{enddt.replace('-', '')}") # Call private function to ensure timerange is filtered in hdf5 - ohlcv = dh._ohlcv_load('UNITTEST/BTC', '5m', timerange) - ohlcv1 = dh1._ohlcv_load('UNITTEST/NEW', '5m', timerange) + ohlcv = dh._ohlcv_load(pair, timeframe, timerange, candle_type=candle_type) + ohlcv1 = dh1._ohlcv_load('UNITTEST/NEW', timeframe, timerange, candle_type=candle_type) assert len(ohlcv) == len(ohlcv1) assert ohlcv.equals(ohlcv1) - assert ohlcv[ohlcv['date'] < '2018-01-15'].empty - assert ohlcv[ohlcv['date'] > '2018-01-19'].empty + assert ohlcv[ohlcv['date'] < startdt].empty + assert ohlcv[ohlcv['date'] > enddt].empty # Try loading inexisting file - ohlcv = dh.ohlcv_load('UNITTEST/NONEXIST', '5m') + ohlcv = dh.ohlcv_load('UNITTEST/NONEXIST', timeframe, candle_type=candle_type) assert ohlcv.empty @@ -809,12 +954,14 @@ def test_hdf5datahandler_ohlcv_purge(mocker, testdatadir): mocker.patch.object(Path, "exists", MagicMock(return_value=False)) unlinkmock = mocker.patch.object(Path, "unlink", MagicMock()) dh = HDF5DataHandler(testdatadir) - assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m') + assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '') + assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark') assert unlinkmock.call_count == 0 mocker.patch.object(Path, "exists", MagicMock(return_value=True)) - assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m') - assert unlinkmock.call_count == 1 + assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '') + assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark') + assert unlinkmock.call_count == 2 def test_gethandlerclass(): diff --git a/tests/exchange/test_binance.py b/tests/exchange/test_binance.py index c4277daad..ac7647e73 100644 --- a/tests/exchange/test_binance.py +++ b/tests/exchange/test_binance.py @@ -343,7 +343,8 @@ def test__set_leverage_binance(mocker, default_conf): @pytest.mark.asyncio -async def test__async_get_historic_ohlcv_binance(default_conf, mocker, caplog): +@pytest.mark.parametrize('candle_type', ['mark', '']) +async def test__async_get_historic_ohlcv_binance(default_conf, mocker, caplog, candle_type): ohlcv = [ [ 
int((datetime.now(timezone.utc).timestamp() - 1000) * 1000), @@ -360,16 +361,17 @@ async def test__async_get_historic_ohlcv_binance(default_conf, mocker, caplog): exchange._api_async.fetch_ohlcv = get_mock_coro(ohlcv) pair = 'ETH/BTC' - respair, restf, res = await exchange._async_get_historic_ohlcv( - pair, "5m", 1500000000000, is_new_pair=False) + respair, restf, restype, res = await exchange._async_get_historic_ohlcv( + pair, "5m", 1500000000000, is_new_pair=False, candle_type=candle_type) assert respair == pair assert restf == '5m' + assert restype == candle_type # Call with very old timestamp - causes tons of requests assert exchange._api_async.fetch_ohlcv.call_count > 400 # assert res == ohlcv exchange._api_async.fetch_ohlcv.reset_mock() - _, _, res = await exchange._async_get_historic_ohlcv( - pair, "5m", 1500000000000, is_new_pair=True) + _, _, _, res = await exchange._async_get_historic_ohlcv( + pair, "5m", 1500000000000, is_new_pair=True, candle_type=candle_type) # Called twice - one "init" call - and one to get the actual data. assert exchange._api_async.fetch_ohlcv.call_count == 2 diff --git a/tests/exchange/test_ccxt_compat.py b/tests/exchange/test_ccxt_compat.py index ea0dc0fa4..8710463a6 100644 --- a/tests/exchange/test_ccxt_compat.py +++ b/tests/exchange/test_ccxt_compat.py @@ -11,6 +11,7 @@ from pathlib import Path import pytest +from freqtrade.enums import CandleType from freqtrade.exchange import timeframe_to_minutes, timeframe_to_prev_date from freqtrade.resolvers.exchange_resolver import ExchangeResolver from tests.conftest import get_default_conf_usdt @@ -51,14 +52,12 @@ EXCHANGES = { 'hasQuoteVolume': True, 'timeframe': '5m', 'futures': True, - 'futures_fundingrate_tf': '8h', 'futures_pair': 'BTC/USDT:USDT', }, 'okex': { 'pair': 'BTC/USDT', 'hasQuoteVolume': True, 'timeframe': '5m', - 'futures_fundingrate_tf': '8h', 'futures_pair': 'BTC/USDT:USDT', 'futures': True, }, @@ -182,7 +181,9 @@ class TestCCXTExchange(): exchange, exchangename = exchange pair = EXCHANGES[exchangename]['pair'] timeframe = EXCHANGES[exchangename]['timeframe'] - pair_tf = (pair, timeframe) + + pair_tf = (pair, timeframe, CandleType.SPOT) + ohlcv = exchange.refresh_latest_ohlcv([pair_tf]) assert isinstance(ohlcv, dict) assert len(ohlcv[pair_tf]) == len(exchange.klines(pair_tf)) @@ -193,7 +194,6 @@ class TestCCXTExchange(): now = datetime.now(timezone.utc) - timedelta(minutes=(timeframe_to_minutes(timeframe) * 2)) assert exchange.klines(pair_tf).iloc[-1]['date'] >= timeframe_to_prev_date(timeframe, now) - @pytest.mark.skip("No futures support yet") def test_ccxt_fetch_funding_rate_history(self, exchange_futures): # TODO-lev: enable this test once Futures mode is enabled. 
exchange, exchangename = exchange_futures @@ -206,12 +206,32 @@ class TestCCXTExchange(): rate = exchange.get_funding_rate_history(pair, since) assert isinstance(rate, dict) - expected_tf = EXCHANGES[exchangename].get('futures_fundingrate_tf', '1h') + + expected_tf = exchange._ft_has['mark_ohlcv_timeframe'] this_hour = timeframe_to_prev_date(expected_tf) prev_tick = timeframe_to_prev_date(expected_tf, this_hour - timedelta(minutes=1)) assert rate[int(this_hour.timestamp() * 1000)] != 0.0 assert rate[int(prev_tick.timestamp() * 1000)] != 0.0 + @pytest.mark.skip("No futures support yet") + def test_fetch_mark_price_history(self, exchange_futures): + exchange, exchangename = exchange_futures + if not exchange: + # exchange_futures only returns values for supported exchanges + return + pair = EXCHANGES[exchangename].get('futures_pair', EXCHANGES[exchangename]['pair']) + since = int((datetime.now(timezone.utc) - timedelta(days=5)).timestamp() * 1000) + + mark_candles = exchange._get_mark_price_history(pair, since) + + assert isinstance(mark_candles, dict) + expected_tf = '1h' + + this_hour = timeframe_to_prev_date(expected_tf) + prev_tick = timeframe_to_prev_date(expected_tf, this_hour - timedelta(minutes=1)) + assert mark_candles[int(this_hour.timestamp() * 1000)] != 0.0 + assert mark_candles[int(prev_tick.timestamp() * 1000)] != 0.0 + # TODO: tests fetch_trades (?) def test_ccxt_get_fee(self, exchange): diff --git a/tests/exchange/test_exchange.py b/tests/exchange/test_exchange.py index b92e54186..4632e6c56 100644 --- a/tests/exchange/test_exchange.py +++ b/tests/exchange/test_exchange.py @@ -11,7 +11,7 @@ import ccxt import pytest from pandas import DataFrame -from freqtrade.enums import Collateral, TradingMode +from freqtrade.enums import CandleType, Collateral, TradingMode from freqtrade.exceptions import (DDosProtection, DependencyException, InvalidOrderException, OperationalException, PricingError, TemporaryError) from freqtrade.exchange import Binance, Bittrex, Exchange, Kraken @@ -1560,7 +1560,8 @@ def test_fetch_ticker(default_conf, mocker, exchange_name): @pytest.mark.parametrize("exchange_name", EXCHANGES) -def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name): +@pytest.mark.parametrize('candle_type', ['mark', '']) +def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_type): exchange = get_patched_exchange(mocker, default_conf, id=exchange_name) ohlcv = [ [ @@ -1574,15 +1575,19 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name): ] pair = 'ETH/BTC' - async def mock_candle_hist(pair, timeframe, since_ms): - return pair, timeframe, ohlcv + async def mock_candle_hist(pair, timeframe, candle_type, since_ms): + return pair, timeframe, candle_type, ohlcv exchange._async_get_candle_history = Mock(wraps=mock_candle_hist) # one_call calculation * 1.8 should do 2 calls since = 5 * 60 * exchange.ohlcv_candle_limit('5m') * 1.8 - ret = exchange.get_historic_ohlcv(pair, "5m", int(( - arrow.utcnow().int_timestamp - since) * 1000)) + ret = exchange.get_historic_ohlcv( + pair, + "5m", + int((arrow.utcnow().int_timestamp - since) * 1000), + candle_type=candle_type + ) assert exchange._async_get_candle_history.call_count == 2 # Returns twice the above OHLCV data @@ -1595,13 +1600,18 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name): raise TimeoutError() exchange._async_get_candle_history = MagicMock(side_effect=mock_get_candle_hist_error) - ret = exchange.get_historic_ohlcv(pair, "5m", int( - 
(arrow.utcnow().int_timestamp - since) * 1000)) + ret = exchange.get_historic_ohlcv( + pair, + "5m", + int((arrow.utcnow().int_timestamp - since) * 1000), + candle_type=candle_type + ) assert log_has_re(r"Async code raised an exception: .*", caplog) @pytest.mark.parametrize("exchange_name", EXCHANGES) -def test_get_historic_ohlcv_as_df(default_conf, mocker, exchange_name): +@pytest.mark.parametrize('candle_type', ['mark', '']) +def test_get_historic_ohlcv_as_df(default_conf, mocker, exchange_name, candle_type): exchange = get_patched_exchange(mocker, default_conf, id=exchange_name) ohlcv = [ [ @@ -1631,15 +1641,19 @@ def test_get_historic_ohlcv_as_df(default_conf, mocker, exchange_name): ] pair = 'ETH/BTC' - async def mock_candle_hist(pair, timeframe, since_ms): - return pair, timeframe, ohlcv + async def mock_candle_hist(pair, timeframe, candle_type, since_ms): + return pair, timeframe, candle_type, ohlcv exchange._async_get_candle_history = Mock(wraps=mock_candle_hist) # one_call calculation * 1.8 should do 2 calls since = 5 * 60 * exchange.ohlcv_candle_limit('5m') * 1.8 - ret = exchange.get_historic_ohlcv_as_df(pair, "5m", int(( - arrow.utcnow().int_timestamp - since) * 1000)) + ret = exchange.get_historic_ohlcv_as_df( + pair, + "5m", + int((arrow.utcnow().int_timestamp - since) * 1000), + candle_type=candle_type + ) assert exchange._async_get_candle_history.call_count == 2 # Returns twice the above OHLCV data @@ -1653,6 +1667,7 @@ def test_get_historic_ohlcv_as_df(default_conf, mocker, exchange_name): @pytest.mark.asyncio @pytest.mark.parametrize("exchange_name", EXCHANGES) +# TODO-lev @pytest.mark.parametrize('candle_type', ['mark', '']) async def test__async_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name): ohlcv = [ [ @@ -1669,8 +1684,8 @@ async def test__async_get_historic_ohlcv(default_conf, mocker, caplog, exchange_ exchange._api_async.fetch_ohlcv = get_mock_coro(ohlcv) pair = 'ETH/USDT' - respair, restf, res = await exchange._async_get_historic_ohlcv( - pair, "5m", 1500000000000, is_new_pair=False) + respair, restf, _, res = await exchange._async_get_historic_ohlcv( + pair, "5m", 1500000000000, candle_type=CandleType.SPOT, is_new_pair=False) assert respair == pair assert restf == '5m' # Call with very old timestamp - causes tons of requests @@ -1678,6 +1693,7 @@ async def test__async_get_historic_ohlcv(default_conf, mocker, caplog, exchange_ assert res[0] == ohlcv[0] +# TODO-lev: @pytest.mark.parametrize('candle_type', ['mark', '']) def test_refresh_latest_ohlcv(mocker, default_conf, caplog) -> None: ohlcv = [ [ @@ -1702,7 +1718,7 @@ def test_refresh_latest_ohlcv(mocker, default_conf, caplog) -> None: exchange = get_patched_exchange(mocker, default_conf) exchange._api_async.fetch_ohlcv = get_mock_coro(ohlcv) - pairs = [('IOTA/ETH', '5m'), ('XRP/ETH', '5m')] + pairs = [('IOTA/ETH', '5m', ''), ('XRP/ETH', '5m', '')] # empty dicts assert not exchange._klines res = exchange.refresh_latest_ohlcv(pairs, cache=False) @@ -1733,23 +1749,25 @@ def test_refresh_latest_ohlcv(mocker, default_conf, caplog) -> None: assert exchange.klines(pair, copy=False) is exchange.klines(pair, copy=False) # test caching - res = exchange.refresh_latest_ohlcv([('IOTA/ETH', '5m'), ('XRP/ETH', '5m')]) + res = exchange.refresh_latest_ohlcv([('IOTA/ETH', '5m', ''), ('XRP/ETH', '5m', '')]) assert len(res) == len(pairs) assert exchange._api_async.fetch_ohlcv.call_count == 0 exchange.required_candle_call_count = 1 assert log_has(f"Using cached candle (OHLCV) data for pair {pairs[0][0]}, " - 
f"timeframe {pairs[0][1]} ...", + f"timeframe {pairs[0][1]}, candleType ...", caplog) - res = exchange.refresh_latest_ohlcv([('IOTA/ETH', '5m'), ('XRP/ETH', '5m'), ('XRP/ETH', '1d')], - cache=False) + res = exchange.refresh_latest_ohlcv( + [('IOTA/ETH', '5m', ''), ('XRP/ETH', '5m', ''), ('XRP/ETH', '1d', '')], + cache=False + ) assert len(res) == 3 assert exchange._api_async.fetch_ohlcv.call_count == 3 # Test the same again, should NOT return from cache! exchange._api_async.fetch_ohlcv.reset_mock() - res = exchange.refresh_latest_ohlcv([('IOTA/ETH', '5m'), ('XRP/ETH', '5m'), ('XRP/ETH', '1d')], - cache=False) + res = exchange.refresh_latest_ohlcv( + [('IOTA/ETH', '5m', ''), ('XRP/ETH', '5m', ''), ('XRP/ETH', '1d', '')], cache=False) assert len(res) == 3 assert exchange._api_async.fetch_ohlcv.call_count == 3 @@ -1774,33 +1792,35 @@ async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_ exchange._api_async.fetch_ohlcv = get_mock_coro(ohlcv) pair = 'ETH/BTC' - res = await exchange._async_get_candle_history(pair, "5m") + res = await exchange._async_get_candle_history(pair, "5m", CandleType.SPOT) assert type(res) is tuple - assert len(res) == 3 + assert len(res) == 4 assert res[0] == pair assert res[1] == "5m" - assert res[2] == ohlcv + assert res[2] == CandleType.SPOT + assert res[3] == ohlcv assert exchange._api_async.fetch_ohlcv.call_count == 1 assert not log_has(f"Using cached candle (OHLCV) data for {pair} ...", caplog) # exchange = Exchange(default_conf) await async_ccxt_exception(mocker, default_conf, MagicMock(), "_async_get_candle_history", "fetch_ohlcv", - pair='ABCD/BTC', timeframe=default_conf['timeframe']) + pair='ABCD/BTC', timeframe=default_conf['timeframe'], + candle_type=CandleType.SPOT) api_mock = MagicMock() with pytest.raises(OperationalException, match=r'Could not fetch historical candle \(OHLCV\) data.*'): api_mock.fetch_ohlcv = MagicMock(side_effect=ccxt.BaseError("Unknown error")) exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name) - await exchange._async_get_candle_history(pair, "5m", + await exchange._async_get_candle_history(pair, "5m", CandleType.SPOT, (arrow.utcnow().int_timestamp - 2000) * 1000) with pytest.raises(OperationalException, match=r'Exchange.* does not support fetching ' r'historical candle \(OHLCV\) data\..*'): api_mock.fetch_ohlcv = MagicMock(side_effect=ccxt.NotSupported("Not supported")) exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name) - await exchange._async_get_candle_history(pair, "5m", + await exchange._async_get_candle_history(pair, "5m", CandleType.SPOT, (arrow.utcnow().int_timestamp - 2000) * 1000) @@ -1816,12 +1836,13 @@ async def test__async_get_candle_history_empty(default_conf, mocker, caplog): exchange = Exchange(default_conf) pair = 'ETH/BTC' - res = await exchange._async_get_candle_history(pair, "5m") + res = await exchange._async_get_candle_history(pair, "5m", CandleType.SPOT) assert type(res) is tuple - assert len(res) == 3 + assert len(res) == 4 assert res[0] == pair assert res[1] == "5m" - assert res[2] == ohlcv + assert res[2] == CandleType.SPOT + assert res[3] == ohlcv assert exchange._api_async.fetch_ohlcv.call_count == 1 @@ -1838,7 +1859,7 @@ def test_refresh_latest_ohlcv_inv_result(default_conf, mocker, caplog): # Monkey-patch async function with empty result exchange._api_async.fetch_ohlcv = MagicMock(side_effect=mock_get_candle_hist) - pairs = [("ETH/BTC", "5m"), ("XRP/BTC", "5m")] + pairs = [("ETH/BTC", "5m", ''), ("XRP/BTC", "5m", '')] 
res = exchange.refresh_latest_ohlcv(pairs) assert exchange._klines assert exchange._api_async.fetch_ohlcv.call_count == 2 @@ -2120,9 +2141,10 @@ async def test___async_get_candle_history_sort(default_conf, mocker, exchange_na exchange._api_async.fetch_ohlcv = get_mock_coro(ohlcv) sort_mock = mocker.patch('freqtrade.exchange.exchange.sorted', MagicMock(side_effect=sort_data)) # Test the OHLCV data sort - res = await exchange._async_get_candle_history('ETH/BTC', default_conf['timeframe']) + res = await exchange._async_get_candle_history( + 'ETH/BTC', default_conf['timeframe'], CandleType.SPOT) assert res[0] == 'ETH/BTC' - res_ohlcv = res[2] + res_ohlcv = res[3] assert sort_mock.call_count == 1 assert res_ohlcv[0][0] == 1527830400000 @@ -2157,10 +2179,11 @@ async def test___async_get_candle_history_sort(default_conf, mocker, exchange_na # Reset sort mock sort_mock = mocker.patch('freqtrade.exchange.sorted', MagicMock(side_effect=sort_data)) # Test the OHLCV data sort - res = await exchange._async_get_candle_history('ETH/BTC', default_conf['timeframe']) + res = await exchange._async_get_candle_history( + 'ETH/BTC', default_conf['timeframe'], CandleType.SPOT) assert res[0] == 'ETH/BTC' assert res[1] == default_conf['timeframe'] - res_ohlcv = res[2] + res_ohlcv = res[3] # Sorted not called again - data is already in order assert sort_mock.call_count == 0 assert res_ohlcv[0][0] == 1527827700000 @@ -3014,7 +3037,7 @@ def test_timeframe_to_next_date(): def test_market_is_tradable( mocker, default_conf, market_symbol, base, quote, spot, margin, futures, trademode, add_dict, exchange, expected_result - ) -> None: +) -> None: default_conf['trading_mode'] = trademode mocker.patch('freqtrade.exchange.exchange.Exchange.validate_trading_mode_and_collateral') ex = get_patched_exchange(mocker, default_conf, id=exchange) diff --git a/tests/leverage/test_candletype.py b/tests/leverage/test_candletype.py new file mode 100644 index 000000000..ed7991d26 --- /dev/null +++ b/tests/leverage/test_candletype.py @@ -0,0 +1,27 @@ +import pytest + +from freqtrade.enums import CandleType + + +@pytest.mark.parametrize('input,expected', [ + ('', CandleType.SPOT), + ('spot', CandleType.SPOT), + (CandleType.SPOT, CandleType.SPOT), + (CandleType.FUTURES, CandleType.FUTURES), + (CandleType.INDEX, CandleType.INDEX), + (CandleType.MARK, CandleType.MARK), + ('futures', CandleType.FUTURES), + ('mark', CandleType.MARK), + ('premiumIndex', CandleType.PREMIUMINDEX), +]) +def test_CandleType_from_string(input, expected): + assert CandleType.from_string(input) == expected + + +@pytest.mark.parametrize('input,expected', [ + ('futures', CandleType.FUTURES), + ('spot', CandleType.SPOT), + ('margin', CandleType.SPOT), +]) +def test_CandleType_get_default(input, expected): + assert CandleType.get_default(input) == expected diff --git a/tests/optimize/test_backtesting.py b/tests/optimize/test_backtesting.py index 356479857..18996c883 100644 --- a/tests/optimize/test_backtesting.py +++ b/tests/optimize/test_backtesting.py @@ -926,8 +926,9 @@ def test_backtest_multi_pair(default_conf, fee, mocker, tres, pair, testdatadir) offset = 1 if tres == 0 else 0 removed_candles = len(data[pair]) - offset - backtesting.strategy.startup_candle_count assert len(backtesting.dataprovider.get_analyzed_dataframe(pair, '5m')[0]) == removed_candles - assert len(backtesting.dataprovider.get_analyzed_dataframe( - 'NXT/BTC', '5m')[0]) == len(data['NXT/BTC']) - 1 - backtesting.strategy.startup_candle_count + assert len( + 
backtesting.dataprovider.get_analyzed_dataframe('NXT/BTC', '5m')[0] + ) == len(data['NXT/BTC']) - 1 - backtesting.strategy.startup_candle_count backtest_conf = { 'processed': processed, diff --git a/tests/plugins/test_pairlist.py b/tests/plugins/test_pairlist.py index 30462f662..f70f2e388 100644 --- a/tests/plugins/test_pairlist.py +++ b/tests/plugins/test_pairlist.py @@ -7,7 +7,7 @@ import pytest import time_machine from freqtrade.constants import AVAILABLE_PAIRLISTS -from freqtrade.enums.runmode import RunMode +from freqtrade.enums import CandleType, RunMode from freqtrade.exceptions import OperationalException from freqtrade.persistence import Trade from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist @@ -461,11 +461,11 @@ def test_VolumePairList_whitelist_gen(mocker, whitelist_conf, shitcoinmarkets, t ohlcv_history_high_vola.loc[ohlcv_history_high_vola.index == 1, 'close'] = 0.00090 ohlcv_data = { - ('ETH/BTC', '1d'): ohlcv_history, - ('TKN/BTC', '1d'): ohlcv_history, - ('LTC/BTC', '1d'): ohlcv_history.append(ohlcv_history), - ('XRP/BTC', '1d'): ohlcv_history, - ('HOT/BTC', '1d'): ohlcv_history_high_vola, + ('ETH/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('TKN/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('LTC/BTC', '1d', CandleType.SPOT): ohlcv_history.append(ohlcv_history), + ('XRP/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('HOT/BTC', '1d', CandleType.SPOT): ohlcv_history_high_vola, } mocker.patch('freqtrade.exchange.Exchange.exchange_has', MagicMock(return_value=True)) @@ -579,11 +579,11 @@ def test_VolumePairList_range(mocker, whitelist_conf, shitcoinmarkets, tickers, ohlcv_history_high_volume.loc[:, 'volume'] = 10 ohlcv_data = { - ('ETH/BTC', '1d'): ohlcv_history, - ('TKN/BTC', '1d'): ohlcv_history, - ('LTC/BTC', '1d'): ohlcv_history_medium_volume, - ('XRP/BTC', '1d'): ohlcv_history_high_vola, - ('HOT/BTC', '1d'): ohlcv_history_high_volume, + ('ETH/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('TKN/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('LTC/BTC', '1d', CandleType.SPOT): ohlcv_history_medium_volume, + ('XRP/BTC', '1d', CandleType.SPOT): ohlcv_history_high_vola, + ('HOT/BTC', '1d', CandleType.SPOT): ohlcv_history_high_volume, } mocker.patch('freqtrade.exchange.Exchange.exchange_has', MagicMock(return_value=True)) @@ -855,9 +855,9 @@ def test_agefilter_min_days_listed_too_large(mocker, default_conf, markets, tick def test_agefilter_caching(mocker, markets, whitelist_conf_agefilter, tickers, ohlcv_history): with time_machine.travel("2021-09-01 05:00:00 +00:00") as t: ohlcv_data = { - ('ETH/BTC', '1d'): ohlcv_history, - ('TKN/BTC', '1d'): ohlcv_history, - ('LTC/BTC', '1d'): ohlcv_history, + ('ETH/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('TKN/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('LTC/BTC', '1d', CandleType.SPOT): ohlcv_history, } mocker.patch.multiple( 'freqtrade.exchange.Exchange', @@ -879,10 +879,10 @@ def test_agefilter_caching(mocker, markets, whitelist_conf_agefilter, tickers, o assert freqtrade.exchange.refresh_latest_ohlcv.call_count == 2 ohlcv_data = { - ('ETH/BTC', '1d'): ohlcv_history, - ('TKN/BTC', '1d'): ohlcv_history, - ('LTC/BTC', '1d'): ohlcv_history, - ('XRP/BTC', '1d'): ohlcv_history.iloc[[0]], + ('ETH/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('TKN/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('LTC/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('XRP/BTC', '1d', CandleType.SPOT): ohlcv_history.iloc[[0]], } mocker.patch('freqtrade.exchange.Exchange.refresh_latest_ohlcv', return_value=ohlcv_data) 
freqtrade.pairlists.refresh_pairlist() @@ -900,10 +900,10 @@ def test_agefilter_caching(mocker, markets, whitelist_conf_agefilter, tickers, o t.move_to("2021-09-03 01:00:00 +00:00") # Called once for XRP/BTC ohlcv_data = { - ('ETH/BTC', '1d'): ohlcv_history, - ('TKN/BTC', '1d'): ohlcv_history, - ('LTC/BTC', '1d'): ohlcv_history, - ('XRP/BTC', '1d'): ohlcv_history, + ('ETH/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('TKN/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('LTC/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('XRP/BTC', '1d', CandleType.SPOT): ohlcv_history, } mocker.patch('freqtrade.exchange.Exchange.refresh_latest_ohlcv', return_value=ohlcv_data) freqtrade.pairlists.refresh_pairlist() @@ -964,12 +964,12 @@ def test_rangestabilityfilter_caching(mocker, markets, default_conf, tickers, oh get_tickers=tickers ) ohlcv_data = { - ('ETH/BTC', '1d'): ohlcv_history, - ('TKN/BTC', '1d'): ohlcv_history, - ('LTC/BTC', '1d'): ohlcv_history, - ('XRP/BTC', '1d'): ohlcv_history, - ('HOT/BTC', '1d'): ohlcv_history, - ('BLK/BTC', '1d'): ohlcv_history, + ('ETH/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('TKN/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('LTC/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('XRP/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('HOT/BTC', '1d', CandleType.SPOT): ohlcv_history, + ('BLK/BTC', '1d', CandleType.SPOT): ohlcv_history, } mocker.patch.multiple( 'freqtrade.exchange.Exchange', diff --git a/tests/rpc/test_rpc_apiserver.py b/tests/rpc/test_rpc_apiserver.py index e023f76a2..8d25d1f5f 100644 --- a/tests/rpc/test_rpc_apiserver.py +++ b/tests/rpc/test_rpc_apiserver.py @@ -16,7 +16,7 @@ from numpy import isnan from requests.auth import _basic_auth_str from freqtrade.__init__ import __version__ -from freqtrade.enums import RunMode, State +from freqtrade.enums import CandleType, RunMode, State from freqtrade.exceptions import DependencyException, ExchangeError, OperationalException from freqtrade.loggers import setup_logging, setup_logging_pre from freqtrade.persistence import PairLocks, Trade @@ -706,9 +706,8 @@ def test_api_edge_disabled(botclient, mocker, ticker, fee, markets): assert rc.json() == {"error": "Error querying /api/v1/edge: Edge is not enabled."} -@pytest.mark.parametrize( - 'is_short,expected', - [( +@pytest.mark.parametrize('is_short,expected', [ + ( True, {'best_pair': 'ETC/BTC', 'best_rate': -0.5, 'best_pair_profit_ratio': -0.005, 'profit_all_coin': 43.61269123, @@ -720,8 +719,8 @@ def test_api_edge_disabled(botclient, mocker, ticker, fee, markets): 'profit_closed_percent_mean': -0.75, 'profit_closed_ratio_sum': -0.015, 'profit_closed_percent_sum': -1.5, 'profit_closed_ratio': -6.739057628404269e-06, 'profit_closed_percent': -0.0, 'winning_trades': 0, 'losing_trades': 2} - ), - ( + ), + ( False, {'best_pair': 'XRP/BTC', 'best_rate': 1.0, 'best_pair_profit_ratio': 0.01, 'profit_all_coin': -44.0631579, @@ -733,8 +732,8 @@ def test_api_edge_disabled(botclient, mocker, ticker, fee, markets): 'profit_closed_percent_mean': 0.75, 'profit_closed_ratio_sum': 0.015, 'profit_closed_percent_sum': 1.5, 'profit_closed_ratio': 7.391275897987988e-07, 'profit_closed_percent': 0.0, 'winning_trades': 2, 'losing_trades': 0} - ), - ( + ), + ( None, {'best_pair': 'XRP/BTC', 'best_rate': 1.0, 'best_pair_profit_ratio': 0.01, 'profit_all_coin': -14.43790415, @@ -746,8 +745,8 @@ def test_api_edge_disabled(botclient, mocker, ticker, fee, markets): 'profit_closed_percent_mean': 0.25, 'profit_closed_ratio_sum': 0.005, 'profit_closed_percent_sum': 0.5, 
'profit_closed_ratio': -5.429078808526421e-06, 'profit_closed_percent': -0.0, 'winning_trades': 1, 'losing_trades': 1} - ) - ]) + ) +]) def test_api_profit(botclient, mocker, ticker, fee, markets, is_short, expected): ftbot, client = botclient patch_get_signal(ftbot) @@ -1180,7 +1179,7 @@ def test_api_pair_candles(botclient, ohlcv_history): ohlcv_history['enter_short'] = 0 ohlcv_history['exit_short'] = 0 - ftbot.dataprovider._set_cached_df("XRP/BTC", timeframe, ohlcv_history) + ftbot.dataprovider._set_cached_df("XRP/BTC", timeframe, ohlcv_history, CandleType.SPOT) rc = client_get(client, f"{BASE_URI}/pair_candles?limit={amount}&pair=XRP%2FBTC&timeframe={timeframe}") @@ -1354,6 +1353,20 @@ def test_list_available_pairs(botclient): assert rc.json()['pairs'] == ['XRP/ETH'] assert len(rc.json()['pair_interval']) == 1 + ftbot.config['trading_mode'] = 'futures' + rc = client_get( + client, f"{BASE_URI}/available_pairs?timeframe=1h") + assert_response(rc) + assert rc.json()['length'] == 1 + assert rc.json()['pairs'] == ['XRP/USDT'] + + rc = client_get( + client, f"{BASE_URI}/available_pairs?timeframe=1h&candletype=mark") + assert_response(rc) + assert rc.json()['length'] == 2 + assert rc.json()['pairs'] == ['UNITTEST/USDT', 'XRP/USDT'] + assert len(rc.json()['pair_interval']) == 2 + def test_sysinfo(botclient): ftbot, client = botclient diff --git a/tests/strategy/strats/informative_decorator_strategy.py b/tests/strategy/strats/informative_decorator_strategy.py index e8d535c60..91c4642fa 100644 --- a/tests/strategy/strats/informative_decorator_strategy.py +++ b/tests/strategy/strats/informative_decorator_strategy.py @@ -19,6 +19,7 @@ class InformativeDecoratorTest(IStrategy): startup_candle_count: int = 20 def informative_pairs(self): + # Intentionally return 2 tuples, must be converted to 3 in compatibility code return [('NEO/USDT', '5m')] def populate_buy_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame: @@ -67,7 +68,7 @@ class InformativeDecoratorTest(IStrategy): dataframe['rsi_less'] = dataframe['rsi'] < dataframe['rsi_1h'] # Mixing manual informative pairs with decorators. 
- informative = self.dp.get_pair_dataframe('NEO/USDT', '5m') + informative = self.dp.get_pair_dataframe('NEO/USDT', '5m', '') informative['rsi'] = 14 dataframe = merge_informative_pair(dataframe, informative, self.timeframe, '5m', ffill=True) diff --git a/tests/strategy/test_interface.py b/tests/strategy/test_interface.py index 7115f7aab..61a07191d 100644 --- a/tests/strategy/test_interface.py +++ b/tests/strategy/test_interface.py @@ -11,8 +11,7 @@ from pandas import DataFrame from freqtrade.configuration import TimeRange from freqtrade.data.dataprovider import DataProvider from freqtrade.data.history import load_data -from freqtrade.enums import SellType -from freqtrade.enums.signaltype import SignalDirection +from freqtrade.enums import SellType, SignalDirection from freqtrade.exceptions import OperationalException, StrategyError from freqtrade.optimize.space import SKDecimal from freqtrade.persistence import PairLocks, Trade diff --git a/tests/strategy/test_strategy_helpers.py b/tests/strategy/test_strategy_helpers.py index 9e546869a..a1b6f57d5 100644 --- a/tests/strategy/test_strategy_helpers.py +++ b/tests/strategy/test_strategy_helpers.py @@ -5,6 +5,7 @@ import pandas as pd import pytest from freqtrade.data.dataprovider import DataProvider +from freqtrade.enums import CandleType from freqtrade.strategy import (merge_informative_pair, stoploss_from_absolute, stoploss_from_open, timeframe_to_minutes) from tests.conftest import get_patched_exchange @@ -145,23 +146,24 @@ def test_stoploss_from_absolute(): assert stoploss_from_absolute(0, 100) == 1 +# TODO-lev: @pytest.mark.parametrize('candle_type', ['mark', '']) def test_informative_decorator(mocker, default_conf): test_data_5m = generate_test_data('5m', 40) test_data_30m = generate_test_data('30m', 40) test_data_1h = generate_test_data('1h', 40) data = { - ('XRP/USDT', '5m'): test_data_5m, - ('XRP/USDT', '30m'): test_data_30m, - ('XRP/USDT', '1h'): test_data_1h, - ('LTC/USDT', '5m'): test_data_5m, - ('LTC/USDT', '30m'): test_data_30m, - ('LTC/USDT', '1h'): test_data_1h, - ('NEO/USDT', '30m'): test_data_30m, - ('NEO/USDT', '5m'): test_data_5m, - ('NEO/USDT', '1h'): test_data_1h, - ('ETH/USDT', '1h'): test_data_1h, - ('ETH/USDT', '30m'): test_data_30m, - ('ETH/BTC', '1h'): test_data_1h, + ('XRP/USDT', '5m', CandleType.SPOT): test_data_5m, + ('XRP/USDT', '30m', CandleType.SPOT): test_data_30m, + ('XRP/USDT', '1h', CandleType.SPOT): test_data_1h, + ('LTC/USDT', '5m', CandleType.SPOT): test_data_5m, + ('LTC/USDT', '30m', CandleType.SPOT): test_data_30m, + ('LTC/USDT', '1h', CandleType.SPOT): test_data_1h, + ('NEO/USDT', '30m', CandleType.SPOT): test_data_30m, + ('NEO/USDT', '5m', CandleType.SPOT): test_data_5m, + ('NEO/USDT', '1h', CandleType.SPOT): test_data_1h, + ('ETH/USDT', '1h', CandleType.SPOT): test_data_1h, + ('ETH/USDT', '30m', CandleType.SPOT): test_data_30m, + ('ETH/BTC', '1h', CandleType.SPOT): test_data_1h, } from .strats.informative_decorator_strategy import InformativeDecoratorTest default_conf['stake_currency'] = 'USDT' @@ -173,19 +175,27 @@ def test_informative_decorator(mocker, default_conf): ]) assert len(strategy._ft_informative) == 6 # Equal to number of decorators used - informative_pairs = [('XRP/USDT', '1h'), ('LTC/USDT', '1h'), ('XRP/USDT', '30m'), - ('LTC/USDT', '30m'), ('NEO/USDT', '1h'), ('NEO/USDT', '30m'), - ('NEO/USDT', '5m'), ('ETH/BTC', '1h'), ('ETH/USDT', '30m')] + informative_pairs = [ + ('XRP/USDT', '1h', CandleType.SPOT), + ('LTC/USDT', '1h', CandleType.SPOT), + ('XRP/USDT', '30m', CandleType.SPOT), + 
('LTC/USDT', '30m', CandleType.SPOT), + ('NEO/USDT', '1h', CandleType.SPOT), + ('NEO/USDT', '30m', CandleType.SPOT), + ('NEO/USDT', '5m', CandleType.SPOT), + ('ETH/BTC', '1h', CandleType.SPOT), + ('ETH/USDT', '30m', CandleType.SPOT)] for inf_pair in informative_pairs: assert inf_pair in strategy.gather_informative_pairs() - def test_historic_ohlcv(pair, timeframe): - return data[(pair, timeframe or strategy.timeframe)].copy() + def test_historic_ohlcv(pair, timeframe, candle_type): + return data[ + (pair, timeframe or strategy.timeframe, CandleType.from_string(candle_type))].copy() mocker.patch('freqtrade.data.dataprovider.DataProvider.historic_ohlcv', side_effect=test_historic_ohlcv) analyzed = strategy.advise_all_indicators( - {p: data[(p, strategy.timeframe)] for p in ('XRP/USDT', 'LTC/USDT')}) + {p: data[(p, strategy.timeframe, CandleType.SPOT)] for p in ('XRP/USDT', 'LTC/USDT')}) expected_columns = [ 'rsi_1h', 'rsi_30m', # Stacked informative decorators 'neo_usdt_rsi_1h', # NEO 1h informative diff --git a/tests/test_freqtradebot.py b/tests/test_freqtradebot.py index b581f2036..7c22078e2 100644 --- a/tests/test_freqtradebot.py +++ b/tests/test_freqtradebot.py @@ -11,7 +11,7 @@ import arrow import pytest from freqtrade.constants import CANCEL_REASON, MATH_CLOSE_PREC, UNLIMITED_STAKE_AMOUNT -from freqtrade.enums import RPCMessageType, RunMode, SellType, SignalDirection, State +from freqtrade.enums import CandleType, RPCMessageType, RunMode, SellType, SignalDirection, State from freqtrade.exceptions import (DependencyException, ExchangeError, InsufficientFundsError, InvalidOrderException, OperationalException, PricingError, TemporaryError) @@ -681,7 +681,10 @@ def test_process_informative_pairs_added(default_conf_usdt, ticker_usdt, mocker) create_order=MagicMock(side_effect=TemporaryError), refresh_latest_ohlcv=refresh_mock, ) - inf_pairs = MagicMock(return_value=[("BTC/ETH", '1m'), ("ETH/USDT", "1h")]) + inf_pairs = MagicMock(return_value=[ + ("BTC/ETH", '1m', CandleType.SPOT), + ("ETH/USDT", "1h", CandleType.SPOT) + ]) mocker.patch.multiple( 'freqtrade.strategy.interface.IStrategy', get_exit_signal=MagicMock(return_value=(False, False)), @@ -696,9 +699,10 @@ def test_process_informative_pairs_added(default_conf_usdt, ticker_usdt, mocker) freqtrade.process() assert inf_pairs.call_count == 1 assert refresh_mock.call_count == 1 - assert ("BTC/ETH", "1m") in refresh_mock.call_args[0][0] - assert ("ETH/USDT", "1h") in refresh_mock.call_args[0][0] - assert ("ETH/USDT", default_conf_usdt["timeframe"]) in refresh_mock.call_args[0][0] + assert ("BTC/ETH", "1m", CandleType.SPOT) in refresh_mock.call_args[0][0] + assert ("ETH/USDT", "1h", CandleType.SPOT) in refresh_mock.call_args[0][0] + assert ("ETH/USDT", default_conf_usdt["timeframe"], + CandleType.SPOT) in refresh_mock.call_args[0][0] @pytest.mark.parametrize("trading_mode", [ diff --git a/tests/test_misc.py b/tests/test_misc.py index de3f368e9..0d18117b6 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -69,14 +69,15 @@ def test_file_load_json(mocker, testdatadir) -> None: ("ETH/BTC", 'ETH_BTC'), ("ETH/USDT", 'ETH_USDT'), ("ETH/USDT:USDT", 'ETH_USDT_USDT'), # swap with USDT as settlement currency - ("ETH/USDT:USDT-210625", 'ETH_USDT_USDT_210625'), # expiring futures + ("ETH/USDT:USDT-210625", 'ETH_USDT_USDT-210625'), # expiring futures ("Fabric Token/ETH", 'Fabric_Token_ETH'), ("ETHH20", 'ETHH20'), (".XBTBON2H", '_XBTBON2H'), ("ETHUSD.d", 'ETHUSD_d'), - ("ADA-0327", 'ADA_0327'), - ("BTC-USD-200110", 'BTC_USD_200110'), - 
("F-AKRO/USDT", 'F_AKRO_USDT'), + ("ADA-0327", 'ADA-0327'), + ("BTC-USD-200110", 'BTC-USD-200110'), + ("BTC-PERP:USDT", 'BTC-PERP_USDT'), + ("F-AKRO/USDT", 'F-AKRO_USDT'), ("LC+/ETH", 'LC__ETH'), ("CMT@18/ETH", 'CMT_18_ETH'), ("LBTC:1022/SAI", 'LBTC_1022_SAI'), diff --git a/tests/testdata/futures/UNITTEST_USDT-1h-mark.h5 b/tests/testdata/futures/UNITTEST_USDT-1h-mark.h5 new file mode 100644 index 000000000..ce17eb9e1 Binary files /dev/null and b/tests/testdata/futures/UNITTEST_USDT-1h-mark.h5 differ diff --git a/tests/testdata/futures/UNITTEST_USDT-1h-mark.json b/tests/testdata/futures/UNITTEST_USDT-1h-mark.json new file mode 100644 index 000000000..312f616fb --- /dev/null +++ b/tests/testdata/futures/UNITTEST_USDT-1h-mark.json @@ -0,0 +1,102 @@ +[ + [1636959600000, 1.21431, 1.2198, 1.20895, 1.20895, null], + [1636963200000, 1.20902, 1.21106, 1.19972, 1.20968, null], + [1636966800000, 1.20968, 1.21876, 1.20791, 1.20998, null], + [1636970400000, 1.20999, 1.21043, 1.20442, 1.20859, null], + [1636974000000, 1.20858, 1.20933, 1.20154, 1.20581, null], + [1636977600000, 1.20584, 1.20775, 1.20065, 1.20337, null], + [1636981200000, 1.20342, 1.2097, 1.19327, 1.19792, null], + [1636984800000, 1.19796, 1.1982, 1.18611, 1.19024, null], + [1636988400000, 1.19025, 1.19177, 1.18373, 1.18771, null], + [1636992000000, 1.18768, 1.19109, 1.18095, 1.1887, null], + [1636995600000, 1.18869, 1.18968, 1.18355, 1.18387, null], + [1636999200000, 1.18388, 1.18729, 1.17753, 1.18138, null], + [1637002800000, 1.18145, 1.18684, 1.17799, 1.18463, null], + [1637006400000, 1.18464, 1.18474, 1.17368, 1.17652, null], + [1637010000000, 1.17653, 1.18185, 1.16557, 1.17979, null], + [1637013600000, 1.17979, 1.18113, 1.16934, 1.18014, null], + [1637017200000, 1.18014, 1.18015, 1.16999, 1.17214, null], + [1637020800000, 1.17214, 1.17217, 1.12958, 1.14209, null], + [1637024400000, 1.14255, 1.14666, 1.10933, 1.14198, null], + [1637028000000, 1.14197, 1.14419, 1.12766, 1.12999, null], + [1637031600000, 1.12999, 1.13522, 1.11142, 1.12177, null], + [1637035200000, 1.12176, 1.13211, 1.10579, 1.1288, null], + [1637038800000, 1.12871, 1.13243, 1.12142, 1.12316, null], + [1637042400000, 1.12323, 1.1262, 1.11489, 1.12429, null], + [1637046000000, 1.12406, 1.12727, 1.11835, 1.1249, null], + [1637049600000, 1.12485, 1.13047, 1.1211, 1.12931, null], + [1637053200000, 1.12931, 1.13346, 1.10256, 1.10267, null], + [1637056800000, 1.10266, 1.10412, 1.04149, 1.0928, null], + [1637060400000, 1.09277, 1.09856, 1.08371, 1.09093, null], + [1637064000000, 1.09094, 1.09512, 1.079, 1.08003, null], + [1637067600000, 1.0802, 1.09914, 1.08016, 1.09515, null], + [1637071200000, 1.09518, 1.11627, 1.0937, 1.10985, null], + [1637074800000, 1.10985, 1.11353, 1.09618, 1.10071, null], + [1637078400000, 1.09989, 1.10852, 1.09763, 1.10461, null], + [1637082000000, 1.10459, 1.10837, 1.09662, 1.09847, null], + [1637085600000, 1.09858, 1.10506, 1.08687, 1.08716, null], + [1637089200000, 1.08677, 1.10096, 1.08151, 1.09271, null], + [1637092800000, 1.09245, 1.09269, 1.06592, 1.08025, null], + [1637096400000, 1.08026, 1.09732, 1.07953, 1.09527, null], + [1637100000000, 1.09527, 1.10506, 1.09524, 1.09933, null], + [1637103600000, 1.09933, 1.10205, 1.08761, 1.08785, null], + [1637107200000, 1.08763, 1.09518, 1.07646, 1.07999, null], + [1637110800000, 1.07997, 1.0978, 1.07651, 1.07936, null], + [1637114400000, 1.07932, 1.08758, 1.07352, 1.07603, null], + [1637118000000, 1.07604, 1.08542, 1.05931, 1.06764, null], + [1637121600000, 1.06788, 1.07848, 1.06045, 1.07608, null], + 
[1637125200000, 1.07613, 1.08797, 1.07293, 1.08377, null], + [1637128800000, 1.08379, 1.08567, 1.07428, 1.07942, null], + [1637132400000, 1.07958, 1.09472, 1.07356, 1.08713, null], + [1637136000000, 1.08714, 1.09149, 1.08018, 1.08021, null], + [1637139600000, 1.08021, 1.08021, 1.0668, 1.07032, null], + [1637143200000, 1.07042, 1.10563, 1.07034, 1.10255, null], + [1637146800000, 1.10284, 1.10954, 1.09767, 1.10685, null], + [1637150400000, 1.10669, 1.10848, 1.10157, 1.10537, null], + [1637154000000, 1.10537, 1.11263, 1.09554, 1.09585, null], + [1637157600000, 1.09569, 1.10051, 1.08402, 1.08431, null], + [1637161200000, 1.08444, 1.08942, 1.07569, 1.08489, null], + [1637164800000, 1.08498, 1.09581, 1.07939, 1.09485, null], + [1637168400000, 1.09443, 1.09793, 1.08778, 1.0944, null], + [1637172000000, 1.09445, 1.10227, 1.09376, 1.0992, null], + [1637175600000, 1.0992, 1.10189, 1.09216, 1.09474, null], + [1637179200000, 1.09476, 1.10198, 1.09045, 1.0993, null], + [1637182800000, 1.09934, 1.09959, 1.08755, 1.0948, null], + [1637186400000, 1.09483, 1.09519, 1.08532, 1.0923, null], + [1637190000000, 1.0923, 1.09876, 1.0874, 1.095, null], + [1637193600000, 1.09503, 1.10673, 1.09047, 1.10441, null], + [1637197200000, 1.10437, 1.16166, 1.09815, 1.12902, null], + [1637200800000, 1.12875, 1.15094, 1.1242, 1.13764, null], + [1637204400000, 1.13795, 1.14262, 1.12341, 1.12423, null], + [1637208000000, 1.12424, 1.14806, 1.11333, 1.1142, null], + [1637211600000, 1.11435, 1.12608, 1.11085, 1.11436, null], + [1637215200000, 1.11398, 1.11718, 1.10538, 1.11388, null], + [1637218800000, 1.1139, 1.11452, 1.09674, 1.1072, null], + [1637222400000, 1.10725, 1.10999, 1.10209, 1.10706, null], + [1637226000000, 1.10712, 1.10712, 1.07747, 1.08658, null], + [1637229600000, 1.08692, 1.09865, 1.0807, 1.09767, null], + [1637233200000, 1.09768, 1.10211, 1.08348, 1.08409, null], + [1637236800000, 1.08423, 1.09498, 1.08002, 1.08259, null], + [1637240400000, 1.0827, 1.08773, 1.06597, 1.07719, null], + [1637244000000, 1.07718, 1.08075, 1.06678, 1.07077, null], + [1637247600000, 1.07029, 1.07824, 1.04568, 1.05497, null], + [1637251200000, 1.05591, 1.06325, 1.03957, 1.04032, null], + [1637254800000, 1.04051, 1.05342, 1.01557, 1.04158, null], + [1637258400000, 1.04153, 1.05436, 1.04122, 1.05208, null], + [1637262000000, 1.05207, 1.05948, 1.04961, 1.05515, null], + [1637265600000, 1.05516, 1.05927, 1.04767, 1.04808, null], + [1637269200000, 1.04789, 1.05622, 1.04191, 1.04587, null], + [1637272800000, 1.04575, 1.05336, 1.03405, 1.03941, null], + [1637276400000, 1.03931, 1.04614, 1.02868, 1.0411, null], + [1637280000000, 1.04093, 1.05672, 1.0295, 1.05495, null], + [1637283600000, 1.05495, 1.0553, 1.03548, 1.03595, null], + [1637287200000, 1.0359, 1.04585, 1.02026, 1.02312, null], + [1637290800000, 1.0242, 1.02908, 1.01788, 1.02871, null], + [1637294400000, 1.02871, 1.04474, 1.02584, 1.04247, null], + [1637298000000, 1.04251, 1.04654, 1.03685, 1.0449, null], + [1637301600000, 1.0449, 1.04971, 1.04109, 1.04452, null], + [1637305200000, 1.04456, 1.04875, 1.03802, 1.04268, null], + [1637308800000, 1.04239, 1.06573, 1.04164, 1.05717, null], + [1637312400000, 1.05721, 1.06464, 1.05619, 1.05896, null], + [1637316000000, 1.05893, 1.05918, 1.04976, 1.05188, null] +] diff --git a/tests/testdata/futures/XRP_USDT-1h-futures.json b/tests/testdata/futures/XRP_USDT-1h-futures.json new file mode 100644 index 000000000..58944e717 --- /dev/null +++ b/tests/testdata/futures/XRP_USDT-1h-futures.json @@ -0,0 +1,102 @@ +[ + [ 1637110800000, 1.0801, 1.09758, 
1.07654, 1.07925, 3153694.607359 ], + [ 1637114400000, 1.07896, 1.0875, 1.07351, 1.07616, 2697616.070908 ], + [ 1637118000000, 1.07607, 1.08521, 1.05896, 1.06804, 4014666.826073 ], + [ 1637121600000, 1.06848, 1.07846, 1.06067, 1.07629, 3764015.567745 ], + [ 1637125200000, 1.07647, 1.08791, 1.07309, 1.0839, 1669038.113726 ], + [ 1637128800000, 1.08414, 1.0856, 1.07431, 1.0794, 1921068.874499 ], + [ 1637132400000, 1.0798, 1.09499, 1.07363, 1.08721, 2491096.863582 ], + [ 1637136000000, 1.08688, 1.09133, 1.08004, 1.08011, 1983486.794272 ], + [ 1637139600000, 1.08017, 1.08027, 1.06667, 1.07039, 3429247.985309 ], + [ 1637143200000, 1.07054, 1.10699, 1.07038, 1.10284, 4554151.954177 ], + [ 1637146800000, 1.10315, 1.10989, 1.09781, 1.1071, 2012983.10465 ], + [ 1637150400000, 1.10627, 1.10849, 1.10155, 1.10539, 1117804.08918 ], + [ 1637154000000, 1.10545, 1.11299, 1.09574, 1.09604, 2252781.33926 ], + [ 1637157600000, 1.09583, 1.10037, 1.08402, 1.08404, 1882359.279342 ], + [ 1637161200000, 1.08433, 1.08924, 1.07583, 1.08543, 1826745.82579 ], + [ 1637164800000, 1.08571, 1.09622, 1.07946, 1.09496, 1651730.678891 ], + [ 1637168400000, 1.09509, 1.0979, 1.0878, 1.0945, 1081210.614598 ], + [ 1637172000000, 1.09483, 1.10223, 1.09362, 1.09922, 1065998.492028 ], + [ 1637175600000, 1.09916, 1.10201, 1.09226, 1.09459, 924935.492048 ], + [ 1637179200000, 1.09458, 1.10196, 1.09051, 1.09916, 1253539.625345 ], + [ 1637182800000, 1.09939, 1.09948, 1.08751, 1.09485, 1066269.190094 ], + [ 1637186400000, 1.0949, 1.095, 1.08537, 1.09229, 924726.680514 ], + [ 1637190000000, 1.0923, 1.09877, 1.08753, 1.09522, 1150213.905599 ], + [ 1637193600000, 1.09538, 1.10675, 1.09058, 1.10453, 1489867.578178 ], + [ 1637197200000, 1.10446, 1.16313, 1.0978, 1.12907, 10016166.026355 ], + [ 1637200800000, 1.1287, 1.15367, 1.12403, 1.1381, 7167920.053752 ], + [ 1637204400000, 1.13818, 1.14242, 1.12358, 1.1244, 2665326.190545 ], + [ 1637208000000, 1.12432, 1.14864, 1.11061, 1.11447, 9340547.947608 ], + [ 1637211600000, 1.114, 1.12618, 1.10911, 1.11412, 11759138.472952 ], + [ 1637215200000, 1.11381, 1.11701, 1.10507, 1.1136, 3104670.727264 ], + [ 1637218800000, 1.11433, 1.1145, 1.09682, 1.10715, 2522287.830673 ], + [ 1637222400000, 1.1073, 1.11, 1.10224, 1.10697, 2021691.204473 ], + [ 1637226000000, 1.10622, 1.10707, 1.07727, 1.08674, 3679010.223352 ], + [ 1637229600000, 1.08651, 1.09861, 1.08065, 1.09771, 2041421.476307 ], + [ 1637233200000, 1.09784, 1.102, 1.08339, 1.08399, 1920597.122813 ], + [ 1637236800000, 1.08458, 1.09523, 1.07961, 1.08263, 2403158.337373 ], + [ 1637240400000, 1.08309, 1.08959, 1.06094, 1.07703, 4425686.808376 ], + [ 1637244000000, 1.07702, 1.08064, 1.063, 1.07049, 3361334.048801 ], + [ 1637247600000, 1.07126, 1.07851, 1.04538, 1.0562, 5865602.611111 ], + [ 1637251200000, 1.05616, 1.06326, 1.0395, 1.04074, 4206860.947352 ], + [ 1637254800000, 1.04023, 1.0533, 1.01478, 1.0417, 5641193.647291 ], + [ 1637258400000, 1.04177, 1.05444, 1.04132, 1.05204, 1819341.083656 ], + [ 1637262000000, 1.05201, 1.05962, 1.04964, 1.05518, 1567923.362515 ], + [ 1637265600000, 1.05579, 1.05924, 1.04772, 1.04773, 1794108.065606 ], + [ 1637269200000, 1.0484, 1.05622, 1.04183, 1.04544, 1936537.403899 ], + [ 1637272800000, 1.04543, 1.05331, 1.03396, 1.03892, 2839486.418143 ], + [ 1637276400000, 1.03969, 1.04592, 1.02886, 1.04086, 3116275.899177 ], + [ 1637280000000, 1.0409, 1.05681, 1.02922, 1.05481, 4671209.916896 ], + [ 1637283600000, 1.05489, 1.05538, 1.03539, 1.03599, 2566357.247547 ], + [ 1637287200000, 1.03596, 1.04606, 1.02038, 
1.02428, 3441834.238546 ], + [ 1637290800000, 1.02483, 1.0291, 1.01785, 1.0285, 2678602.729339 ], + [ 1637294400000, 1.0287, 1.0446, 1.0259, 1.04264, 2303621.340808 ], + [ 1637298000000, 1.04313, 1.04676, 1.03662, 1.04499, 2426475.439485 ], + [ 1637301600000, 1.0451, 1.04971, 1.041, 1.04448, 2088365.810515 ], + [ 1637305200000, 1.04473, 1.04845, 1.03801, 1.04227, 2222396.213472 ], + [ 1637308800000, 1.04211, 1.06965, 1.04168, 1.05711, 3267643.936025 ], + [ 1637312400000, 1.0569, 1.06578, 1.05626, 1.05844, 1512848.016057 ], + [ 1637316000000, 1.05814, 1.05916, 1.04923, 1.05464, 1710694.805693 ], + [ 1637319600000, 1.05484, 1.05731, 1.0458, 1.05359, 1587100.45253 ], + [ 1637323200000, 1.05382, 1.06063, 1.05156, 1.05227, 1409095.236152 ], + [ 1637326800000, 1.05256, 1.06489, 1.04996, 1.06471, 1879315.174541 ], + [ 1637330400000, 1.06491, 1.1036, 1.06489, 1.09439, 6212842.71216 ], + [ 1637334000000, 1.09441, 1.10252, 1.082, 1.08879, 4833417.181969 ], + [ 1637337600000, 1.08866, 1.09485, 1.07538, 1.09045, 2554438.746366 ], + [ 1637341200000, 1.09058, 1.09906, 1.08881, 1.09039, 1961024.28963 ], + [ 1637344800000, 1.09063, 1.09447, 1.08555, 1.09041, 1427538.639232 ], + [ 1637348400000, 1.09066, 1.09521, 1.088, 1.09332, 847724.821691 ], + [ 1637352000000, 1.09335, 1.09489, 1.08402, 1.08501, 1035043.133874 ], + [ 1637355600000, 1.08474, 1.08694, 1.08, 1.08606, 969952.892274 ], + [ 1637359200000, 1.08601, 1.09, 1.08201, 1.08476, 1105782.581808 ], + [ 1637362800000, 1.08463, 1.09245, 1.08201, 1.08971, 1334467.438673 ], + [ 1637366400000, 1.0897, 1.09925, 1.08634, 1.09049, 2460070.020396 ], + [ 1637370000000, 1.0908, 1.10002, 1.09002, 1.09845, 1210028.489394 ], + [ 1637373600000, 1.09785, 1.09791, 1.08944, 1.08962, 1261987.295847 ], + [ 1637377200000, 1.08951, 1.0919, 1.08429, 1.08548, 1124938.783404 ], + [ 1637380800000, 1.08536, 1.09, 1.08424, 1.08783, 1330935.680168 ], + [ 1637384400000, 1.0877, 1.08969, 1.08266, 1.08617, 874900.746037 ], + [ 1637388000000, 1.08622, 1.09224, 1.0843, 1.0889, 1240184.759178 ], + [ 1637391600000, 1.08917, 1.0909, 1.08408, 1.08535, 706148.380072 ], + [ 1637395200000, 1.08521, 1.08857, 1.07829, 1.08349, 1713832.050838 ], + [ 1637398800000, 1.08343, 1.08841, 1.08272, 1.0855, 696597.06327 ], + [ 1637402400000, 1.08553, 1.0898, 1.08353, 1.08695, 1104159.802108 ], + [ 1637406000000, 1.08703, 1.09838, 1.08635, 1.09695, 1404001.384389 ], + [ 1637409600000, 1.09695, 1.10175, 1.09024, 1.09278, 1219090.620484 ], + [ 1637413200000, 1.093, 1.09577, 1.08615, 1.08792, 994797.546591 ], + [ 1637416800000, 1.08793, 1.09239, 1.08572, 1.08725, 1251685.429497 ], + [ 1637420400000, 1.08721, 1.08767, 1.06029, 1.06556, 3955719.53631 ], + [ 1637424000000, 1.06553, 1.07385, 1.06169, 1.07257, 1868359.179534 ], + [ 1637427600000, 1.07266, 1.0745, 1.06759, 1.07261, 1015134.469304 ], + [ 1637431200000, 1.07255, 1.0974, 1.06819, 1.09369, 4377675.964829 ], + [ 1637434800000, 1.09368, 1.09562, 1.08899, 1.09036, 914791.699929 ], + [ 1637438400000, 1.09085, 1.09262, 1.08855, 1.09214, 661436.936672 ], + [ 1637442000000, 1.0924, 1.09475, 1.08874, 1.09282, 593143.283519 ], + [ 1637445600000, 1.09301, 1.09638, 1.09154, 1.09611, 603952.916221 ], + [ 1637449200000, 1.09569, 1.09828, 1.09301, 1.09747, 676053.591571 ], + [ 1637452800000, 1.09742, 1.09822, 1.09011, 1.0902, 1375704.506469 ], + [ 1637456400000, 1.0901, 1.09311, 1.08619, 1.08856, 928706.03929 ], + [ 1637460000000, 1.08855, 1.08941, 1.07401, 1.08035, 2669150.388642 ], + [ 1637463600000, 1.08016, 1.08341, 1.07448, 1.07672, 1604049.131307 ], + [ 
1637467200000, 1.07685, 1.08229, 1.07552, 1.0765, 1153357.274076 ] +] diff --git a/tests/testdata/futures/XRP_USDT-1h-futures.json.gz b/tests/testdata/futures/XRP_USDT-1h-futures.json.gz new file mode 100644 index 000000000..f2a223b03 Binary files /dev/null and b/tests/testdata/futures/XRP_USDT-1h-futures.json.gz differ diff --git a/tests/testdata/futures/XRP_USDT-1h-mark.json b/tests/testdata/futures/XRP_USDT-1h-mark.json new file mode 100644 index 000000000..26703b945 --- /dev/null +++ b/tests/testdata/futures/XRP_USDT-1h-mark.json @@ -0,0 +1,102 @@ +[ + [1636956000000, 1.20932, 1.21787, 1.20763, 1.21431, null], + [1636959600000, 1.21431, 1.2198, 1.20895, 1.20895, null], + [1636963200000, 1.20902, 1.21106, 1.19972, 1.20968, null], + [1636966800000, 1.20968, 1.21876, 1.20791, 1.20998, null], + [1636970400000, 1.20999, 1.21043, 1.20442, 1.20859, null], + [1636974000000, 1.20858, 1.20933, 1.20154, 1.20581, null], + [1636977600000, 1.20584, 1.20775, 1.20065, 1.20337, null], + [1636981200000, 1.20342, 1.2097, 1.19327, 1.19792, null], + [1636984800000, 1.19796, 1.1982, 1.18611, 1.19024, null], + [1636988400000, 1.19025, 1.19177, 1.18373, 1.18771, null], + [1636992000000, 1.18768, 1.19109, 1.18095, 1.1887, null], + [1636995600000, 1.18869, 1.18968, 1.18355, 1.18387, null], + [1636999200000, 1.18388, 1.18729, 1.17753, 1.18138, null], + [1637002800000, 1.18145, 1.18684, 1.17799, 1.18463, null], + [1637006400000, 1.18464, 1.18474, 1.17368, 1.17652, null], + [1637010000000, 1.17653, 1.18185, 1.16557, 1.17979, null], + [1637013600000, 1.17979, 1.18113, 1.16934, 1.18014, null], + [1637017200000, 1.18014, 1.18015, 1.16999, 1.17214, null], + [1637020800000, 1.17214, 1.17217, 1.12958, 1.14209, null], + [1637024400000, 1.14255, 1.14666, 1.10933, 1.14198, null], + [1637028000000, 1.14197, 1.14419, 1.12766, 1.12999, null], + [1637031600000, 1.12999, 1.13522, 1.11142, 1.12177, null], + [1637035200000, 1.12176, 1.13211, 1.10579, 1.1288, null], + [1637038800000, 1.12871, 1.13243, 1.12142, 1.12316, null], + [1637042400000, 1.12323, 1.1262, 1.11489, 1.12429, null], + [1637046000000, 1.12406, 1.12727, 1.11835, 1.1249, null], + [1637049600000, 1.12485, 1.13047, 1.1211, 1.12931, null], + [1637053200000, 1.12931, 1.13346, 1.10256, 1.10267, null], + [1637056800000, 1.10266, 1.10412, 1.04149, 1.0928, null], + [1637060400000, 1.09277, 1.09856, 1.08371, 1.09093, null], + [1637064000000, 1.09094, 1.09512, 1.079, 1.08003, null], + [1637067600000, 1.0802, 1.09914, 1.08016, 1.09515, null], + [1637071200000, 1.09518, 1.11627, 1.0937, 1.10985, null], + [1637074800000, 1.10985, 1.11353, 1.09618, 1.10071, null], + [1637078400000, 1.09989, 1.10852, 1.09763, 1.10461, null], + [1637082000000, 1.10459, 1.10837, 1.09662, 1.09847, null], + [1637085600000, 1.09858, 1.10506, 1.08687, 1.08716, null], + [1637089200000, 1.08677, 1.10096, 1.08151, 1.09271, null], + [1637092800000, 1.09245, 1.09269, 1.06592, 1.08025, null], + [1637096400000, 1.08026, 1.09732, 1.07953, 1.09527, null], + [1637100000000, 1.09527, 1.10506, 1.09524, 1.09933, null], + [1637103600000, 1.09933, 1.10205, 1.08761, 1.08785, null], + [1637107200000, 1.08763, 1.09518, 1.07646, 1.07999, null], + [1637110800000, 1.07997, 1.0978, 1.07651, 1.07936, null], + [1637114400000, 1.07932, 1.08758, 1.07352, 1.07603, null], + [1637118000000, 1.07604, 1.08542, 1.05931, 1.06764, null], + [1637121600000, 1.06788, 1.07848, 1.06045, 1.07608, null], + [1637125200000, 1.07613, 1.08797, 1.07293, 1.08377, null], + [1637128800000, 1.08379, 1.08567, 1.07428, 1.07942, null], + [1637132400000, 
1.07958, 1.09472, 1.07356, 1.08713, null], + [1637136000000, 1.08714, 1.09149, 1.08018, 1.08021, null], + [1637139600000, 1.08021, 1.08021, 1.0668, 1.07032, null], + [1637143200000, 1.07042, 1.10563, 1.07034, 1.10255, null], + [1637146800000, 1.10284, 1.10954, 1.09767, 1.10685, null], + [1637150400000, 1.10669, 1.10848, 1.10157, 1.10537, null], + [1637154000000, 1.10537, 1.11263, 1.09554, 1.09585, null], + [1637157600000, 1.09569, 1.10051, 1.08402, 1.08431, null], + [1637161200000, 1.08444, 1.08942, 1.07569, 1.08489, null], + [1637164800000, 1.08498, 1.09581, 1.07939, 1.09485, null], + [1637168400000, 1.09443, 1.09793, 1.08778, 1.0944, null], + [1637172000000, 1.09445, 1.10227, 1.09376, 1.0992, null], + [1637175600000, 1.0992, 1.10189, 1.09216, 1.09474, null], + [1637179200000, 1.09476, 1.10198, 1.09045, 1.0993, null], + [1637182800000, 1.09934, 1.09959, 1.08755, 1.0948, null], + [1637186400000, 1.09483, 1.09519, 1.08532, 1.0923, null], + [1637190000000, 1.0923, 1.09876, 1.0874, 1.095, null], + [1637193600000, 1.09503, 1.10673, 1.09047, 1.10441, null], + [1637197200000, 1.10437, 1.16166, 1.09815, 1.12902, null], + [1637200800000, 1.12875, 1.15094, 1.1242, 1.13764, null], + [1637204400000, 1.13795, 1.14262, 1.12341, 1.12423, null], + [1637208000000, 1.12424, 1.14806, 1.11333, 1.1142, null], + [1637211600000, 1.11435, 1.12608, 1.11085, 1.11436, null], + [1637215200000, 1.11398, 1.11718, 1.10538, 1.11388, null], + [1637218800000, 1.1139, 1.11452, 1.09674, 1.1072, null], + [1637222400000, 1.10725, 1.10999, 1.10209, 1.10706, null], + [1637226000000, 1.10712, 1.10712, 1.07747, 1.08658, null], + [1637229600000, 1.08692, 1.09865, 1.0807, 1.09767, null], + [1637233200000, 1.09768, 1.10211, 1.08348, 1.08409, null], + [1637236800000, 1.08423, 1.09498, 1.08002, 1.08259, null], + [1637240400000, 1.0827, 1.08773, 1.06597, 1.07719, null], + [1637244000000, 1.07718, 1.08075, 1.06678, 1.07077, null], + [1637247600000, 1.07029, 1.07824, 1.04568, 1.05497, null], + [1637251200000, 1.05591, 1.06325, 1.03957, 1.04032, null], + [1637254800000, 1.04051, 1.05342, 1.01557, 1.04158, null], + [1637258400000, 1.04153, 1.05436, 1.04122, 1.05208, null], + [1637262000000, 1.05207, 1.05948, 1.04961, 1.05515, null], + [1637265600000, 1.05516, 1.05927, 1.04767, 1.04808, null], + [1637269200000, 1.04789, 1.05622, 1.04191, 1.04587, null], + [1637272800000, 1.04575, 1.05336, 1.03405, 1.03941, null], + [1637276400000, 1.03931, 1.04614, 1.02868, 1.0411, null], + [1637280000000, 1.04093, 1.05672, 1.0295, 1.05495, null], + [1637283600000, 1.05495, 1.0553, 1.03548, 1.03595, null], + [1637287200000, 1.0359, 1.04585, 1.02026, 1.02312, null], + [1637290800000, 1.0242, 1.02908, 1.01788, 1.02871, null], + [1637294400000, 1.02871, 1.04474, 1.02584, 1.04247, null], + [1637298000000, 1.04251, 1.04654, 1.03685, 1.0449, null], + [1637301600000, 1.0449, 1.04971, 1.04109, 1.04452, null], + [1637305200000, 1.04456, 1.04875, 1.03802, 1.04268, null], + [1637308800000, 1.04239, 1.06573, 1.04164, 1.05717, null], + [1637312400000, 1.05721, 1.06464, 1.05619, 1.06051, null] +]