chore: use candle_type as argument for parallel-download

This commit is contained in:
Matthias
2025-08-28 06:50:08 +02:00
parent e0aa660b56
commit 8c92f9407d

View File

@@ -399,7 +399,7 @@ def refresh_backtest_ohlcv_data(
             exchange=exchange,
             pairs=pairs,
             timeframe=timeframe,
-            trading_mode=trading_mode,
+            candle_type=candle_type,
             timerange=timerange,
         )
     )
@@ -461,7 +461,7 @@ def _download_all_pairs_history_parallel(
     exchange: Exchange,
     pairs: list[str],
     timeframe: str,
-    trading_mode: str,
+    candle_type: CandleType,
    timerange: TimeRange | None = None,
) -> dict[PairWithTimeframe, DataFrame]:
     """
@@ -476,7 +476,7 @@ def _download_all_pairs_history_parallel(
if timerange.starttype == "date": if timerange.starttype == "date":
since = timerange.startts * 1000 since = timerange.startts * 1000
candle_limit = exchange.ohlcv_candle_limit(timeframe, CandleType.get_default(trading_mode)) candle_limit = exchange.ohlcv_candle_limit(timeframe, candle_type)
one_call_min_time_dt = dt_ts(date_minus_candles(timeframe, candle_limit)) one_call_min_time_dt = dt_ts(date_minus_candles(timeframe, candle_limit))
# check if we can get all candles in one go, if so then we can download them in parallel # check if we can get all candles in one go, if so then we can download them in parallel
if since > one_call_min_time_dt: if since > one_call_min_time_dt:
@@ -485,7 +485,7 @@ def _download_all_pairs_history_parallel(
f"since {format_ms_time(since)}" f"since {format_ms_time(since)}"
) )
needed_pairs: ListPairsWithTimeframes = [ needed_pairs: ListPairsWithTimeframes = [
(p, timeframe, CandleType.get_default(trading_mode)) for p in [p for p in pairs] (p, timeframe, candle_type) for p in [p for p in pairs]
] ]
candles = exchange.refresh_latest_ohlcv(needed_pairs, since_ms=since, cache=False) candles = exchange.refresh_latest_ohlcv(needed_pairs, since_ms=since, cache=False)