Fix unnecessarily deep indentation

This commit is contained in:
Joe Schr
2024-02-06 20:13:23 +01:00
parent aa663b926a
commit c04cce52ea

View File

@@ -1996,7 +1996,8 @@ class Exchange:
logger.debug(
"one_call: %s msecs (%s)",
one_call,
dt_humanize(dt_now() - timedelta(milliseconds=one_call), only_distance=True)
dt_humanize(dt_now() - timedelta(milliseconds=one_call),
only_distance=True)
)
input_coroutines = [self._async_get_candle_history(
pair, timeframe, candle_type, since) for since in
@@ -2009,7 +2010,8 @@ class Exchange:
results = await asyncio.gather(*input_coro, return_exceptions=True)
for res in results:
if isinstance(res, Exception):
logger.warning(f"Async code raised an exception: {repr(res)}")
logger.warning(
f"Async code raised an exception: {repr(res)}")
if raise_:
raise
continue
@@ -2022,7 +2024,6 @@ class Exchange:
data = sorted(data, key=lambda x: x[0])
return pair, timeframe, candle_type, data, self._ohlcv_partial_candle
async def _async_get_historic_trades(self, pair: str, timeframe: str,
since_ms: int, candle_type: CandleType,
is_new_pair: bool = False, raise_: bool = False,
@@ -2062,7 +2063,7 @@ class Exchange:
if p == pair and c == candle_type:
data.extend(new_data)
# Sort data again after extending the result - above calls return in "async order"
data = sorted(data, key=lambda x: x['timestamp'])# TODO: sort via 'timestamp' or 'id'?
data = sorted(data, key=lambda x: x['timestamp']) # TODO: sort via 'timestamp' or 'id'?
return pair, timeframe, candle_type, data, self._ohlcv_partial_candle
def _build_coroutine_get_ohlcv(
@@ -2071,7 +2072,8 @@ class Exchange:
not_all_data = cache and self.required_candle_call_count > 1
if cache and (pair, timeframe, candle_type) in self._klines:
candle_limit = self.ohlcv_candle_limit(timeframe, candle_type)
min_date = date_minus_candles(timeframe, candle_limit - 5).timestamp()
min_date = date_minus_candles(
timeframe, candle_limit - 5).timestamp()
# Check if 1 call can get us updated candles without hole in the data.
if min_date < self._pairs_last_refresh_time.get((pair, timeframe, candle_type), 0):
# Cache can be used - do one-off call.
@@ -2084,7 +2086,7 @@ class Exchange:
if (not since_ms and (self._ft_has["ohlcv_require_since"] or not_all_data)):
# Multiple calls for one pair - to get more history
since_ms = self.needed_candle_ms(timeframe,candle_type)
since_ms = self.needed_candle_ms(timeframe, candle_type)
# TODO: fetch_trades and return as results
if since_ms:
@@ -2095,7 +2097,6 @@ class Exchange:
return self._async_get_candle_history(
pair, timeframe, since_ms=since_ms, candle_type=candle_type)
def _build_coroutine_get_trades(
self, pair: str, timeframe: str, candle_type: CandleType,
since_ms: Optional[int], cache: bool) -> Coroutine[Any, Any, OHLCVResponse]:
@@ -2129,7 +2130,6 @@ class Exchange:
return self._async_get_trades_history(
pair, timeframe, since_ms=since_ms, candle_type=candle_type)
def _build_ohlcv_dl_jobs(
self, pair_list: ListPairsWithTimeframes, since_ms: Optional[int],
cache: bool) -> Tuple[List[Coroutine], List[Tuple[str, str, CandleType]]]:
@@ -2161,7 +2161,6 @@ class Exchange:
return input_coroutines, cached_pairs
def _build_trades_dl_jobs(
self, pair_list: ListPairsWithTimeframes, since_ms: Optional[int],
cache: bool) -> Tuple[List[Coroutine], List[Tuple[str, str, CandleType]]]:
@@ -2212,7 +2211,8 @@ class Exchange:
cache: bool, drop_incomplete: bool) -> DataFrame:
# keeping last candle time as last refreshed time of the pair
if ticks and cache:
self._pairs_last_refresh_time[(pair, timeframe, c_type)] = ticks[-1][0] // 1000
self._pairs_last_refresh_time[(
pair, timeframe, c_type)] = ticks[-1][0] // 1000
# keeping parsed dataframe in cache
ohlcv_df = ohlcv_to_dataframe(ticks, timeframe, pair=pair, fill_missing=True,
drop_incomplete=drop_incomplete)
@@ -2222,9 +2222,11 @@ class Exchange:
# Reassign so we return the updated, combined df
ohlcv_df = clean_ohlcv_dataframe(concat([old, ohlcv_df], axis=0), timeframe, pair,
fill_missing=True, drop_incomplete=False)
candle_limit = self.ohlcv_candle_limit(timeframe, self._config['candle_type_def'])
candle_limit = self.ohlcv_candle_limit(
timeframe, self._config['candle_type_def'])
# Age out old candles
ohlcv_df = ohlcv_df.tail(candle_limit + self._startup_candle_count)
ohlcv_df = ohlcv_df.tail(
candle_limit + self._startup_candle_count)
ohlcv_df = ohlcv_df.reset_index(drop=True)
self._klines[(pair, timeframe, c_type)] = ohlcv_df
else:
@@ -2232,7 +2234,7 @@ class Exchange:
return ohlcv_df
def _process_trades_df(self, pair: str, timeframe: str, c_type: CandleType, ticks: List[List],
cache: bool, drop_incomplete: bool, first_required_candle_date:Optional[int]) -> DataFrame:
cache: bool, drop_incomplete: bool, first_required_candle_date: Optional[int]) -> DataFrame:
# keeping parsed dataframe in cache
# TODO: pass last_full_candle_date to drop as incomplete
trades_df = public_trades_to_dataframe(ticks, timeframe, pair=pair, fill_missing=False,
@@ -2245,12 +2247,14 @@ class Exchange:
if (pair, timeframe, c_type) in self._trades:
old = self._trades[(pair, timeframe, c_type)]
# Reassign so we return the updated, combined df
trades_df = clean_duplicate_trades(concat([old, trades_df], axis=0), timeframe, pair, fill_missing=False, drop_incomplete=False)
trades_df = clean_duplicate_trades(concat(
[old, trades_df], axis=0), timeframe, pair, fill_missing=False, drop_incomplete=False)
# warn_of_tick_duplicates(trades_df, pair)
# Age out old candles
if first_required_candle_date:
# slice of older dates
trades_df = trades_df[first_required_candle_date < trades_df['timestamp']]
trades_df = trades_df[first_required_candle_date <
trades_df['timestamp']]
trades_df = trades_df.reset_index(drop=True)
self._trades[(pair, timeframe, c_type)] = trades_df
return trades_df
@@ -2270,10 +2274,12 @@ class Exchange:
Specifying None defaults to _ohlcv_partial_candle
:return: Dict of [{(pair, timeframe): Dataframe}]
"""
logger.debug("Refreshing candle (OHLCV) data for %d pairs", len(pair_list))
logger.debug(
"Refreshing candle (OHLCV) data for %d pairs", len(pair_list))
# Gather coroutines to run
input_coroutines, cached_pairs = self._build_ohlcv_dl_jobs(pair_list, since_ms, cache)
input_coroutines, cached_pairs = self._build_ohlcv_dl_jobs(
pair_list, since_ms, cache)
results_df = {}
# Chunk requests into batches of 100 to avoid overwelming ccxt Throttling
@@ -2287,7 +2293,8 @@ class Exchange:
for res in results:
if isinstance(res, Exception):
logger.warning(f"Async code raised an exception: {repr(res)}")
logger.warning(
f"Async code raised an exception: {repr(res)}")
continue
# Deconstruct tuple (has 5 elements)
pair, timeframe, c_type, ticks, drop_hint = res
@@ -2307,7 +2314,7 @@ class Exchange:
return results_df
def needed_candle_ms(self, timeframe:str, candle_type:CandleType):
def needed_candle_ms(self, timeframe: str, candle_type:CandleType):
one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(
timeframe, candle_type)
move_to = one_call * self.required_candle_call_count
@@ -2315,8 +2322,8 @@ class Exchange:
return int((now - timedelta(seconds=move_to // 1000)).timestamp() * 1000)
def refresh_latest_trades(self,
pair_list: ListPairsWithTimeframes ,
data_handler: Callable,# using IDataHandler ends with circular import,
pair_list: ListPairsWithTimeframes,
data_handler: Callable, # using IDataHandler ends with circular import,
*,
cache: bool = True,
) -> Dict[PairWithTimeframe, DataFrame]: