Mirror of https://github.com/freqtrade/freqtrade.git, synced 2025-12-16 21:01:14 +00:00
Fix superfluous formatting
@@ -31,8 +31,7 @@ def ohlcv_to_dataframe(ohlcv: list, timeframe: str, pair: str, *,
     :param drop_incomplete: Drop the last candle of the dataframe, assuming it's incomplete
     :return: DataFrame
     """
-    logger.debug(
-        f"Converting candle (OHLCV) data to dataframe for pair {pair}.")
+    logger.debug(f"Converting candle (OHLCV) data to dataframe for pair {pair}.")
     cols = DEFAULT_DATAFRAME_COLUMNS
     df = DataFrame(ohlcv, columns=cols)
 
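For context on the input this hunk touches: ohlcv_to_dataframe consumes ccxt-style OHLCV rows. A minimal, self-contained sketch of the conversion the surrounding code performs (the sample rows are illustrative, and the column list is assumed to match DEFAULT_DATAFRAME_COLUMNS):

    from pandas import DataFrame, to_datetime

    ohlcv = [
        # [timestamp_ms, open, high, low, close, volume], as returned by ccxt
        [1672531200000, 16541.0, 16545.0, 16538.0, 16540.0, 12.3],
        [1672534800000, 16540.0, 16560.0, 16535.0, 16555.0, 9.8],
    ]
    cols = ['date', 'open', 'high', 'low', 'close', 'volume']  # assumed default columns
    df = DataFrame(ohlcv, columns=cols)
    df['date'] = to_datetime(df['date'], unit='ms', utc=True)
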
@@ -449,8 +448,7 @@ def ohlcv_fill_up_missing_data(dataframe: DataFrame, timeframe: str, pair: str)
     df.reset_index(inplace=True)
     len_before = len(dataframe)
     len_after = len(df)
-    pct_missing = (len_after - len_before) / \
-        len_before if len_before > 0 else 0
+    pct_missing = (len_after - len_before) / len_before if len_before > 0 else 0
     if len_before != len_after:
         message = (f"Missing data fillup for {pair}, {timeframe}: "
                    f"before: {len_before} - after: {len_after} - {pct_missing:.2%}")
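The joined pct_missing line leans on Python's conditional-expression precedence: division binds tighter than if/else, so the zero-division guard still applies to the whole quotient. A quick check with illustrative values:

    len_before, len_after = 100, 105
    # Parses as ((len_after - len_before) / len_before) if len_before > 0 else 0
    pct_missing = (len_after - len_before) / len_before if len_before > 0 else 0
    assert pct_missing == 0.05
    print(f"{pct_missing:.2%}")  # -> 5.00%
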
@@ -495,8 +493,7 @@ def trim_dataframes(preprocessed: Dict[str, DataFrame], timerange,
     processed: Dict[str, DataFrame] = {}
 
     for pair, df in preprocessed.items():
-        trimed_df = trim_dataframe(
-            df, timerange, startup_candles=startup_candles)
+        trimed_df = trim_dataframe(df, timerange, startup_candles=startup_candles)
         if not trimed_df.empty:
             processed[pair] = trimed_df
         else:
@@ -552,18 +549,15 @@ def convert_ohlcv_format(
     candle_types = [CandleType.from_string(ct) for ct in config.get('candle_types', [
         c.value for c in CandleType])]
     logger.info(candle_types)
-    paircombs = src.ohlcv_get_available_data(
-        config['datadir'], TradingMode.SPOT)
-    paircombs.extend(src.ohlcv_get_available_data(
-        config['datadir'], TradingMode.FUTURES))
+    paircombs = src.ohlcv_get_available_data(config['datadir'], TradingMode.SPOT)
+    paircombs.extend(src.ohlcv_get_available_data(config['datadir'], TradingMode.FUTURES))
 
     if 'pairs' in config:
         # Filter pairs
         paircombs = [comb for comb in paircombs if comb[0] in config['pairs']]
 
     if 'timeframes' in config:
-        paircombs = [comb for comb in paircombs if comb[1]
-                     in config['timeframes']]
+        paircombs = [comb for comb in paircombs if comb[1] in config['timeframes']]
     paircombs = [comb for comb in paircombs if comb[2] in candle_types]
 
     paircombs = sorted(paircombs, key=lambda x: (x[0], x[1], x[2].value))
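The comprehensions above filter (pair, timeframe, candle_type) tuples. A self-contained sketch with stand-in data (the sample pairs and the dummy CandleType enum are assumptions for illustration only):

    from enum import Enum

    class CandleType(str, Enum):  # stand-in for freqtrade.enums.CandleType
        SPOT = 'spot'
        FUTURES = 'futures'

    paircombs = [
        ('BTC/USDT', '5m', CandleType.SPOT),
        ('ETH/USDT', '1h', CandleType.FUTURES),
    ]
    config = {'pairs': ['BTC/USDT'], 'timeframes': ['5m']}
    paircombs = [comb for comb in paircombs if comb[0] in config['pairs']]
    paircombs = [comb for comb in paircombs if comb[1] in config['timeframes']]
    assert paircombs == [('BTC/USDT', '5m', CandleType.SPOT)]
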
@@ -580,8 +574,7 @@ def convert_ohlcv_format(
                               drop_incomplete=False,
                               startup_candles=0,
                               candle_type=candle_type)
-        logger.info(
-            f"Converting {len(data)} {timeframe} {candle_type} candles for {pair}")
+        logger.info(f"Converting {len(data)} {timeframe} {candle_type} candles for {pair}")
         if len(data) > 0:
             trg.ohlcv_store(
                 pair=pair,
@@ -591,8 +584,7 @@ def convert_ohlcv_format(
             )
         if erase and convert_from != convert_to:
             logger.info(f"Deleting source data for {pair} / {timeframe}")
-            src.ohlcv_purge(pair=pair, timeframe=timeframe,
-                            candle_type=candle_type)
+            src.ohlcv_purge(pair=pair, timeframe=timeframe, candle_type=candle_type)
 
 
 def reduce_dataframe_footprint(df: DataFrame) -> DataFrame:

@@ -46,27 +46,23 @@ class DataProvider:
         self._exchange = exchange
         self._pairlists = pairlists
         self.__rpc = rpc
-        self.__cached_pairs: Dict[PairWithTimeframe,
-                                  Tuple[DataFrame, datetime]] = {}
+        self.__cached_pairs: Dict[PairWithTimeframe, Tuple[DataFrame, datetime]] = {}
         self.__slice_index: Optional[int] = None
         self.__slice_date: Optional[datetime] = None
 
-        self.__cached_pairs_backtesting: Dict[PairWithTimeframe, DataFrame] = {
-        }
+        self.__cached_pairs_backtesting: Dict[PairWithTimeframe, DataFrame] = {}
         self.__producer_pairs_df: Dict[str,
                                        Dict[PairWithTimeframe, Tuple[DataFrame, datetime]]] = {}
         self.__producer_pairs: Dict[str, List[str]] = {}
         self._msg_queue: deque = deque()
 
-        self._default_candle_type = self._config.get(
-            'candle_type_def', CandleType.SPOT)
+        self._default_candle_type = self._config.get('candle_type_def', CandleType.SPOT)
         self._default_timeframe = self._config.get('timeframe', '1h')
 
         self.__msg_cache = PeriodicCache(
             maxsize=1000, ttl=timeframe_to_seconds(self._default_timeframe))
 
-        self.producers = self._config.get(
-            'external_message_consumer', {}).get('producers', [])
+        self.producers = self._config.get('external_message_consumer', {}).get('producers', [])
        self.external_data_enabled = len(self.producers) > 0
 
     def _set_dataframe_max_index(self, limit_index: int):
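The message-cache TTL above is derived from the default timeframe, so cached messages expire once per candle. A sketch of the equivalent arithmetic (this helper is a simplified stand-in for freqtrade's timeframe_to_seconds, not the library implementation):

    units = {'m': 60, 'h': 3600, 'd': 86400}

    def timeframe_to_seconds(timeframe: str) -> int:
        # Stand-in: handles plain '<n><unit>' timeframes only.
        return int(timeframe[:-1]) * units[timeframe[-1]]

    assert timeframe_to_seconds('1h') == 3600  # TTL for the '1h' default
    assert timeframe_to_seconds('5m') == 300
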
@@ -137,19 +133,19 @@ class DataProvider:
         """
         if self.__rpc:
             msg: RPCAnalyzedDFMsg = {
                 'type': RPCMessageType.ANALYZED_DF,
                 'data': {
                     'key': pair_key,
                     'df': dataframe.tail(1),
                     'la': datetime.now(timezone.utc)
+                }
             }
-            }
             self.__rpc.send_msg(msg)
             if new_candle:
                 self.__rpc.send_msg({
                     'type': RPCMessageType.NEW_CANDLE,
                     'data': pair_key,
                 })
 
     def _replace_external_df(
         self,
@@ -172,13 +168,10 @@ class DataProvider:
         if producer_name not in self.__producer_pairs_df:
             self.__producer_pairs_df[producer_name] = {}
 
-        _last_analyzed = datetime.now(
-            timezone.utc) if not last_analyzed else last_analyzed
+        _last_analyzed = datetime.now(timezone.utc) if not last_analyzed else last_analyzed
 
-        self.__producer_pairs_df[producer_name][pair_key] = (
-            dataframe, _last_analyzed)
-        logger.debug(
-            f"External DataFrame for {pair_key} from {producer_name} added.")
+        self.__producer_pairs_df[producer_name][pair_key] = (dataframe, _last_analyzed)
+        logger.debug(f"External DataFrame for {pair_key} from {producer_name} added.")
 
     def _add_external_df(
         self,
@@ -229,8 +222,7 @@ class DataProvider:
         # CHECK FOR MISSING CANDLES
         # Convert the timeframe to a timedelta for pandas
         timeframe_delta: Timedelta = to_timedelta(timeframe)
-        # We want the last date from our copy
-        local_last: Timestamp = existing_df.iloc[-1]['date']
+        local_last: Timestamp = existing_df.iloc[-1]['date']  # We want the last date from our copy
         # We want the first date from the incoming
         incoming_first: Timestamp = dataframe.iloc[0]['date']
 
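The surrounding method compares the last cached candle date with the first incoming one to detect gaps. A sketch of that arithmetic under assumed values (the candle_difference logic is reconstructed from the method's intent, not quoted from this commit):

    import pandas as pd

    timeframe_delta = pd.to_timedelta('5m')  # spacing of one 5-minute candle
    local_last = pd.Timestamp('2023-01-01 12:00', tz='UTC')      # last candle we hold
    incoming_first = pd.Timestamp('2023-01-01 12:15', tz='UTC')  # first incoming candle
    candle_difference = (incoming_first - local_last) / timeframe_delta
    assert candle_difference == 3.0  # anything > 1 means candles are missing in between
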
@@ -253,13 +245,13 @@ class DataProvider:
 
             # Everything is good, we appended
             self._replace_external_df(
                 pair,
                 appended_df,
                 last_analyzed=last_analyzed,
                 timeframe=timeframe,
                 candle_type=candle_type,
                 producer_name=producer_name
             )
             return (True, 0)
 
     def get_producer_df(
@@ -347,13 +339,10 @@ class DataProvider:
         startup_candles = self._config.get('startup_candle_count', 0)
         indicator_periods = freqai_config['feature_parameters']['indicator_periods_candles']
         # make sure the startupcandles is at least the set maximum indicator periods
-        self._config['startup_candle_count'] = max(
-            startup_candles, max(indicator_periods))
+        self._config['startup_candle_count'] = max(startup_candles, max(indicator_periods))
         tf_seconds = timeframe_to_seconds(timeframe)
-        train_candles = freqai_config['train_period_days'] * \
-            86400 / tf_seconds
-        total_candles = int(
-            self._config['startup_candle_count'] + train_candles)
+        train_candles = freqai_config['train_period_days'] * 86400 / tf_seconds
+        total_candles = int(self._config['startup_candle_count'] + train_candles)
         logger.info(
             f'Increasing startup_candle_count for freqai on {timeframe} to {total_candles}')
         return total_candles
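Worked numbers for the candle arithmetic above (all inputs illustrative): a 1h timeframe with train_period_days = 30,

    startup_candles = 400
    indicator_periods = [10, 20, 300]
    startup = max(startup_candles, max(indicator_periods))  # -> 400
    tf_seconds = 3600                                        # one 1h candle
    train_candles = 30 * 86400 / tf_seconds                  # -> 720.0 candles
    total_candles = int(startup + train_candles)
    assert total_candles == 1120
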
@@ -376,22 +365,18 @@ class DataProvider:
         """
         if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
             # Get live OHLCV data.
-            data = self.ohlcv(pair=pair, timeframe=timeframe,
-                              candle_type=candle_type)
+            data = self.ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type)
         else:
             # Get historical OHLCV data (cached on disk).
             timeframe = timeframe or self._config['timeframe']
-            data = self.historic_ohlcv(
-                pair=pair, timeframe=timeframe, candle_type=candle_type)
+            data = self.historic_ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type)
         # Cut date to timeframe-specific date.
         # This is necessary to prevent lookahead bias in callbacks through informative pairs.
         if self.__slice_date:
-            cutoff_date = timeframe_to_prev_date(
-                timeframe, self.__slice_date)
+            cutoff_date = timeframe_to_prev_date(timeframe, self.__slice_date)
             data = data.loc[data['date'] < cutoff_date]
         if len(data) == 0:
-            logger.warning(
-                f"No data found for ({pair}, {timeframe}, {candle_type}).")
+            logger.warning(f"No data found for ({pair}, {timeframe}, {candle_type}).")
         return data
 
     def get_analyzed_dataframe(self, pair: str, timeframe: str) -> Tuple[DataFrame, datetime]:
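The cutoff above rounds the slice date down to the previous candle boundary before filtering, so an informative pair can never expose a candle the strategy could not yet have seen. A sketch with a hand-rolled rounding helper (freqtrade's timeframe_to_prev_date does the equivalent; this stand-in is illustrative only):

    from datetime import datetime, timezone

    def prev_candle_open(ts: datetime, tf_seconds: int) -> datetime:
        # Round down to the most recent candle open.
        epoch = ts.timestamp() // tf_seconds * tf_seconds
        return datetime.fromtimestamp(epoch, tz=timezone.utc)

    slice_date = datetime(2023, 1, 1, 12, 35, tzinfo=timezone.utc)
    cutoff = prev_candle_open(slice_date, 3600)  # 1h candles
    assert cutoff == datetime(2023, 1, 1, 12, 0, tzinfo=timezone.utc)
    # data = data.loc[data['date'] < cutoff]  # keep only fully closed candles
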
@@ -404,8 +389,7 @@ class DataProvider:
         combination.
         Returns empty dataframe and Epoch 0 (1970-01-01) if no dataframe was cached.
         """
-        pair_key = (pair, timeframe, self._config.get(
-            'candle_type_def', CandleType.SPOT))
+        pair_key = (pair, timeframe, self._config.get('candle_type_def', CandleType.SPOT))
         if pair_key in self.__cached_pairs:
             if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
                 df, date = self.__cached_pairs[pair_key]
@@ -413,8 +397,7 @@ class DataProvider:
                 df, date = self.__cached_pairs[pair_key]
                 if self.__slice_index is not None:
                     max_index = self.__slice_index
-                    df = df.iloc[max(
-                        0, max_index - MAX_DATAFRAME_CANDLES):max_index]
+                    df = df.iloc[max(0, max_index - MAX_DATAFRAME_CANDLES):max_index]
             return df, date
         else:
             return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
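The joined iloc line caps the returned slice at MAX_DATAFRAME_CANDLES rows ending at the slice index. Illustrative values (the constant's value here is an assumption, not taken from this commit):

    MAX_DATAFRAME_CANDLES = 1000  # freqtrade constant; value assumed for the example
    max_index = 1500
    start = max(0, max_index - MAX_DATAFRAME_CANDLES)
    assert (start, max_index) == (500, 1500)  # df.iloc[500:1500]: newest 1000 candles
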
@@ -439,8 +422,7 @@ class DataProvider:
         if self._pairlists:
             return self._pairlists.whitelist.copy()
         else:
-            raise OperationalException(
-                "Dataprovider was not initialized with a pairlist provider.")
+            raise OperationalException("Dataprovider was not initialized with a pairlist provider.")
 
     def clear_cache(self):
         """

@@ -107,11 +107,9 @@ def load_data(datadir: Path,
             result[pair] = hist
         else:
             if candle_type is CandleType.FUNDING_RATE and user_futures_funding_rate is not None:
-                logger.warn(
-                    f"{pair} using user specified [{user_futures_funding_rate}]")
+                logger.warn(f"{pair} using user specified [{user_futures_funding_rate}]")
             elif candle_type not in (CandleType.SPOT, CandleType.FUTURES):
-                result[pair] = DataFrame(
-                    columns=["date", "open", "close", "high", "low", "volume"])
+                result[pair] = DataFrame(columns=["date", "open", "close", "high", "low", "volume"])
 
     if fail_without_data and not result:
         raise OperationalException("No data found. Terminating.")
@@ -219,8 +217,7 @@ def _download_pair_history(pair: str, *,
     try:
         if erase:
             if data_handler.ohlcv_purge(pair, timeframe, candle_type=candle_type):
-                logger.info(
-                    f'Deleting existing data for pair {pair}, {timeframe}, {candle_type}.')
+                logger.info(f'Deleting existing data for pair {pair}, {timeframe}, {candle_type}.')
 
         data, since_ms, until_ms = _load_cached_data_for_updating(
             pair, timeframe, timerange,
@@ -269,8 +266,7 @@ def _download_pair_history(pair: str, *,
                     f"{data.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}"
                     if not data.empty else 'None')
 
-        data_handler.ohlcv_store(
-            pair, timeframe, data=data, candle_type=candle_type)
+        data_handler.ohlcv_store(pair, timeframe, data=data, candle_type=candle_type)
         return True
 
     except Exception:
@@ -303,8 +299,7 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes
             continue
         for timeframe in timeframes:
 
-            logger.debug(
-                f'Downloading pair {pair}, {candle_type}, interval {timeframe}.')
+            logger.debug(f'Downloading pair {pair}, {candle_type}, interval {timeframe}.')
             process = f'{idx}/{len(pairs)}'
             _download_pair_history(pair=pair, process=process,
                                    datadir=datadir, exchange=exchange,
@@ -318,15 +313,12 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes
             tf_mark = exchange.get_option('mark_ohlcv_timeframe')
             tf_funding_rate = exchange.get_option('funding_fee_timeframe')
 
-            fr_candle_type = CandleType.from_string(
-                exchange.get_option('mark_ohlcv_price'))
+            fr_candle_type = CandleType.from_string(exchange.get_option('mark_ohlcv_price'))
             # All exchanges need FundingRate for futures trading.
             # The timeframe is aligned to the mark-price timeframe.
-            combs = ((CandleType.FUNDING_RATE, tf_funding_rate),
-                     (fr_candle_type, tf_mark))
+            combs = ((CandleType.FUNDING_RATE, tf_funding_rate), (fr_candle_type, tf_mark))
             for candle_type_f, tf in combs:
-                logger.debug(
-                    f'Downloading pair {pair}, {candle_type_f}, interval {tf}.')
+                logger.debug(f'Downloading pair {pair}, {candle_type_f}, interval {tf}.')
                 _download_pair_history(pair=pair, process=process,
                                        datadir=datadir, exchange=exchange,
                                        timerange=timerange, data_handler=data_handler,
@@ -452,8 +444,7 @@ def get_timerange(data: Dict[str, DataFrame]) -> Tuple[datetime, datetime]:
     :return: tuple containing min_date, max_date
     """
     timeranges = [
-        (frame['date'].min().to_pydatetime(),
-         frame['date'].max().to_pydatetime())
+        (frame['date'].min().to_pydatetime(), frame['date'].max().to_pydatetime())
         for frame in data.values()
     ]
     return (min(timeranges, key=operator.itemgetter(0))[0],
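The joined comprehension pairs each frame's earliest and latest date; the return line then picks the global minimum start. Illustrative data (the max lookup on the line following this hunk is assumed as the natural counterpart):

    import operator
    from datetime import datetime

    timeranges = [
        (datetime(2023, 1, 1), datetime(2023, 3, 1)),
        (datetime(2023, 2, 1), datetime(2023, 4, 1)),
    ]
    min_date = min(timeranges, key=operator.itemgetter(0))[0]  # earliest start
    max_date = max(timeranges, key=operator.itemgetter(1))[1]  # latest end (assumed)
    assert (min_date, max_date) == (datetime(2023, 1, 1), datetime(2023, 4, 1))
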
@@ -472,8 +463,7 @@ def validate_backtest_data(data: DataFrame, pair: str, min_date: datetime,
     :param timeframe_min: Timeframe in minutes
     """
     # total difference in minutes / timeframe-minutes
-    expected_frames = int(
-        (max_date - min_date).total_seconds() // 60 // timeframe_min)
+    expected_frames = int((max_date - min_date).total_seconds() // 60 // timeframe_min)
     found_missing = False
     dflen = len(data)
     if dflen < expected_frames:
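Worked numbers for the expected-frame count (dates illustrative): one day of 5-minute candles,

    from datetime import datetime

    min_date = datetime(2023, 1, 1)
    max_date = datetime(2023, 1, 2)
    timeframe_min = 5
    expected_frames = int((max_date - min_date).total_seconds() // 60 // timeframe_min)
    assert expected_frames == 288  # 24 * 60 / 5
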
@@ -487,8 +477,7 @@ def download_data_main(config: Config) -> None:
 
     timerange = TimeRange()
     if 'days' in config:
-        time_since = (datetime.now() -
-                      timedelta(days=config['days'])).strftime("%Y%m%d")
+        time_since = (datetime.now() - timedelta(days=config['days'])).strftime("%Y%m%d")
         timerange = TimeRange.parse_timerange(f'{time_since}-')
 
     if 'timerange' in config:
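The joined line turns --days into an open-ended timerange string. A sketch (30 stands in for config['days']):

    from datetime import datetime, timedelta

    days = 30
    time_since = (datetime.now() - timedelta(days=days)).strftime("%Y%m%d")
    timerange_str = f'{time_since}-'  # e.g. '20230101-', fed to TimeRange.parse_timerange
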
@@ -505,7 +494,7 @@ def download_data_main(config: Config) -> None:
     available_pairs = [
         p for p in exchange.get_markets(
             tradable_only=True, active_only=not config.get('include_inactive')
         ).keys()
     ]
 
     expanded_pairs = dynamic_expand_pairlist(config, available_pairs)
@@ -538,8 +527,7 @@ def download_data_main(config: Config) -> None:
         # Convert downloaded trade data to different timeframes
         convert_trades_to_ohlcv(
             pairs=expanded_pairs, timeframes=config['timeframes'],
-            datadir=config['datadir'], timerange=timerange, erase=bool(
-                config.get('erase')),
+            datadir=config['datadir'], timerange=timerange, erase=bool(config.get('erase')),
             data_format_ohlcv=config['dataformat_ohlcv'],
             data_format_trades=config['dataformat_trades'],
         )
@@ -549,7 +537,7 @@ def download_data_main(config: Config) -> None:
             f"Historic klines not available for {exchange.name}. "
             "Please use `--dl-trades` instead for this exchange "
             "(will unfortunately take a long time)."
         )
     migrate_data(config, exchange)
     pairs_not_available = refresh_backtest_ohlcv_data(
         exchange, pairs=expanded_pairs, timeframes=config['timeframes'],