Mirror of https://github.com/freqtrade/freqtrade.git
Clean up no longer used method
@@ -1,9 +1,7 @@
 """
 Functions to convert data from one format to another
 """
-import itertools
 import logging
-from operator import itemgetter
 from typing import Dict, List
 
 import numpy as np
@@ -196,17 +194,6 @@ def order_book_to_dataframe(bids: list, asks: list) -> DataFrame:
     return frame
 
 
-def trades_remove_duplicates(trades: List[List]) -> List[List]:
-    """
-    Removes duplicates from the trades list.
-    Uses itertools.groupby to avoid converting to pandas.
-    Tests show it as being pretty efficient on lists of 4M Lists.
-    :param trades: List of Lists with constants.DEFAULT_TRADES_COLUMNS as columns
-    :return: same format as above, but with duplicates removed
-    """
-    return [i for i, _ in itertools.groupby(sorted(trades, key=itemgetter(0, 1)))]
-
-
 def trades_df_remove_duplicates(trades: pd.DataFrame) -> pd.DataFrame:
     """
     Removes duplicates from the trades DataFrame.
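The removed helper deduplicates the list-of-lists trade format without going through pandas: it sorts the rows by their first two columns (the trade timestamp and id in constants.DEFAULT_TRADES_COLUMNS) and lets itertools.groupby collapse runs of identical rows. Below is a minimal standalone sketch of the same technique, not freqtrade code; the column layout and the name dedupe_trades are illustrative assumptions.

import itertools
from operator import itemgetter
from typing import List


def dedupe_trades(trades: List[list]) -> List[list]:
    # Sort by (timestamp, id) so identical rows become adjacent, then keep
    # the first element of each group of equal rows.
    return [row for row, _ in itertools.groupby(sorted(trades, key=itemgetter(0, 1)))]


trades = [
    [1640995200000, "t1", "market", "buy", 100.0, 1.0, 100.0],
    [1640995200000, "t1", "market", "buy", 100.0, 1.0, 100.0],  # exact duplicate
    [1640995260000, "t2", "market", "sell", 101.0, 0.5, 50.5],
]
assert dedupe_trades(trades) == [trades[0], trades[2]]

Note that groupby uses the identity key here, so only rows that are equal in every column collapse into one; two rows sharing a timestamp and id but differing elsewhere would both survive.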
@@ -11,8 +11,7 @@ from freqtrade.configuration.timerange import TimeRange
 from freqtrade.data.converter import (convert_ohlcv_format, convert_trades_format,
                                       ohlcv_fill_up_missing_data, ohlcv_to_dataframe,
                                       reduce_dataframe_footprint, trades_df_remove_duplicates,
-                                      trades_dict_to_list, trades_remove_duplicates,
-                                      trades_to_ohlcv, trim_dataframe)
+                                      trades_dict_to_list, trades_to_ohlcv, trim_dataframe)
 from freqtrade.data.history import (get_timerange, load_data, load_pair_history,
                                     validate_backtest_data)
 from freqtrade.data.history.idatahandler import IDataHandler
@@ -299,15 +298,6 @@ def test_trim_dataframe(testdatadir) -> None:
     assert all(data_modify.iloc[0] == data.iloc[25])
 
 
-def test_trades_remove_duplicates(trades_history):
-    trades_history1 = trades_history * 3
-    assert len(trades_history1) == len(trades_history) * 3
-    res = trades_remove_duplicates(trades_history1)
-    assert len(res) == len(trades_history)
-    for i, t in enumerate(res):
-        assert t == trades_history[i]
-
-
 def test_trades_df_remove_duplicates(trades_history_df):
     trades_history1 = pd.concat([trades_history_df, trades_history_df, trades_history_df]
                                 ).reset_index(drop=True)
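Deduplication now goes through the surviving trades_df_remove_duplicates, which operates on the pandas DataFrame representation. Its body is not shown in this diff, so the sketch below is only a hedged approximation of such a dedup; the name dedupe_trades_df, the drop_duplicates call, and the (timestamp, id) subset are assumptions for illustration.

import pandas as pd


def dedupe_trades_df(trades: pd.DataFrame) -> pd.DataFrame:
    # Drop rows repeating the same (timestamp, id) pair, keeping the first occurrence.
    return trades.drop_duplicates(subset=['timestamp', 'id']).reset_index(drop=True)


df = pd.DataFrame(
    [[1640995200000, 't1', 100.0], [1640995260000, 't2', 101.0]],
    columns=['timestamp', 'id', 'price'],
)
tripled = pd.concat([df, df, df]).reset_index(drop=True)  # mirrors the tripled fixture in the test above
assert len(dedupe_trades_df(tripled)) == len(df)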