From 5de3b9d7aee561674fd77662ee45c2d8e7b2d931 Mon Sep 17 00:00:00 2001
From: Matthias
Date: Fri, 18 Aug 2023 09:14:41 +0200
Subject: [PATCH] Clean up no longer used method

---
 freqtrade/data/converter.py  | 13 -------------
 tests/data/test_converter.py | 12 +-----------
 2 files changed, 1 insertion(+), 24 deletions(-)

diff --git a/freqtrade/data/converter.py b/freqtrade/data/converter.py
index c19d89f8f..324ce5def 100644
--- a/freqtrade/data/converter.py
+++ b/freqtrade/data/converter.py
@@ -1,9 +1,7 @@
 """
 Functions to convert data from one format to another
 """
-import itertools
 import logging
-from operator import itemgetter
 from typing import Dict, List
 
 import numpy as np
@@ -196,17 +194,6 @@ def order_book_to_dataframe(bids: list, asks: list) -> DataFrame:
     return frame
 
 
-def trades_remove_duplicates(trades: List[List]) -> List[List]:
-    """
-    Removes duplicates from the trades list.
-    Uses itertools.groupby to avoid converting to pandas.
-    Tests show it as being pretty efficient on lists of 4M Lists.
-    :param trades: List of Lists with constants.DEFAULT_TRADES_COLUMNS as columns
-    :return: same format as above, but with duplicates removed
-    """
-    return [i for i, _ in itertools.groupby(sorted(trades, key=itemgetter(0, 1)))]
-
-
 def trades_df_remove_duplicates(trades: pd.DataFrame) -> pd.DataFrame:
     """
     Removes duplicates from the trades DataFrame.
diff --git a/tests/data/test_converter.py b/tests/data/test_converter.py
index 9f733f316..7ca236442 100644
--- a/tests/data/test_converter.py
+++ b/tests/data/test_converter.py
@@ -11,8 +11,7 @@ from freqtrade.configuration.timerange import TimeRange
 from freqtrade.data.converter import (convert_ohlcv_format, convert_trades_format,
                                       ohlcv_fill_up_missing_data, ohlcv_to_dataframe,
                                       reduce_dataframe_footprint, trades_df_remove_duplicates,
-                                      trades_dict_to_list, trades_remove_duplicates,
-                                      trades_to_ohlcv, trim_dataframe)
+                                      trades_dict_to_list, trades_to_ohlcv, trim_dataframe)
 from freqtrade.data.history import (get_timerange, load_data, load_pair_history,
                                     validate_backtest_data)
 from freqtrade.data.history.idatahandler import IDataHandler
@@ -299,15 +298,6 @@ def test_trim_dataframe(testdatadir) -> None:
     assert all(data_modify.iloc[0] == data.iloc[25])
 
 
-def test_trades_remove_duplicates(trades_history):
-    trades_history1 = trades_history * 3
-    assert len(trades_history1) == len(trades_history) * 3
-    res = trades_remove_duplicates(trades_history1)
-    assert len(res) == len(trades_history)
-    for i, t in enumerate(res):
-        assert t == trades_history[i]
-
-
 def test_trades_df_remove_duplicates(trades_history_df):
     trades_history1 = pd.concat([trades_history_df, trades_history_df, trades_history_df]
                                 ).reset_index(drop=True)
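
Note for reviewers: the deleted helper de-duplicated raw trade lists by sorting on the first two columns and grouping with `itertools.groupby`, while the surviving `trades_df_remove_duplicates` works on a pandas DataFrame. A minimal sketch contrasting the two ideas is below; the column layout, sample rows, and the `drop_duplicates` subset are illustrative assumptions, not taken from this patch.

```python
import itertools
from operator import itemgetter

import pandas as pd

# Illustrative trade rows: [timestamp, id, type, side, price, amount, cost]
# (column order assumed to mirror constants.DEFAULT_TRADES_COLUMNS).
trades = [
    [1692345600000, "1", None, "buy", 100.0, 1.0, 100.0],
    [1692345600000, "1", None, "buy", 100.0, 1.0, 100.0],  # exact duplicate
    [1692345601000, "2", None, "sell", 101.0, 0.5, 50.5],
]

# Removed list-based approach: sort by (timestamp, id), then keep one row per group.
deduped_list = [row for row, _ in itertools.groupby(sorted(trades, key=itemgetter(0, 1)))]

# DataFrame-based equivalent of what the remaining helper provides
# (a sketch using drop_duplicates; the exact subset used by freqtrade is assumed here).
columns = ["timestamp", "id", "type", "side", "price", "amount", "cost"]
trades_df = pd.DataFrame(trades, columns=columns)
deduped_df = trades_df.drop_duplicates(subset=["timestamp", "id"]).reset_index(drop=True)

assert len(deduped_list) == len(deduped_df) == 2
```

Since trade data now flows through DataFrames throughout the codebase, keeping only the DataFrame-based helper avoids maintaining two parallel de-duplication paths.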