Merge branch 'freqtrade:develop' into develop

This commit is contained in:
JamesLinxun
2025-04-29 10:18:13 -04:00
committed by GitHub
20 changed files with 162 additions and 101 deletions

View File

@@ -38,8 +38,9 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
uses: astral-sh/setup-uv@c7f87aa956e4c323abf06d5dec078e358f6b4d04 # v6.0.0
with:
activate-environment: true
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: "requirements**.txt"
@@ -144,7 +145,7 @@ jobs:
mypy freqtrade scripts tests
- name: Discord notification
uses: rjstone/discord-webhook-notify@89b0bf43c2c8514f70d0dcba4a706b904e8a3112 #v1.0.4
uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: error
@@ -170,8 +171,9 @@ jobs:
check-latest: true
- name: Install uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
uses: astral-sh/setup-uv@c7f87aa956e4c323abf06d5dec078e358f6b4d04 # v6.0.0
with:
activate-environment: true
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: "requirements**.txt"
@@ -270,7 +272,7 @@ jobs:
mypy freqtrade scripts
- name: Discord notification
uses: rjstone/discord-webhook-notify@89b0bf43c2c8514f70d0dcba4a706b904e8a3112 #v1.0.4
uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: info
@@ -296,8 +298,9 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
uses: astral-sh/setup-uv@c7f87aa956e4c323abf06d5dec078e358f6b4d04 # v6.0.0
with:
activate-environment: true
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: "requirements**.txt"
@@ -363,7 +366,7 @@ jobs:
shell: powershell
- name: Discord notification
uses: rjstone/discord-webhook-notify@89b0bf43c2c8514f70d0dcba4a706b904e8a3112 #v1.0.4
uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: error
@@ -421,7 +424,7 @@ jobs:
mkdocs build
- name: Discord notification
uses: rjstone/discord-webhook-notify@89b0bf43c2c8514f70d0dcba4a706b904e8a3112 #v1.0.4
uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: error
@@ -443,8 +446,9 @@ jobs:
python-version: "3.12"
- name: Install uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
uses: astral-sh/setup-uv@c7f87aa956e4c323abf06d5dec078e358f6b4d04 # v6.0.0
with:
activate-environment: true
enable-cache: true
python-version: "3.12"
cache-dependency-glob: "requirements**.txt"
@@ -508,7 +512,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Discord notification
uses: rjstone/discord-webhook-notify@89b0bf43c2c8514f70d0dcba4a706b904e8a3112 #v1.0.4
uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
if: always() && steps.check.outputs.has-permission && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: info
@@ -703,7 +707,7 @@ jobs:
build_helpers/publish_docker_arm64.sh
- name: Discord notification
uses: rjstone/discord-webhook-notify@89b0bf43c2c8514f70d0dcba4a706b904e8a3112 #v1.0.4
uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
if: always() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false) && (github.event_name != 'schedule')
with:
severity: info

View File

@@ -2,6 +2,6 @@ markdown==3.8
mkdocs==1.6.1
mkdocs-material==9.6.12
mdx_truly_sane_lists==1.3
pymdown-extensions==10.14.3
pymdown-extensions==10.15
jinja2==3.1.6
mike==2.1.3

View File

@@ -0,0 +1,31 @@
# flake8: noqa: F401
from .bt_fileutils import (
BT_DATA_COLUMNS,
delete_backtest_result,
extract_trades_of_period,
find_existing_backtest_stats,
get_backtest_market_change,
get_backtest_result,
get_backtest_resultlist,
get_latest_backtest_filename,
get_latest_hyperopt_file,
get_latest_hyperopt_filename,
get_latest_optimize_filename,
load_and_merge_backtest_result,
load_backtest_analysis_data,
load_backtest_data,
load_backtest_metadata,
load_backtest_stats,
load_exit_signal_candles,
load_file_from_zip,
load_rejected_signals,
load_signal_candles,
load_trades,
load_trades_from_db,
trade_list_to_dataframe,
update_backtest_metadata,
)
from .trade_parallelism import (
analyze_trade_parallelism,
evaluate_result_multi,
)

View File

@@ -13,7 +13,7 @@ from typing import Any, Literal
import numpy as np
import pandas as pd
from freqtrade.constants import LAST_BT_RESULT_FN, IntOrInf
from freqtrade.constants import LAST_BT_RESULT_FN
from freqtrade.exceptions import ConfigurationError, OperationalException
from freqtrade.ft_types import BacktestHistoryEntryType, BacktestResultType
from freqtrade.misc import file_dump_json, json_load
@@ -491,55 +491,6 @@ def load_exit_signal_candles(backtest_dir: Path) -> dict[str, dict[str, pd.DataF
return load_backtest_analysis_data(backtest_dir, "exited")
def analyze_trade_parallelism(results: pd.DataFrame, timeframe: str) -> pd.DataFrame:
    """
    Count simultaneously-open trades per candle period.

    Every trade is expanded into one row per candle it spans; the expanded
    rows are then resampled and counted to yield open-trade counts.
    :param results: Results Dataframe - can be loaded
    :param timeframe: Timeframe used for backtest
    :return: dataframe with open-counts per time-period in timeframe
    """
    from freqtrade.exchange import timeframe_to_resample_freq

    resample_freq = timeframe_to_resample_freq(timeframe)
    # One date-range per trade covering each candle it was open in.
    # Right boundary excluded - dates represent candle *open* times.
    per_trade_dates = []
    for _, trade in results[["open_date", "close_date"]].iterrows():
        candle_span = pd.date_range(
            trade["open_date"],
            trade["close_date"],
            freq=resample_freq,
            inclusive="left",
        )
        per_trade_dates.append(pd.Series(candle_span))
    repeat_counts = [len(span) for span in per_trade_dates]
    all_dates = pd.Series(pd.concat(per_trade_dates).values, name="date")
    # Duplicate each trade row once per candle it spans, aligned with the dates.
    expanded = pd.DataFrame(
        np.repeat(results.values, repeat_counts, axis=0), columns=results.columns
    )
    expanded = pd.concat([all_dates, expanded], axis=1).set_index("date")
    open_counts = expanded.resample(resample_freq)[["pair"]].count()
    return open_counts.rename({"pair": "open_trades"}, axis=1)
def evaluate_result_multi(
    results: pd.DataFrame, timeframe: str, max_open_trades: IntOrInf
) -> pd.DataFrame:
    """
    Return only the periods that exceeded the configured trade limit.

    :param results: Results Dataframe - can be loaded
    :param timeframe: Frequency used for the backtest
    :param max_open_trades: parameter max_open_trades used during backtest run
    :return: dataframe with open-counts per time-period in freq
    """
    parallelism = analyze_trade_parallelism(results, timeframe)
    # Keep only rows where the open-trade count went above the allowed maximum.
    over_limit = parallelism["open_trades"] > max_open_trades
    return parallelism.loc[over_limit]
def trade_list_to_dataframe(trades: list[Trade] | list[LocalTrade]) -> pd.DataFrame:
"""
Convert list of Trade objects to pandas Dataframe

View File

@@ -0,0 +1,60 @@
import logging
import numpy as np
import pandas as pd
from freqtrade.constants import IntOrInf
logger = logging.getLogger(__name__)
def analyze_trade_parallelism(trades: pd.DataFrame, timeframe: str) -> pd.DataFrame:
    """
    Count how many trades were open at once in every candle period.

    Each trade is first expanded into one row per candle it was open during;
    the expanded frame is then resampled and counted per period.
    :param trades: Trades Dataframe - can be loaded from backtest, or created
                   via trade_list_to_dataframe
    :param timeframe: Timeframe used for backtest
    :return: dataframe with open-counts per time-period in timeframe
    """
    from freqtrade.exchange import timeframe_to_resample_freq

    resample_freq = timeframe_to_resample_freq(timeframe)
    # Build one candle-range per trade. The right boundary is excluded
    # because each date marks a candle's *open* time.
    per_trade_spans = [
        pd.Series(
            pd.date_range(
                trade["open_date"],
                trade["close_date"],
                freq=resample_freq,
                inclusive="left",
            )
        )
        for _, trade in trades[["open_date", "close_date"]].iterrows()
    ]
    repeat_counts = [len(span) for span in per_trade_spans]
    all_dates = pd.Series(pd.concat(per_trade_spans).values, name="date")
    # Repeat every trade row once per candle it spans, keeping column layout.
    expanded = pd.DataFrame(
        np.repeat(trades.values, repeat_counts, axis=0), columns=trades.columns
    )
    expanded = pd.concat([all_dates, expanded], axis=1).set_index("date")
    open_counts = expanded.resample(resample_freq)[["pair"]].count()
    return open_counts.rename({"pair": "open_trades"}, axis=1)
def evaluate_result_multi(
    trades: pd.DataFrame, timeframe: str, max_open_trades: IntOrInf
) -> pd.DataFrame:
    """
    Return the periods in which more trades were open than max_open_trades.

    :param trades: Trades Dataframe - can be loaded from backtest, or created
                   via trade_list_to_dataframe
    :param timeframe: Frequency used for the backtest
    :param max_open_trades: parameter max_open_trades used during backtest run
    :return: dataframe with open-counts per time-period in freq
    """
    parallelism = analyze_trade_parallelism(trades, timeframe)
    # Filter down to only those periods that breached the configured limit.
    over_limit = parallelism["open_trades"] > max_open_trades
    return parallelism.loc[over_limit]

View File

@@ -123,7 +123,7 @@ class Backtesting:
config["dry_run"] = True
self.run_ids: dict[str, str] = {}
self.strategylist: list[IStrategy] = []
self.all_results: dict[str, BacktestContentType] = {}
self.all_bt_content: dict[str, BacktestContentType] = {}
self.analysis_results: dict[str, dict[str, DataFrame]] = {
"signals": {},
"rejected": {},
@@ -1717,7 +1717,7 @@ class Backtesting:
"backtest_end_time": int(backtest_end_time.timestamp()),
}
)
self.all_results[strategy_name] = results
self.all_bt_content[strategy_name] = results
if (
self.config.get("export", "none") == "signals"
@@ -1780,9 +1780,9 @@ class Backtesting:
min_date, max_date = self.backtest_one_strategy(strat, data, timerange)
# Update old results with new ones.
if len(self.all_results) > 0:
if len(self.all_bt_content) > 0:
results = generate_backtest_stats(
data, self.all_results, min_date=min_date, max_date=max_date
data, self.all_bt_content, min_date=min_date, max_date=max_date
)
if self.results:
self.results["metadata"].update(results["metadata"])

View File

@@ -1,6 +1,6 @@
from datetime import datetime, timezone
from enum import Enum
from typing import ClassVar
from typing import ClassVar, Literal
from sqlalchemy import String
from sqlalchemy.orm import Mapped, mapped_column
@@ -18,9 +18,11 @@ class ValueTypesEnum(str, Enum):
INT = "int"
class KeyStoreKeys(str, Enum):
BOT_START_TIME = "bot_start_time"
STARTUP_TIME = "startup_time"
KeyStoreKeys = Literal[
"bot_start_time",
"startup_time",
"binance_migration",
]
class _KeyValueStoreModel(ModelBase):
@@ -192,7 +194,7 @@ class KeyValueStore:
return kv.int_value
def set_startup_time():
def set_startup_time() -> None:
"""
sets bot_start_time to the first trade open date - or "now" on new databases.
sets startup_time to "now"

View File

@@ -1,6 +1,6 @@
import logging
from sqlalchemy import inspect, select, text, update
from sqlalchemy import Engine, inspect, select, text, update
from freqtrade.exceptions import OperationalException
from freqtrade.persistence.trade_model import Order, Trade
@@ -9,7 +9,7 @@ from freqtrade.persistence.trade_model import Order, Trade
logger = logging.getLogger(__name__)
def get_table_names_for_table(inspector, tabletype) -> list[str]:
def get_table_names_for_table(inspector, tabletype: str) -> list[str]:
return [t for t in inspector.get_table_names() if t.startswith(tabletype)]
@@ -350,7 +350,7 @@ def fix_wrong_max_stake_amount(engine):
connection.execute(stmt)
def check_migrate(engine, decl_base, previous_tables) -> None:
def check_migrate(engine: Engine, decl_base, previous_tables: list[str]) -> None:
"""
Checks if migration is necessary and migrates if necessary
"""

View File

@@ -96,7 +96,10 @@ def __run_backtest_bg(btconfig: Config):
)
ApiBG.bt["bt"].results = generate_backtest_stats(
ApiBG.bt["data"], ApiBG.bt["bt"].all_results, min_date=min_date, max_date=max_date
ApiBG.bt["data"],
ApiBG.bt["bt"].all_bt_content,
min_date=min_date,
max_date=max_date,
)
if btconfig.get("export", "none") == "trades":

View File

@@ -33,7 +33,7 @@ from freqtrade.exceptions import ExchangeError, PricingError
from freqtrade.exchange import Exchange, timeframe_to_minutes, timeframe_to_msecs
from freqtrade.exchange.exchange_utils import price_to_precision
from freqtrade.loggers import bufferHandler
from freqtrade.persistence import CustomDataWrapper, KeyStoreKeys, KeyValueStore, PairLocks, Trade
from freqtrade.persistence import CustomDataWrapper, KeyValueStore, PairLocks, Trade
from freqtrade.persistence.models import PairLock
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
from freqtrade.rpc.fiat_convert import CryptoToFiatConverter
@@ -635,7 +635,7 @@ class RPC:
first_date = trades[0].open_date_utc if trades else None
last_date = trades[-1].open_date_utc if trades else None
num = float(len(durations) or 1)
bot_start = KeyValueStore.get_datetime_value(KeyStoreKeys.BOT_START_TIME)
bot_start = KeyValueStore.get_datetime_value("bot_start_time")
return {
"profit_closed_coin": profit_closed_coin_sum,
"profit_closed_percent_mean": round(profit_closed_ratio_mean * 100, 2),
@@ -1601,7 +1601,7 @@ class RPC:
}
)
if bot_start := KeyValueStore.get_datetime_value(KeyStoreKeys.BOT_START_TIME):
if bot_start := KeyValueStore.get_datetime_value("bot_start_time"):
res.update(
{
"bot_start": str(bot_start),
@@ -1609,7 +1609,7 @@ class RPC:
"bot_start_ts": int(bot_start.timestamp()),
}
)
if bot_startup := KeyValueStore.get_datetime_value(KeyStoreKeys.STARTUP_TIME):
if bot_startup := KeyValueStore.get_datetime_value("startup_time"):
res.update(
{
"bot_startup": str(bot_startup),

View File

@@ -4,6 +4,9 @@ from freqtrade.util.migrations.funding_rate_mig import migrate_funding_fee_timef
def migrate_data(config, exchange: Exchange | None = None):
"""
Migrate persisted data from old formats to new formats
"""
migrate_binance_futures_data(config)
migrate_funding_fee_timeframe(config, exchange)

View File

@@ -6,8 +6,8 @@ from sqlalchemy import select
from freqtrade.constants import DOCS_LINK, Config
from freqtrade.enums import TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.persistence import KeyValueStore, Trade
from freqtrade.persistence.pairlock import PairLock
from freqtrade.persistence.trade_model import Trade
logger = logging.getLogger(__name__)
@@ -20,6 +20,9 @@ def migrate_binance_futures_names(config: Config):
):
# only act on new futures
return
if KeyValueStore.get_int_value("binance_migration"):
# already migrated
return
import ccxt
if version.parse("2.6.26") > version.parse(ccxt.__version__):
@@ -29,10 +32,11 @@ def migrate_binance_futures_names(config: Config):
)
_migrate_binance_futures_db(config)
migrate_binance_futures_data(config)
KeyValueStore.store_value("binance_migration", 1)
def _migrate_binance_futures_db(config: Config):
logger.warning("Migrating binance futures pairs in database.")
logger.info("Migrating binance futures pairs in database.")
trades = Trade.get_trades([Trade.exchange == "binance", Trade.trading_mode == "FUTURES"]).all()
for trade in trades:
if ":" in trade.pair:
@@ -52,7 +56,7 @@ def _migrate_binance_futures_db(config: Config):
# print(pls)
# pls.update({'pair': concat(PairLock.pair,':USDT')})
Trade.commit()
logger.warning("Done migrating binance futures pairs in database.")
logger.info("Done migrating binance futures pairs in database.")
def migrate_binance_futures_data(config: Config):

View File

@@ -7,7 +7,7 @@
-r docs/requirements-docs.txt
coveralls==4.0.1
ruff==0.11.6
ruff==0.11.7
mypy==1.15.0
pre-commit==4.2.0
pytest==8.3.5

View File

@@ -2,7 +2,7 @@
-r requirements-freqai.txt
# Required for freqai-rl
torch==2.6.0; sys_platform != 'darwin' or platform_machine != 'x86_64'
torch==2.7.0; sys_platform != 'darwin' or platform_machine != 'x86_64'
gymnasium==0.29.1
# SB3 >=2.5.0 depends on torch 2.3.0 - which implies it dropped support x86 macos
stable_baselines3==2.4.1; sys_platform == 'darwin' and platform_machine == 'x86_64'

View File

@@ -4,7 +4,7 @@ bottleneck==1.4.2
numexpr==2.10.2
pandas-ta==0.3.14b
ccxt==4.4.75
ccxt==4.4.77
cryptography==44.0.2
aiohttp==3.9.5
SQLAlchemy==2.0.40
@@ -23,7 +23,7 @@ pycoingecko==3.2.0
jinja2==3.1.6
joblib==1.4.2
rich==14.0.0
pyarrow==19.0.1; platform_machine != 'armv7l'
pyarrow==20.0.0; platform_machine != 'armv7l'
# find first, C search in arrays
py_find_1st==1.1.7

View File

@@ -56,7 +56,7 @@ def test_get_latest_backtest_filename(testdatadir, mocker):
res = get_latest_backtest_filename(str(testdir_bt))
assert res == "backtest-result.json"
mocker.patch("freqtrade.data.btanalysis.json_load", return_value={})
mocker.patch("freqtrade.data.btanalysis.bt_fileutils.json_load", return_value={})
with pytest.raises(ValueError, match=r"Invalid '.last_result.json' format."):
get_latest_backtest_filename(testdir_bt)
@@ -84,8 +84,8 @@ def test_load_backtest_metadata(mocker, testdatadir):
res = load_backtest_metadata(testdatadir / "nonexistent.file.json")
assert res == {}
mocker.patch("freqtrade.data.btanalysis.get_backtest_metadata_filename")
mocker.patch("freqtrade.data.btanalysis.json_load", side_effect=Exception())
mocker.patch("freqtrade.data.btanalysis.bt_fileutils.get_backtest_metadata_filename")
mocker.patch("freqtrade.data.btanalysis.bt_fileutils.json_load", side_effect=Exception())
with pytest.raises(
OperationalException, match=r"Unexpected error.*loading backtest metadata\."
):
@@ -94,7 +94,7 @@ def test_load_backtest_metadata(mocker, testdatadir):
def test_load_backtest_data_old_format(testdatadir, mocker):
filename = testdatadir / "backtest-result_test222.json"
mocker.patch("freqtrade.data.btanalysis.load_backtest_stats", return_value=[])
mocker.patch("freqtrade.data.btanalysis.bt_fileutils.load_backtest_stats", return_value=[])
with pytest.raises(
OperationalException,
@@ -149,7 +149,7 @@ def test_load_backtest_data_multi(testdatadir):
def test_load_trades_from_db(default_conf, fee, is_short, mocker):
create_mock_trades(fee, is_short)
# remove init so it does not init again
init_mock = mocker.patch("freqtrade.data.btanalysis.init_db", MagicMock())
init_mock = mocker.patch("freqtrade.data.btanalysis.bt_fileutils.init_db", MagicMock())
trades = load_trades_from_db(db_url=default_conf["db_url"])
assert init_mock.call_count == 1
@@ -221,8 +221,10 @@ def test_analyze_trade_parallelism(testdatadir):
def test_load_trades(default_conf, mocker):
db_mock = mocker.patch("freqtrade.data.btanalysis.load_trades_from_db", MagicMock())
bt_mock = mocker.patch("freqtrade.data.btanalysis.load_backtest_data", MagicMock())
db_mock = mocker.patch(
"freqtrade.data.btanalysis.bt_fileutils.load_trades_from_db", MagicMock()
)
bt_mock = mocker.patch("freqtrade.data.btanalysis.bt_fileutils.load_backtest_data", MagicMock())
load_trades(
"DB",

View File

@@ -408,13 +408,14 @@ EXCHANGES = {
"candle_count": 200,
"orderbook_max_entries": 50,
},
"htx": {
"pair": "ETH/BTC",
"stake_currency": "BTC",
"hasQuoteVolume": True,
"timeframe": "1h",
"candle_count": 1000,
},
# TODO: verify why htx is not working in CI.
# "htx": {
# "pair": "ETH/BTC",
# "stake_currency": "BTC",
# "hasQuoteVolume": True,
# "timeframe": "1h",
# "candle_count": 1000,
# },
"bitvavo": {
"pair": "BTC/EUR",
"stake_currency": "EUR",

View File

@@ -2576,7 +2576,7 @@ def test_backtest_start_multi_strat_caching(
],
)
mocker.patch.multiple(
"freqtrade.data.btanalysis",
"freqtrade.data.btanalysis.bt_fileutils",
load_backtest_metadata=load_backtest_metadata,
load_backtest_stats=load_backtest_stats,
)

View File

@@ -2869,7 +2869,7 @@ def test_api_backtesting(botclient, mocker, fee, caplog, tmp_path):
def test_api_backtest_history(botclient, mocker, testdatadir):
ftbot, client = botclient
mocker.patch(
"freqtrade.data.btanalysis._get_backtest_files",
"freqtrade.data.btanalysis.bt_fileutils._get_backtest_files",
return_value=[
testdatadir / "backtest_results/backtest-result_multistrat.json",
testdatadir / "backtest_results/backtest-result.json",