Mirror of https://github.com/freqtrade/freqtrade.git (synced 2025-12-16 12:51:14 +00:00)
Merge branch 'main-stash' of https://github.com/stash86/freqtrade into main-stash
@@ -15,7 +15,7 @@ jobs:
     environment:
       name: develop
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

.github/workflows/ci.yml (vendored, 30 changed lines)

@@ -28,7 +28,7 @@ jobs:
         python-version: ["3.11", "3.12", "3.13"]

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
          persist-credentials: false

@@ -38,7 +38,7 @@ jobs:
           python-version: ${{ matrix.python-version }}

       - name: Install uv
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+        uses: astral-sh/setup-uv@d9e0f98d3fc6adb07d1e3d37f3043649ddad06a1 # v6.5.0
         with:
           activate-environment: true
           enable-cache: true

@@ -148,7 +148,7 @@ jobs:
         python-version: ["3.11", "3.12", "3.13"]

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

@@ -159,7 +159,7 @@ jobs:
           check-latest: true

       - name: Install uv
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+        uses: astral-sh/setup-uv@d9e0f98d3fc6adb07d1e3d37f3043649ddad06a1 # v6.5.0
         with:
           activate-environment: true
           enable-cache: true

@@ -258,10 +258,10 @@ jobs:
     strategy:
       matrix:
         os: [ windows-latest ]
-        python-version: ["3.11", "3.12", "3.13"]
+        python-version: ["3.11", "3.12", "3.13.6"]

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

@@ -271,7 +271,7 @@ jobs:
           python-version: ${{ matrix.python-version }}

       - name: Install uv
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+        uses: astral-sh/setup-uv@d9e0f98d3fc6adb07d1e3d37f3043649ddad06a1 # v6.5.0
         with:
           activate-environment: true
           enable-cache: true

@@ -351,7 +351,7 @@ jobs:
   mypy-version-check:
     runs-on: ubuntu-22.04
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

@@ -368,7 +368,7 @@ jobs:
   pre-commit:
     runs-on: ubuntu-22.04
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

@@ -380,7 +380,7 @@ jobs:
   docs-check:
     runs-on: ubuntu-22.04
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

@@ -411,7 +411,7 @@ jobs:
     # Run pytest with "live" checks
     runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

@@ -421,7 +421,7 @@ jobs:
           python-version: "3.12"

       - name: Install uv
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+        uses: astral-sh/setup-uv@d9e0f98d3fc6adb07d1e3d37f3043649ddad06a1 # v6.5.0
         with:
           activate-environment: true
           enable-cache: true

@@ -484,7 +484,7 @@ jobs:
     runs-on: ubuntu-22.04

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

@@ -531,7 +531,7 @@ jobs:
       id-token: write

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

@@ -560,7 +560,7 @@ jobs:
       id-token: write

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

.github/workflows/deploy-docs.yml (vendored, 2 changed lines)

@@ -19,7 +19,7 @@ jobs:
     name: Deploy Docs through mike
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: true

.github/workflows/devcontainer-build.yml (vendored, 2 changed lines)

@@ -24,7 +24,7 @@ jobs:
       packages: write
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false
      - name: Login to GitHub Container Registry

.github/workflows/docker-build.yml (vendored, 4 changed lines)

@@ -27,7 +27,7 @@ jobs:
     if: github.repository == 'freqtrade/freqtrade'

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

@@ -84,7 +84,7 @@ jobs:
     if: github.repository == 'freqtrade/freqtrade'

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

.github/workflows/docker-update-readme.yml (vendored, 2 changed lines)

@@ -11,7 +11,7 @@ jobs:
   dockerHubDescription:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

.github/workflows/pre-commit-update.yml (vendored, 2 changed lines)

@@ -13,7 +13,7 @@ jobs:
   auto-update:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           persist-credentials: false

.github/workflows/zizmor.yml (vendored, 4 changed lines)

@@ -21,9 +21,9 @@ jobs:
       # actions: read # only needed for private repos
     steps:
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v4.2.2
         with:
           persist-credentials: false

       - name: Run zizmor 🌈
-        uses: zizmorcore/zizmor-action@f52a838cfabf134edcbaa7c8b3677dde20045018 # v0.1.1
+        uses: zizmorcore/zizmor-action@5ca5fc7a4779c5263a3ffa0e1f693009994446d1 # v0.1.2

@@ -32,7 +32,7 @@ repos:
           - types-tabulate==0.9.0.20241207
           - types-python-dateutil==2.9.0.20250809
           - scipy-stubs==1.16.1.0
-          - SQLAlchemy==2.0.42
+          - SQLAlchemy==2.0.43
         # stages: [push]

   - repo: https://github.com/pycqa/isort

@@ -44,7 +44,7 @@ repos:

   - repo: https://github.com/charliermarsh/ruff-pre-commit
     # Ruff version.
-    rev: 'v0.12.8'
+    rev: 'v0.12.9'
     hooks:
       - id: ruff
       - id: ruff-format

@@ -83,6 +83,6 @@ repos:

   # Ensure github actions remain safe
   - repo: https://github.com/woodruffw/zizmor-pre-commit
-    rev: v1.11.0
+    rev: v1.12.1
     hooks:
       - id: zizmor

@@ -1,4 +1,4 @@
-FROM python:3.13.6-slim-bookworm AS base
+FROM python:3.13.7-slim-bookworm AS base

 # Setup env
 ENV LANG=C.UTF-8

@@ -46,29 +46,32 @@ ranging from the simplest (0) to the most detailed per pair, per buy and per sell

 More options are available by running with the `-h` option.

-### Using export-filename
+### Using backtest-filename

-Normally, `backtesting-analysis` uses the latest backtest results, but if you wanted to go
-back to a previous backtest output, you need to supply the `--export-filename` option.
-You can supply the same parameter to `backtest-analysis` with the name of the final backtest
-output file. This allows you to keep historical versions of backtest results and re-analyse
-them at a later date:
+By default, `backtesting-analysis` processes the most recent backtest results in the `user_data/backtest_results` directory.
+If you want to analyze results from an earlier backtest, use the `--backtest-filename` option to specify the desired file. This lets you revisit and re-analyze historical backtest outputs at any time by providing the filename of the relevant backtest result:

 ``` bash
-freqtrade backtesting-analysis -c <config.json> --timeframe <tf> --strategy <strategy_name> --timerange=<timerange> --export=signals --export-filename=user_data/backtest-results/backtest-result-2025-03-05_20-38-34.zip
+freqtrade backtesting-analysis -c <config.json> --timeframe <tf> --strategy <strategy_name> --timerange <timerange> --export signals --backtest-filename backtest-result-2025-03-05_20-38-34.zip
 ```

 You should see some output similar to below in the logs with the name of the timestamped
 filename that was exported:

 ```
-2022-06-14 16:28:32,698 - freqtrade.misc - INFO - dumping json to "/tmp/mystrat_backtest-2022-06-14_16-28-32.json"
+2022-06-14 16:28:32,698 - freqtrade.misc - INFO - dumping json to "mystrat_backtest-2022-06-14_16-28-32.json"
 ```

 You can then use that filename in `backtesting-analysis`:

 ```
-freqtrade backtesting-analysis -c <config.json> --export-filename=/tmp/mystrat_backtest-2022-06-14_16-28-32.json
+freqtrade backtesting-analysis -c <config.json> --backtest-filename=mystrat_backtest-2022-06-14_16-28-32.json
+```
+
+To use a result from a different results directory, you can use `--backtest-directory` to specify the directory
+
+``` bash
+freqtrade backtesting-analysis -c <config.json> --backtest-directory custom_results/ --backtest-filename mystrat_backtest-2022-06-14_16-28-32.json
 ```

 ### Tuning the buy tags and sell tags to display

@@ -108,7 +108,7 @@ Only use this if you're sure you'll not want to plot or analyze your results further.

 Exporting trades to file specifying a custom directory

 ```bash
-freqtrade backtesting --strategy backtesting --export trades --export-filename=user_data/custom-backtest-results
+freqtrade backtesting --strategy backtesting --export trades --backtest-directory=user_data/custom-backtest-results
 ```

 ---

@@ -3,6 +3,7 @@ usage: freqtrade backtesting-analysis [-h] [-v] [--no-color] [--logfile FILE]
                                       [-V] [-c PATH] [-d PATH]
                                       [--userdir PATH]
                                       [--backtest-filename PATH]
+                                      [--backtest-directory PATH]
                                       [--analysis-groups {0,1,2,3,4,5} [{0,1,2,3,4,5} ...]]
                                       [--enter-reason-list ENTER_REASON_LIST [ENTER_REASON_LIST ...]]
                                       [--exit-reason-list EXIT_REASON_LIST [EXIT_REASON_LIST ...]]

@@ -16,7 +17,13 @@ options:
   -h, --help            show this help message and exit
   --backtest-filename PATH, --export-filename PATH
                         Use this filename for backtest results.Example:
-                        `--backtest-filename=user_data/backtest_results/`
+                        `--backtest-
+                        filename=backtest_results_2020-09-27_16-20-48.json`.
+                        Assumes either `user_data/backtest_results/` or
+                        `--export-directory` as base directory.
+  --backtest-directory PATH, --export-directory PATH
+                        Directory to use for backtest results. Example:
+                        `--export-directory=user_data/backtest_results/`.
   --analysis-groups {0,1,2,3,4,5} [{0,1,2,3,4,5} ...]
                         grouping output - 0: simple wins/losses by enter tag,
                         1: by enter_tag, 2: by enter_tag and exit_tag, 3: by

@@ -2,6 +2,7 @@
 usage: freqtrade backtesting-show [-h] [-v] [--no-color] [--logfile FILE] [-V]
                                   [-c PATH] [-d PATH] [--userdir PATH]
                                   [--backtest-filename PATH]
+                                  [--backtest-directory PATH]
                                   [--show-pair-list]
                                   [--breakdown {day,week,month,year} [{day,week,month,year} ...]]

@@ -9,7 +10,13 @@ options:
   -h, --help            show this help message and exit
   --backtest-filename PATH, --export-filename PATH
                         Use this filename for backtest results.Example:
-                        `--backtest-filename=user_data/backtest_results/`
+                        `--backtest-
+                        filename=backtest_results_2020-09-27_16-20-48.json`.
+                        Assumes either `user_data/backtest_results/` or
+                        `--export-directory` as base directory.
+  --backtest-directory PATH, --export-directory PATH
+                        Directory to use for backtest results. Example:
+                        `--export-directory=user_data/backtest_results/`.
   --show-pair-list      Show backtesting pairlist sorted by profit.
   --breakdown {day,week,month,year} [{day,week,month,year} ...]
                         Show backtesting breakdown per [day, week, month,

@@ -15,6 +15,7 @@ usage: freqtrade backtesting [-h] [-v] [--no-color] [--logfile FILE] [-V]
                              [--strategy-list STRATEGY_LIST [STRATEGY_LIST ...]]
                              [--export {none,trades,signals}]
                              [--backtest-filename PATH]
+                             [--backtest-directory PATH]
                              [--breakdown {day,week,month,year} [{day,week,month,year} ...]]
                              [--cache {none,day,week,month}]
                              [--freqai-backtest-live-models] [--notes TEXT]

@@ -63,7 +64,13 @@ options:
                         Export backtest results (default: trades).
   --backtest-filename PATH, --export-filename PATH
                         Use this filename for backtest results.Example:
-                        `--backtest-filename=user_data/backtest_results/`
+                        `--backtest-
+                        filename=backtest_results_2020-09-27_16-20-48.json`.
+                        Assumes either `user_data/backtest_results/` or
+                        `--export-directory` as base directory.
+  --backtest-directory PATH, --export-directory PATH
+                        Directory to use for backtest results. Example:
+                        `--export-directory=user_data/backtest_results/`.
   --breakdown {day,week,month,year} [{day,week,month,year} ...]
                         Show backtesting breakdown per [day, week, month,
                         year].

@@ -16,6 +16,7 @@ usage: freqtrade lookahead-analysis [-h] [-v] [--no-color] [--logfile FILE]
                                     [--strategy-list STRATEGY_LIST [STRATEGY_LIST ...]]
                                     [--export {none,trades,signals}]
                                     [--backtest-filename PATH]
+                                    [--backtest-directory PATH]
                                     [--freqai-backtest-live-models]
                                     [--minimum-trade-amount INT]
                                     [--targeted-trade-amount INT]

@@ -62,7 +63,13 @@ options:
                         Export backtest results (default: trades).
   --backtest-filename PATH, --export-filename PATH
                         Use this filename for backtest results.Example:
-                        `--backtest-filename=user_data/backtest_results/`
+                        `--backtest-
+                        filename=backtest_results_2020-09-27_16-20-48.json`.
+                        Assumes either `user_data/backtest_results/` or
+                        `--export-directory` as base directory.
+  --backtest-directory PATH, --export-directory PATH
+                        Directory to use for backtest results. Example:
+                        `--export-directory=user_data/backtest_results/`.
   --freqai-backtest-live-models
                         Run backtest with ready models.
   --minimum-trade-amount INT

@@ -40,7 +40,10 @@ options:
                         Export backtest results (default: trades).
   --backtest-filename PATH, --export-filename PATH
                         Use this filename for backtest results.Example:
-                        `--backtest-filename=user_data/backtest_results/`
+                        `--backtest-
+                        filename=backtest_results_2020-09-27_16-20-48.json`.
+                        Assumes either `user_data/backtest_results/` or
+                        `--export-directory` as base directory.
   --timerange TIMERANGE
                         Specify what timerange of data to use.
   -i TIMEFRAME, --timeframe TIMEFRAME

@@ -21,7 +21,10 @@ options:
                         Export backtest results (default: trades).
   --backtest-filename PATH, --export-filename PATH
                         Use this filename for backtest results.Example:
-                        `--backtest-filename=user_data/backtest_results/`
+                        `--backtest-
+                        filename=backtest_results_2020-09-27_16-20-48.json`.
+                        Assumes either `user_data/backtest_results/` or
+                        `--export-directory` as base directory.
   --db-url PATH         Override trades database URL, this is useful in custom
                         deployments (default: `sqlite:///tradesv3.sqlite` for
                         Live Run mode, `sqlite:///tradesv3.dryrun.sqlite` for

@@ -1,6 +1,6 @@
 markdown==3.8.2
 mkdocs==1.6.1
-mkdocs-material==9.6.16
+mkdocs-material==9.6.17
 mdx_truly_sane_lists==1.3
 pymdown-extensions==10.16.1
 jinja2==3.1.6

@@ -54,6 +54,7 @@ ARGS_BACKTEST = [
     "strategy_list",
     "export",
     "exportfilename",
+    "exportdirectory",
     "backtest_breakdown",
     "backtest_cache",
     "freqai_backtest_live_models",

@@ -94,7 +95,12 @@ ARGS_LIST_FREQAIMODELS = ["freqaimodel_path", "print_one_column"]

 ARGS_LIST_HYPEROPTS = ["hyperopt_path", "print_one_column"]

-ARGS_BACKTEST_SHOW = ["exportfilename", "backtest_show_pair_list", "backtest_breakdown"]
+ARGS_BACKTEST_SHOW = [
+    "exportfilename",
+    "exportdirectory",
+    "backtest_show_pair_list",
+    "backtest_breakdown",
+]

 ARGS_LIST_EXCHANGES = ["print_one_column", "list_exchanges_all", "trading_mode", "dex_exchanges"]

@@ -233,6 +239,7 @@ ARGS_HYPEROPT_SHOW = [

 ARGS_ANALYZE_ENTRIES_EXITS = [
     "exportfilename",
+    "exportdirectory",
     "analysis_groups",
     "enter_reason_list",
     "exit_reason_list",

@@ -199,21 +199,29 @@ AVAILABLE_CLI_OPTIONS = {
         "(so `backtest-data.json` becomes `backtest-data-SampleStrategy.json`",
         nargs="+",
     ),
-    "export": Arg(
-        "--export",
-        help="Export backtest results (default: trades).",
-        choices=constants.EXPORT_OPTIONS,
-    ),
     "backtest_notes": Arg(
         "--notes",
         help="Add notes to the backtest results.",
         metavar="TEXT",
     ),
+    "export": Arg(
+        "--export",
+        help="Export backtest results (default: trades).",
+        choices=constants.EXPORT_OPTIONS,
+    ),
+    "exportdirectory": Arg(
+        "--backtest-directory",
+        "--export-directory",
+        help="Directory to use for backtest results. "
+        "Example: `--export-directory=user_data/backtest_results/`. ",
+        metavar="PATH",
+    ),
     "exportfilename": Arg(
         "--backtest-filename",
         "--export-filename",
         help="Use this filename for backtest results."
-        "Example: `--backtest-filename=user_data/backtest_results/`",
+        "Example: `--backtest-filename=backtest_results_2020-09-27_16-20-48.json`. "
+        "Assumes either `user_data/backtest_results/` or `--export-directory` as base directory.",
         metavar="PATH",
     ),
     "disableparamexport": Arg(

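For readers who want to see the resulting command-line surface in isolation, the following is a minimal, hypothetical sketch using plain argparse rather than freqtrade's internal `Arg` wrapper; the flag names and destinations mirror the hunk above, everything else (program name, defaults) is an assumption.

```python
# Hypothetical, standalone sketch of the CLI surface defined above.
# Plain argparse, not freqtrade's Arg wrapper; flag names mirror the diff.
import argparse

parser = argparse.ArgumentParser(prog="backtesting-analysis-sketch")
parser.add_argument(
    "--backtest-directory",
    "--export-directory",
    dest="exportdirectory",
    metavar="PATH",
    help="Directory to use for backtest results.",
)
parser.add_argument(
    "--backtest-filename",
    "--export-filename",
    dest="exportfilename",
    metavar="PATH",
    help="Filename of the backtest results, resolved against --backtest-directory.",
)

if __name__ == "__main__":
    # Both the new and the legacy spellings parse into the same destinations.
    ns = parser.parse_args(
        ["--backtest-directory", "user_data/backtest_results", "--export-filename", "result.json"]
    )
    print(ns.exportdirectory, ns.exportfilename)
```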
@@ -72,7 +72,7 @@ def start_backtesting_show(args: dict[str, Any]) -> None:
     from freqtrade.data.btanalysis import load_backtest_stats
     from freqtrade.optimize.optimize_reports import show_backtest_results, show_sorted_pairlist

-    results = load_backtest_stats(config["exportfilename"])
+    results = load_backtest_stats(config["exportdirectory"], config["exportfilename"])

     show_backtest_results(config, results)
     show_sorted_pairlist(config, results)

@@ -209,13 +209,28 @@ class Configuration:
         config.update({"datadir": create_datadir(config, self.args.get("datadir"))})
         logger.info("Using data directory: %s ...", config.get("datadir"))

+        self._args_to_config(
+            config, argname="exportdirectory", logstring="Using {} as backtest directory ..."
+        )
+
         if self.args.get("exportfilename"):
             self._args_to_config(
                 config, argname="exportfilename", logstring="Storing backtest results to {} ..."
             )
             config["exportfilename"] = Path(config["exportfilename"])
-        else:
-            config["exportfilename"] = config["user_data_dir"] / "backtest_results"
+            if config.get("exportdirectory") and Path(config["exportdirectory"]).is_dir():
+                logger.warning(
+                    "DEPRECATED: Using `--export-filename` with directories is deprecated, "
+                    "use `--backtest-directory` instead."
+                )
+            if config.get("exportdirectory") is None:
+                # Fallback - assign export-directory directly.
+                config["exportdirectory"] = config["exportfilename"]
+        if not config.get("exportdirectory"):
+            config["exportdirectory"] = config["user_data_dir"] / "backtest_results"
+        if not config.get("exportfilename"):
+            config["exportfilename"] = None
+        config["exportdirectory"] = Path(config["exportdirectory"])

         if self.args.get("show_sensitive"):
             logger.warning(

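The precedence encoded in the hunk above (an explicit `--backtest-filename` is kept, the directory falls back to the filename value and finally to `user_data/backtest_results`) can be summarised in a small standalone helper. This is a simplified sketch of the observable behaviour, not the actual `Configuration` code, and it omits the deprecation warning; the helper name is an assumption.

```python
# Simplified sketch of the filename/directory resolution shown in the hunk above.
# Standalone helper for illustration only - not the real Configuration class.
from pathlib import Path


def resolve_backtest_paths(
    exportfilename: str | None, exportdirectory: str | None, user_data_dir: Path
) -> tuple[Path | None, Path]:
    """Return (filename, directory) following the precedence in the diff."""
    filename = Path(exportfilename) if exportfilename else None
    directory = Path(exportdirectory) if exportdirectory else None
    if filename is not None and directory is None:
        # Fallback from the diff: the --export-filename value doubles as the directory.
        directory = filename
    if directory is None:
        # Default location when neither option is given.
        directory = user_data_dir / "backtest_results"
    return filename, directory


# Example: no explicit arguments fall back to user_data/backtest_results.
print(resolve_backtest_paths(None, None, Path("user_data")))
```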
@@ -155,33 +155,55 @@ def load_backtest_metadata(filename: Path | str) -> dict[str, Any]:
         raise OperationalException("Unexpected error while loading backtest metadata.") from e


-def load_backtest_stats(filename: Path | str) -> BacktestResultType:
+def _normalize_filename(file_or_directory: Path | str, filename: Path | str | None) -> Path:
+    """
+    Normalize the filename by ensuring it is a Path object.
+    :param file_or_directory: The directory or file to normalize.
+    :param filename: The filename to normalize.
+    :return: A Path object representing the normalized filename.
+    """
+    if isinstance(file_or_directory, str):
+        file_or_directory = Path(file_or_directory)
+    if file_or_directory.is_dir():
+        if not filename:
+            filename = get_latest_backtest_filename(file_or_directory)
+        if Path(filename).is_file():
+            fn = Path(filename)
+        else:
+            fn = file_or_directory / filename
+    else:
+        fn = file_or_directory
+    return fn
+
+
+def load_backtest_stats(
+    file_or_directory: Path | str, filename: Path | str | None = None
+) -> BacktestResultType:
     """
     Load backtest statistics file.
-    :param filename: pathlib.Path object, or string pointing to the file.
+    :param file_or_directory: pathlib.Path object, or string pointing to the directory,
+        or absolute/relative path to the backtest results file.
+    :param filename: Optional filename to load from (if different from the main filename).
+        Only valid when loading from a directory.
     :return: a dictionary containing the resulting file.
     """
-    if isinstance(filename, str):
-        filename = Path(filename)
-    if filename.is_dir():
-        filename = filename / get_latest_backtest_filename(filename)
-    if not filename.is_file():
-        raise ValueError(f"File {filename} does not exist.")
-    logger.info(f"Loading backtest result from {filename}")
+    fn = _normalize_filename(file_or_directory, filename)

-    if filename.suffix == ".zip":
+    if not fn.is_file():
+        raise ValueError(f"File or directory {fn} does not exist.")
+    logger.info(f"Loading backtest result from {fn}")
+
+    if fn.suffix == ".zip":
         data = json_load(
-            StringIO(
-                load_file_from_zip(filename, filename.with_suffix(".json").name).decode("utf-8")
-            )
+            StringIO(load_file_from_zip(fn, fn.with_suffix(".json").name).decode("utf-8"))
         )
     else:
-        with filename.open() as file:
+        with fn.open() as file:
             data = json_load(file)

     # Legacy list format does not contain metadata.
     if isinstance(data, dict):
-        data["metadata"] = load_backtest_metadata(filename)
+        data["metadata"] = load_backtest_metadata(fn)
     return data

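Under the new signature above, callers can pass either a directory, a directory plus a filename, or a full path to a single result. A hypothetical usage sketch follows; the paths are placeholders, the result filename is taken from the documentation examples earlier in this commit.

```python
# Hypothetical usage of the new load_backtest_stats() signature shown above.
from pathlib import Path

from freqtrade.data.btanalysis import load_backtest_stats

results_dir = Path("user_data/backtest_results")

# 1) Directory only: the latest result in that directory is loaded.
stats_latest = load_backtest_stats(results_dir)

# 2) Directory plus filename: a specific result inside that directory.
stats_named = load_backtest_stats(results_dir, "backtest-result-2025-03-05_20-38-34.zip")

# 3) Full path to a single result file, as before.
stats_direct = load_backtest_stats(results_dir / "backtest-result-2025-03-05_20-38-34.zip")
```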
@@ -362,16 +384,21 @@ def _load_backtest_data_df_compatibility(df: pd.DataFrame) -> pd.DataFrame:
     return df


-def load_backtest_data(filename: Path | str, strategy: str | None = None) -> pd.DataFrame:
+def load_backtest_data(
+    file_or_directory: Path | str, strategy: str | None = None, filename: Path | str | None = None
+) -> pd.DataFrame:
     """
-    Load backtest data file.
-    :param filename: pathlib.Path object, or string pointing to a file or directory
+    Load backtest data file, returns a dataframe with the individual trades.
+    :param file_or_directory: pathlib.Path object, or string pointing to the directory,
+        or absolute/relative path to the backtest results file.
     :param strategy: Strategy to load - mainly relevant for multi-strategy backtests
         Can also serve as protection to load the correct result.
+    :param filename: Optional filename to load from (if different from the main filename).
+        Only valid when loading from a directory.
     :return: a dataframe with the analysis results
     :raise: ValueError if loading goes wrong.
     """
-    data = load_backtest_stats(filename)
+    data = load_backtest_stats(file_or_directory, filename)
     if not isinstance(data, list):
         # new, nested format
         if "strategy" not in data:

@@ -430,20 +457,23 @@ def load_file_from_zip(zip_path: Path, filename: str) -> bytes:
         raise ValueError(f"Bad zip file: {zip_path}.") from None


-def load_backtest_analysis_data(backtest_dir: Path, name: Literal["signals", "rejected", "exited"]):
+def load_backtest_analysis_data(
+    file_or_directory: Path,
+    name: Literal["signals", "rejected", "exited"],
+    filename: Path | str | None = None,
+):
     """
     Load backtest analysis data either from a pickle file or from within a zip file
-    :param backtest_dir: Directory containing backtest results
+    :param file_or_directory: pathlib.Path object, or string pointing to the directory,
+        or absolute/relative path to the backtest results file.
     :param name: Name of the analysis data to load (signals, rejected, exited)
+    :param filename: Optional filename to load from (if different from the main filename).
+        Only valid when loading from a directory.
     :return: Analysis data
     """
     import joblib

-    if backtest_dir.is_dir():
-        lbf = Path(get_latest_backtest_filename(backtest_dir))
-        zip_path = backtest_dir / lbf
-    else:
-        zip_path = backtest_dir
+    zip_path = _normalize_filename(file_or_directory, filename)

     if zip_path.suffix == ".zip":
         # Load from zip file

@@ -458,10 +488,10 @@ def load_backtest_analysis_data(file_or_directory: Path, name: Literal["signals", "rejected", "exited"]):

     else:
         # Load from separate pickle file
-        if backtest_dir.is_dir():
-            scpf = Path(backtest_dir, f"{zip_path.stem}_{name}.pkl")
+        if file_or_directory.is_dir():
+            scpf = Path(file_or_directory, f"{zip_path.stem}_{name}.pkl")
         else:
-            scpf = Path(backtest_dir.parent / f"{backtest_dir.stem}_{name}.pkl")
+            scpf = Path(file_or_directory.parent / f"{file_or_directory.stem}_{name}.pkl")

     try:
         with scpf.open("rb") as scp:

@@ -330,7 +330,7 @@ def process_entry_exit_reasons(config: Config):
     do_rejected = config.get("analysis_rejected", False)
     to_csv = config.get("analysis_to_csv", False)
     csv_path = Path(
-        config.get("analysis_csv_path", config["exportfilename"]),  # type: ignore[arg-type]
+        config.get("analysis_csv_path", config["exportdirectory"]),  # type: ignore[arg-type]
     )

     if entry_only is True and exit_only is True:

@@ -344,21 +344,29 @@ def process_entry_exit_reasons(config: Config):
         None if config.get("timerange") is None else str(config.get("timerange"))
     )
     try:
-        backtest_stats = load_backtest_stats(config["exportfilename"])
+        backtest_stats = load_backtest_stats(
+            config["exportdirectory"], config["exportfilename"]
+        )
     except ValueError as e:
         raise ConfigurationError(e) from e

     for strategy_name, results in backtest_stats["strategy"].items():
-        trades = load_backtest_data(config["exportfilename"], strategy_name)
+        trades = load_backtest_data(
+            config["exportdirectory"], strategy_name, config["exportfilename"]
+        )

         if trades is not None and not trades.empty:
-            signal_candles = load_backtest_analysis_data(config["exportfilename"], "signals")
-            exit_signals = load_backtest_analysis_data(config["exportfilename"], "exited")
+            signal_candles = load_backtest_analysis_data(
+                config["exportdirectory"], "signals", config["exportfilename"]
+            )
+            exit_signals = load_backtest_analysis_data(
+                config["exportdirectory"], "exited", config["exportfilename"]
+            )

             rej_df = None
             if do_rejected:
                 rejected_signals_dict = load_backtest_analysis_data(
-                    config["exportfilename"], "rejected"
+                    config["exportdirectory"], "rejected", config["exportfilename"]
                 )
                 rej_df = prepare_results(
                     rejected_signals_dict,

(File diff suppressed because it is too large.)
@@ -64,7 +64,7 @@ def store_backtest_results(
     :param market_change_data: Dataframe containing market change data
     :param analysis_results: Dictionary containing analysis results
     """
-    recordfilename: Path = config["exportfilename"]
+    recordfilename: Path = config["exportdirectory"]
     zip_filename = _generate_filename(recordfilename, dtappendix, ".zip")
     base_filename = _generate_filename(recordfilename, dtappendix, "")
     json_filename = _generate_filename(recordfilename, dtappendix, ".json")

@@ -31,8 +31,9 @@ class AgeFilter(IPairList):

         self._min_days_listed = self._pairlistconfig.get("min_days_listed", 10)
         self._max_days_listed = self._pairlistconfig.get("max_days_listed")
+        self._def_candletype = self._config["candle_type_def"]

-        candle_limit = self._exchange.ohlcv_candle_limit("1d", self._config["candle_type_def"])
+        candle_limit = self._exchange.ohlcv_candle_limit("1d", self._def_candletype)
         if self._min_days_listed < 1:
             raise OperationalException("AgeFilter requires min_days_listed to be >= 1")
         if self._min_days_listed > candle_limit:

@@ -100,7 +101,7 @@ class AgeFilter(IPairList):
         :return: new allowlist
         """
         needed_pairs: ListPairsWithTimeframes = [
-            (p, "1d", self._config["candle_type_def"])
+            (p, "1d", self._def_candletype)
             for p in pairlist
             if p not in self._symbolsChecked and p not in self._symbolsCheckFailed
         ]

@@ -116,8 +117,8 @@ class AgeFilter(IPairList):
         if self._enabled:
             for p in deepcopy(pairlist):
                 daily_candles = (
-                    candles[(p, "1d", self._config["candle_type_def"])]
-                    if (p, "1d", self._config["candle_type_def"]) in candles
+                    candles[(p, "1d", self._def_candletype)]
+                    if (p, "1d", self._def_candletype) in candles
                     else None
                 )
                 if not self._validate_pair_loc(p, daily_candles):

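The AgeFilter hunks above, like the pairlist hunks that follow, cache `candle_type_def` on the instance once in the constructor instead of re-reading it from the config at every use. A minimal, hypothetical illustration of that pattern (not the real `IPairList` base class; the class and config values here are assumptions):

```python
# Minimal illustration of the caching pattern used in the pairlist hunks.
# Hypothetical class; the real filters derive from IPairList.
class ConfigCachingFilter:
    def __init__(self, exchange, config: dict):
        self._exchange = exchange
        self._config = config
        # Read the candle type once and reuse the cached attribute everywhere else.
        self._def_candletype = self._config["candle_type_def"]

    def needed_pairs(self, pairlist: list[str]):
        # Build (pair, timeframe, candle_type) tuples from the cached value.
        return [(p, "1d", self._def_candletype) for p in pairlist]


# Usage sketch:
f = ConfigCachingFilter(exchange=None, config={"candle_type_def": "spot"})
print(f.needed_pairs(["BTC/USDT", "ETH/USDT"]))
```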
@@ -37,7 +37,6 @@ class MarketCapPairList(IPairList):
         self._refresh_period = self._pairlistconfig.get("refresh_period", 86400)
         self._categories = self._pairlistconfig.get("categories", [])
         self._marketcap_cache: TTLCache = TTLCache(maxsize=1, ttl=self._refresh_period)
-        self._def_candletype = self._config["candle_type_def"]

         _coingecko_config = self._config.get("coingecko", {})

@@ -191,7 +190,7 @@ class MarketCapPairList(IPairList):
         if marketcap_list:
             filtered_pairlist = []

-            market = self._config["trading_mode"]
+            market = self._exchange._config["trading_mode"]
             pair_format = f"{self._stake_currency.upper()}"
             if market == "futures":
                 pair_format += f":{self._stake_currency.upper()}"

@@ -91,7 +91,7 @@ class PercentChangePairList(IPairList):
         )

         candle_limit = self._exchange.ohlcv_candle_limit(
-            self._lookback_timeframe, self._config["candle_type_def"]
+            self._lookback_timeframe, self._def_candletype
         )

         if self._lookback_period > candle_limit:

@@ -40,7 +40,7 @@ class VolatilityFilter(IPairList):

         self._pair_cache: TTLCache = TTLCache(maxsize=1000, ttl=self._refresh_period)

-        candle_limit = self._exchange.ohlcv_candle_limit("1d", self._config["candle_type_def"])
+        candle_limit = self._exchange.ohlcv_candle_limit("1d", self._def_candletype)
         if self._days < 1:
             raise OperationalException("VolatilityFilter requires lookback_days to be >= 1")
         if self._days > candle_limit:

@@ -89,7 +89,7 @@ class VolumePairList(IPairList):
             raise OperationalException(f"key {self._sort_key} not in {SORT_VALUES}")

         candle_limit = self._exchange.ohlcv_candle_limit(
-            self._lookback_timeframe, self._config["candle_type_def"]
+            self._lookback_timeframe, self._def_candletype
         )
         if self._lookback_period < 0:
             raise OperationalException("VolumeFilter requires lookback_period to be >= 0")

@@ -34,7 +34,7 @@ class RangeStabilityFilter(IPairList):

         self._pair_cache: TTLCache = TTLCache(maxsize=1000, ttl=self._refresh_period)

-        candle_limit = self._exchange.ohlcv_candle_limit("1d", self._config["candle_type_def"])
+        candle_limit = self._exchange.ohlcv_candle_limit("1d", self._def_candletype)
         if self._days < 1:
             raise OperationalException("RangeStabilityFilter requires lookback_days to be >= 1")
         if self._days > candle_limit:

@@ -54,6 +54,7 @@ def __run_pairlist(job_id: str, config_loc: Config):

     with FtNoDBContext():
         exchange = get_exchange(config_loc)
+        config_loc["candle_type_def"] = exchange._config["candle_type_def"]
         pairlists = PairListManager(exchange, config_loc)
         pairlists.refresh_pairlist()
         ApiBG.jobs[job_id]["result"] = {

@@ -6,7 +6,7 @@
 -r requirements-freqai-rl.txt
 -r docs/requirements-docs.txt

-ruff==0.12.8
+ruff==0.12.9
 mypy==1.17.1
 pre-commit==4.3.0
 pytest==8.4.1

@@ -7,6 +7,6 @@ scikit-learn==1.7.1
 joblib==1.5.1
 catboost==1.2.8; 'arm' not in platform_machine
 lightgbm==4.6.0
-xgboost==3.0.3
+xgboost==3.0.4
 tensorboard==2.20.0
 datasieve==0.1.9

@@ -4,6 +4,6 @@
 # Required for hyperopt
 scipy==1.16.1
 scikit-learn==1.7.1
-filelock==3.18.0
+filelock==3.19.1
 optuna==4.4.0
 cmaes==0.12.0

@@ -1,4 +1,4 @@
 # Include all requirements to run the bot.
 -r requirements.txt

-plotly==6.2.0
+plotly==6.3.0

@@ -9,10 +9,10 @@ ft-pandas-ta==0.3.15
 ta-lib==0.6.5
 technical==1.5.2

-ccxt==4.4.99
+ccxt==4.5.0
 cryptography==45.0.6
 aiohttp==3.12.15
-SQLAlchemy==2.0.42
+SQLAlchemy==2.0.43
 python-telegram-bot==22.3
 # can't be hard-pinned due to telegram-bot pinning httpx with ~
 httpx>=0.24.1

@@ -32,7 +32,7 @@ pyarrow==21.0.0; platform_machine != 'armv7l'
 # Load ticker files 30% faster
 python-rapidjson==1.21
 # Properly format api responses
-orjson==3.11.1
+orjson==3.11.2

 # Notify systemd
 sdnotify==0.3.2

@@ -1862,8 +1862,10 @@ def test_backtesting_show(mocker, testdatadir, capsys):
     sbr = mocker.patch("freqtrade.optimize.optimize_reports.show_backtest_results")
     args = [
         "backtesting-show",
+        "--export-directory",
+        f"{testdatadir / 'backtest_results'}",
         "--export-filename",
-        f"{testdatadir / 'backtest_results/backtest-result.json'}",
+        "backtest-result.json",
         "--show-pair-list",
     ]
     pargs = get_args(args)

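The updated test arguments above correspond to splitting the old absolute `--export-filename` path into a directory plus a bare filename. A hypothetical way to drive the equivalent CLI call from Python is sketched below; the paths are placeholders and the sketch assumes freqtrade is installed on PATH.

```python
# Hypothetical shell-equivalent of the updated test arguments, driven from Python.
import subprocess

subprocess.run(
    [
        "freqtrade",
        "backtesting-show",
        "--export-directory", "user_data/backtest_results",
        "--export-filename", "backtest-result.json",
        "--show-pair-list",
    ],
    check=True,
)
```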
@@ -236,7 +236,7 @@ def test_generate_backtest_stats(default_conf, testdatadir, tmp_path):
     filename_last = tmp_path / LAST_BT_RESULT_FN
     _backup_file(filename_last, copy_file=True)
     assert not filename.is_file()
-    default_conf["exportfilename"] = filename
+    default_conf["exportdirectory"] = filename

     store_backtest_results(default_conf, stats, "2022_01_01_15_05_13")

@@ -263,7 +263,7 @@ def test_store_backtest_results(testdatadir, mocker):
     zip_mock = mocker.patch("freqtrade.optimize.optimize_reports.bt_storage.ZipFile")
     data = {"metadata": {}, "strategy": {}, "strategy_comparison": []}
     store_backtest_results(
-        {"exportfilename": testdatadir, "original_config": {}}, data, "2022_01_01_15_05_13"
+        {"exportdirectory": testdatadir, "original_config": {}}, data, "2022_01_01_15_05_13"
     )

     assert dump_mock.call_count == 2

@@ -275,7 +275,7 @@ def test_store_backtest_results(testdatadir, mocker):
     zip_mock.reset_mock()
     filename = testdatadir / "testresult.json"
     store_backtest_results(
-        {"exportfilename": filename, "original_config": {}}, data, "2022_01_01_15_05_13"
+        {"exportdirectory": filename, "original_config": {}}, data, "2022_01_01_15_05_13"
     )
     assert dump_mock.call_count == 2
     assert zip_mock.call_count == 1

@@ -287,7 +287,7 @@ def test_store_backtest_results(testdatadir, mocker):
 def test_store_backtest_results_real(tmp_path, caplog):
     data = {"metadata": {}, "strategy": {}, "strategy_comparison": []}
     config = {
-        "exportfilename": tmp_path,
+        "exportdirectory": tmp_path,
         "original_config": {},
     }
     store_backtest_results(

@@ -356,7 +356,7 @@ def test_write_read_backtest_candles(tmp_path):
     bt_results = {"metadata": {}, "strategy": {}, "strategy_comparison": []}

     mock_conf = {
-        "exportfilename": tmp_path,
+        "exportdirectory": tmp_path,
         "export": "signals",
         "runmode": "backtest",
         "original_config": {},

@@ -393,33 +393,6 @@ def test_write_read_backtest_candles(tmp_path):

     _clean_test_file(stored_file)

-    # test file exporting
-    filename = tmp_path / "testresult"
-    mock_conf["exportfilename"] = filename
-    store_backtest_results(mock_conf, bt_results, sample_date, analysis_results=data)
-    stored_file = tmp_path / f"testresult-{sample_date}.zip"
-    signals_pkl = f"testresult-{sample_date}_signals.pkl"
-    rejected_pkl = f"testresult-{sample_date}_rejected.pkl"
-    exited_pkl = f"testresult-{sample_date}_exited.pkl"
-    assert not (tmp_path / signals_pkl).is_file()
-    assert stored_file.is_file()
-
-    with ZipFile(stored_file, "r") as zipf:
-        assert signals_pkl in zipf.namelist()
-        assert rejected_pkl in zipf.namelist()
-        assert exited_pkl in zipf.namelist()
-
-        with zipf.open(signals_pkl) as scp:
-            pickled_signal_candles2 = joblib.load(scp)
-
-    assert pickled_signal_candles2.keys() == candle_dict.keys()
-    assert pickled_signal_candles2["DefStrat"].keys() == pickled_signal_candles2["DefStrat"].keys()
-    assert pickled_signal_candles2["DefStrat"]["UNITTEST/BTC"].equals(
-        pickled_signal_candles2["DefStrat"]["UNITTEST/BTC"]
-    )
-
-    _clean_test_file(stored_file)
-

 def test_generate_pair_metrics():
     results = pd.DataFrame(

@@ -2802,8 +2802,8 @@ def test_api_backtesting(botclient, mocker, fee, caplog, tmp_path):
     ftbot.config["export"] = "trades"
     ftbot.config["backtest_cache"] = "day"
     ftbot.config["user_data_dir"] = tmp_path
-    ftbot.config["exportfilename"] = tmp_path / "backtest_results"
-    ftbot.config["exportfilename"].mkdir()
+    ftbot.config["exportdirectory"] = tmp_path / "backtest_results"
+    ftbot.config["exportdirectory"].mkdir()

     # start backtesting
     data = {