mirror of
https://github.com/freqtrade/freqtrade.git
synced 2025-11-29 08:33:07 +00:00
Compare commits
288 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b11119dbe5 | ||
|
|
cefc833ab1 | ||
|
|
5c7c3a88fb | ||
|
|
14e2e160af | ||
|
|
000eb875a2 | ||
|
|
33c8969411 | ||
|
|
d90666d2e8 | ||
|
|
f568d63c41 | ||
|
|
78943bf88d | ||
|
|
1b8a42d472 | ||
|
|
faa398053a | ||
|
|
23e4943b3a | ||
|
|
b3ec69fa8a | ||
|
|
dc4aaf0a82 | ||
|
|
d5b9c6dde8 | ||
|
|
bdfd3396cc | ||
|
|
c9256e0d9f | ||
|
|
c7e02b9a7f | ||
|
|
7d5cd998a3 | ||
|
|
96e8319e82 | ||
|
|
12d8fc8c3a | ||
|
|
1ad0e423c9 | ||
|
|
2ef1e203c6 | ||
|
|
959f81f4ac | ||
|
|
3090e47a12 | ||
|
|
02ab0eb139 | ||
|
|
5d8759de78 | ||
|
|
b13b00df17 | ||
|
|
d8b51875bf | ||
|
|
7f12f3a0e4 | ||
|
|
6afcc80937 | ||
|
|
efdb726362 | ||
|
|
3ca7407b09 | ||
|
|
22ba0e61e3 | ||
|
|
6e83890a13 | ||
|
|
8a8cc5e563 | ||
|
|
e1bf3bb825 | ||
|
|
875216cdc5 | ||
|
|
903b580026 | ||
|
|
073b625355 | ||
|
|
b3b21e6b93 | ||
|
|
b8b4b2d2f3 | ||
|
|
0cf1f6dc88 | ||
|
|
502d50c988 | ||
|
|
ed8b8fbf61 | ||
|
|
956398ab21 | ||
|
|
f3154423bd | ||
|
|
124c051432 | ||
|
|
67bcf38ea5 | ||
|
|
a98dd0eea3 | ||
|
|
103f64227f | ||
|
|
5b481009d6 | ||
|
|
3da6a8146e | ||
|
|
a8b313d387 | ||
|
|
574419f3a4 | ||
|
|
5793888113 | ||
|
|
b835af58c5 | ||
|
|
ca0b6998f0 | ||
|
|
77c3894fe2 | ||
|
|
0c46afaee4 | ||
|
|
b6e9609039 | ||
|
|
1b6dadbc87 | ||
|
|
1b18856d0b | ||
|
|
35e9805ef1 | ||
|
|
dcf38557ee | ||
|
|
54d78fd1c6 | ||
|
|
6040c391d3 | ||
|
|
0c6ed588b5 | ||
|
|
45dc667233 | ||
|
|
10d79f6232 | ||
|
|
3c20106e7d | ||
|
|
1e1c9a28f2 | ||
|
|
6a01985fd1 | ||
|
|
be572ba046 | ||
|
|
f711afd843 | ||
|
|
d6f58cd6cf | ||
|
|
9dd1ce71ca | ||
|
|
0a0a8428d5 | ||
|
|
fdc248a1b1 | ||
|
|
bfadd54ea3 | ||
|
|
ed23dc0f72 | ||
|
|
27e3ae8c24 | ||
|
|
f6cb446bff | ||
|
|
72786ca706 | ||
|
|
3e2a799d9f | ||
|
|
841f57800e | ||
|
|
5b6ea5ca90 | ||
|
|
162c1c606c | ||
|
|
de0759c36a | ||
|
|
1dbef58fe9 | ||
|
|
a63280bfa5 | ||
|
|
093a3f0e25 | ||
|
|
f8fc8a7f33 | ||
|
|
f8bf850673 | ||
|
|
d06a531c72 | ||
|
|
b341bc5a8c | ||
|
|
5ac1cf929b | ||
|
|
eca2ca3dc2 | ||
|
|
1d8fd059c6 | ||
|
|
a95972ae3f | ||
|
|
37985dfeea | ||
|
|
768efc45ff | ||
|
|
e6b5233d2f | ||
|
|
6fa1133c61 | ||
|
|
b1adb2b13d | ||
|
|
16378d32d7 | ||
|
|
33105a996e | ||
|
|
a6d76cad39 | ||
|
|
3d1568783e | ||
|
|
194d53acf8 | ||
|
|
82cd343fc1 | ||
|
|
751d98495f | ||
|
|
49b119f1dc | ||
|
|
6ba0eaf593 | ||
|
|
87f07d5f91 | ||
|
|
a24f5d64a1 | ||
|
|
30eb32862c | ||
|
|
6ae5724ad1 | ||
|
|
90a57e7384 | ||
|
|
16379bbcc5 | ||
|
|
05bb7e2fad | ||
|
|
6e98635345 | ||
|
|
de8dd83340 | ||
|
|
b3296b75e9 | ||
|
|
2f9ec970a5 | ||
|
|
568c579cab | ||
|
|
e116b71400 | ||
|
|
a85dc6972d | ||
|
|
feab28de91 | ||
|
|
0aa05855d5 | ||
|
|
44591053b6 | ||
|
|
c11de3ac38 | ||
|
|
b797a5bf2f | ||
|
|
1d9d2fce7f | ||
|
|
0e92372b0b | ||
|
|
68fbab5c32 | ||
|
|
596f6711f3 | ||
|
|
c2296d83c3 | ||
|
|
9a3ada65f5 | ||
|
|
df50e03239 | ||
|
|
9d0f5df549 | ||
|
|
6b41594c53 | ||
|
|
2d194995bc | ||
|
|
904580b914 | ||
|
|
9373779fba | ||
|
|
692bb36c80 | ||
|
|
8de5e2b7b8 | ||
|
|
6185761727 | ||
|
|
6c625c77c0 | ||
|
|
9950ed715e | ||
|
|
9b9ef6ae6d | ||
|
|
8075106286 | ||
|
|
6b033c211e | ||
|
|
8be31275d7 | ||
|
|
a3d3a4de51 | ||
|
|
f1b890954a | ||
|
|
0f2ff70bdf | ||
|
|
4c42bd7100 | ||
|
|
9acf8943ed | ||
|
|
4986f356ee | ||
|
|
ca390eea6a | ||
|
|
cd9c45e875 | ||
|
|
c90ac30077 | ||
|
|
466914d5c7 | ||
|
|
f86f955e24 | ||
|
|
affab24185 | ||
|
|
125126fb86 | ||
|
|
010ed6992b | ||
|
|
d8bbd4054e | ||
|
|
baefaeb311 | ||
|
|
288db16198 | ||
|
|
eb4199fe80 | ||
|
|
9a151b1ecf | ||
|
|
7dddab18c4 | ||
|
|
a85afd9af3 | ||
|
|
d09efa6dd1 | ||
|
|
f817b21d17 | ||
|
|
b7298f472c | ||
|
|
96baa90003 | ||
|
|
fae7198bb4 | ||
|
|
55be046933 | ||
|
|
38fa7068ca | ||
|
|
e5389be209 | ||
|
|
19a997a2db | ||
|
|
eac440649b | ||
|
|
21a47bb1ac | ||
|
|
b839e159a8 | ||
|
|
91df1257e7 | ||
|
|
88f8e831d7 | ||
|
|
627eee5fcf | ||
|
|
26c7752b7c | ||
|
|
1aaf0c2034 | ||
|
|
ac3a5900b4 | ||
|
|
60695fe205 | ||
|
|
2e2f40ca10 | ||
|
|
1f09b90dbe | ||
|
|
9c39b99ec6 | ||
|
|
2e343b9fbd | ||
|
|
087fb31f26 | ||
|
|
3edc442f48 | ||
|
|
b6cf3cc57d | ||
|
|
be2d97b559 | ||
|
|
a3f23fd4fb | ||
|
|
68f32d76ae | ||
|
|
bb596d6f8d | ||
|
|
ca573a828f | ||
|
|
e9cd840f5b | ||
|
|
7894c08b83 | ||
|
|
296c14afc0 | ||
|
|
b2b1518708 | ||
|
|
93967ad8f0 | ||
|
|
6082beb58d | ||
|
|
8309853439 | ||
|
|
2522a3be61 | ||
|
|
19b38a02fa | ||
|
|
4935e23271 | ||
|
|
a4f9a25007 | ||
|
|
795a0b81ee | ||
|
|
e5268a0449 | ||
|
|
c81761e3e2 | ||
|
|
8fbb4e1e60 | ||
|
|
937aa5c70e | ||
|
|
db647ab4d5 | ||
|
|
98652bfd89 | ||
|
|
63010bc5e9 | ||
|
|
dd388a51e0 | ||
|
|
22f109bab1 | ||
|
|
67e2d9c730 | ||
|
|
07fb941758 | ||
|
|
6fca35adae | ||
|
|
ac9b26cc57 | ||
|
|
31aa397492 | ||
|
|
b77739b5ca | ||
|
|
722d5b2319 | ||
|
|
91ace759c5 | ||
|
|
3d8d2fc0c6 | ||
|
|
58154d76ae | ||
|
|
38feb90f9e | ||
|
|
4632839fc5 | ||
|
|
26ea4fdcc9 | ||
|
|
3637d7a54c | ||
|
|
583e20dc9d | ||
|
|
bb08880c4a | ||
|
|
948487518d | ||
|
|
be5d158761 | ||
|
|
4f2681500a | ||
|
|
00237d68e4 | ||
|
|
20325a2b5e | ||
|
|
be40e828a9 | ||
|
|
916ef43f7f | ||
|
|
2b01d2e06b | ||
|
|
0553486e55 | ||
|
|
4fa8c3f9ab | ||
|
|
543c77fe00 | ||
|
|
e77ce8d481 | ||
|
|
285867f8c6 | ||
|
|
b2898cf742 | ||
|
|
31625befd1 | ||
|
|
394535c2e8 | ||
|
|
16576d37b1 | ||
|
|
53bd2d71a0 | ||
|
|
df4b44d09e | ||
|
|
b6b3429b62 | ||
|
|
0adb264c9b | ||
|
|
445c3a67db | ||
|
|
06c4b661f7 | ||
|
|
b8dffe0eb0 | ||
|
|
85772ac7f7 | ||
|
|
3ec72f88e2 | ||
|
|
4f4f927b97 | ||
|
|
46fab55378 | ||
|
|
1d44f75659 | ||
|
|
a4416b885a | ||
|
|
209c2e0ceb | ||
|
|
324aada2bc | ||
|
|
6f58e01f8e | ||
|
|
50d60fad89 | ||
|
|
7286568447 | ||
|
|
3116fd34cf | ||
|
|
f70815dd8e | ||
|
|
47151e77e1 | ||
|
|
5a1f96d35c | ||
|
|
fe03bf7ce0 | ||
|
|
d09160fff8 | ||
|
|
f46c8cdc4a | ||
|
|
db57f83922 | ||
|
|
bbf0ac83a1 | ||
|
|
a9714727b1 |
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -12,7 +12,7 @@ Have you searched for similar issues before posting it?
|
||||
If you have discovered a bug in the bot, please [search the issue tracker](https://github.com/freqtrade/freqtrade/issues?q=is%3Aissue).
|
||||
If it hasn't been reported, please create a new issue.
|
||||
|
||||
Please do not use bug reports to request new features.
|
||||
Please do not use the bug report template to request new features.
|
||||
-->
|
||||
|
||||
## Describe your environment
|
||||
|
||||
7
.github/ISSUE_TEMPLATE/question.md
vendored
7
.github/ISSUE_TEMPLATE/question.md
vendored
@@ -8,9 +8,12 @@ assignees: ''
|
||||
---
|
||||
<!--
|
||||
Have you searched for similar issues before posting it?
|
||||
Did you have a VERY good look at the [documentation](https://www.freqtrade.io/en/latest/) and are sure that the question is not explained there
|
||||
Did you have a VERY good look at the [documentation](https://www.freqtrade.io/) and are sure that the question is not explained there
|
||||
|
||||
Please do not use the question template to report bugs or to request new features.
|
||||
|
||||
Has your strategy or configuration been generated by an AI model, and is now not working?
|
||||
Please consult the documentation. We'll close such issues and point to the documentation.
|
||||
-->
|
||||
|
||||
## Describe your environment
|
||||
@@ -22,4 +25,4 @@ Please do not use the question template to report bugs or to request new feature
|
||||
|
||||
## Your question
|
||||
|
||||
*Ask the question you have not been able to find an answer in the [Documentation](https://www.freqtrade.io/en/latest/)*
|
||||
*Ask the question you have not been able to find an answer in the [Documentation](https://www.freqtrade.io/)*
|
||||
|
||||
@@ -34,7 +34,7 @@ jobs:
|
||||
run: python build_helpers/binance_update_lev_tiers.py
|
||||
|
||||
|
||||
- uses: peter-evans/create-pull-request@v7
|
||||
- uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
token: ${{ secrets.REPO_SCOPED_TOKEN }}
|
||||
add-paths: freqtrade/exchange/binance_leverage_tiers.json
|
||||
|
||||
44
.github/workflows/ci.yml
vendored
44
.github/workflows/ci.yml
vendored
@@ -38,8 +38,9 @@ jobs:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
uses: astral-sh/setup-uv@c7f87aa956e4c323abf06d5dec078e358f6b4d04 # v6.0.0
|
||||
with:
|
||||
activate-environment: true
|
||||
enable-cache: true
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache-dependency-glob: "requirements**.txt"
|
||||
@@ -73,17 +74,17 @@ jobs:
|
||||
python build_helpers/freqtrade_client_version_align.py
|
||||
|
||||
- name: Tests
|
||||
if: (!(runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-22.04'))
|
||||
if: (!(runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-24.04'))
|
||||
run: |
|
||||
pytest --random-order
|
||||
|
||||
- name: Tests with Coveralls
|
||||
if: (runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-22.04')
|
||||
if: (runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-24.04')
|
||||
run: |
|
||||
pytest --random-order --cov=freqtrade --cov=freqtrade_client --cov-config=.coveragerc
|
||||
|
||||
- name: Coveralls
|
||||
if: (runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-22.04')
|
||||
if: (runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-24.04')
|
||||
env:
|
||||
# Coveralls token. Not used as secret due to github not providing secrets to forked repositories
|
||||
COVERALLS_REPO_TOKEN: 6D1m0xupS3FgutfuGao8keFf9Hc0FpIXu
|
||||
@@ -139,11 +140,12 @@ jobs:
|
||||
ruff format --check
|
||||
|
||||
- name: Mypy
|
||||
if: matrix.os == 'ubuntu-24.04'
|
||||
run: |
|
||||
mypy freqtrade scripts tests
|
||||
|
||||
- name: Discord notification
|
||||
uses: rjstone/discord-webhook-notify@v1
|
||||
uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
|
||||
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
|
||||
with:
|
||||
severity: error
|
||||
@@ -169,8 +171,9 @@ jobs:
|
||||
check-latest: true
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
uses: astral-sh/setup-uv@c7f87aa956e4c323abf06d5dec078e358f6b4d04 # v6.0.0
|
||||
with:
|
||||
activate-environment: true
|
||||
enable-cache: true
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache-dependency-glob: "requirements**.txt"
|
||||
@@ -264,11 +267,12 @@ jobs:
|
||||
ruff format --check
|
||||
|
||||
- name: Mypy
|
||||
if: matrix.os == 'macos-15'
|
||||
run: |
|
||||
mypy freqtrade scripts
|
||||
|
||||
- name: Discord notification
|
||||
uses: rjstone/discord-webhook-notify@v1
|
||||
uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
|
||||
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
|
||||
with:
|
||||
severity: info
|
||||
@@ -294,8 +298,9 @@ jobs:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
uses: astral-sh/setup-uv@c7f87aa956e4c323abf06d5dec078e358f6b4d04 # v6.0.0
|
||||
with:
|
||||
activate-environment: true
|
||||
enable-cache: true
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache-dependency-glob: "requirements**.txt"
|
||||
@@ -361,7 +366,7 @@ jobs:
|
||||
shell: powershell
|
||||
|
||||
- name: Discord notification
|
||||
uses: rjstone/discord-webhook-notify@v1
|
||||
uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
|
||||
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
|
||||
with:
|
||||
severity: error
|
||||
@@ -395,7 +400,7 @@ jobs:
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- uses: pre-commit/action@v3.0.1
|
||||
- uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1
|
||||
|
||||
docs-check:
|
||||
runs-on: ubuntu-22.04
|
||||
@@ -419,7 +424,7 @@ jobs:
|
||||
mkdocs build
|
||||
|
||||
- name: Discord notification
|
||||
uses: rjstone/discord-webhook-notify@v1
|
||||
uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
|
||||
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
|
||||
with:
|
||||
severity: error
|
||||
@@ -441,8 +446,9 @@ jobs:
|
||||
python-version: "3.12"
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
uses: astral-sh/setup-uv@c7f87aa956e4c323abf06d5dec078e358f6b4d04 # v6.0.0
|
||||
with:
|
||||
activate-environment: true
|
||||
enable-cache: true
|
||||
python-version: "3.12"
|
||||
cache-dependency-glob: "requirements**.txt"
|
||||
@@ -499,14 +505,14 @@ jobs:
|
||||
|
||||
- name: Check user permission
|
||||
id: check
|
||||
uses: scherermichael-oss/action-has-permission@1.0.6
|
||||
uses: scherermichael-oss/action-has-permission@136e061bfe093832d87f090dd768e14e27a740d3 # 1.0.6
|
||||
with:
|
||||
required-permission: write
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Discord notification
|
||||
uses: rjstone/discord-webhook-notify@v1
|
||||
uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
|
||||
if: always() && steps.check.outputs.has-permission && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
|
||||
with:
|
||||
severity: info
|
||||
@@ -578,7 +584,7 @@ jobs:
|
||||
merge-multiple: true
|
||||
|
||||
- name: Publish to PyPI (Test)
|
||||
uses: pypa/gh-action-pypi-publish@v1.12.4
|
||||
uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4
|
||||
with:
|
||||
repository-url: https://test.pypi.org/legacy/
|
||||
|
||||
@@ -607,7 +613,7 @@ jobs:
|
||||
merge-multiple: true
|
||||
|
||||
- name: Publish to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@v1.12.4
|
||||
uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4
|
||||
|
||||
|
||||
deploy-docker:
|
||||
@@ -648,11 +654,11 @@ jobs:
|
||||
docker version -f '{{.Server.Experimental}}'
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 #v3.10.0
|
||||
|
||||
- name: Available platforms
|
||||
run: echo ${PLATFORMS}
|
||||
@@ -701,7 +707,7 @@ jobs:
|
||||
build_helpers/publish_docker_arm64.sh
|
||||
|
||||
- name: Discord notification
|
||||
uses: rjstone/discord-webhook-notify@v1
|
||||
uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
|
||||
if: always() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false) && (github.event_name != 'schedule')
|
||||
with:
|
||||
severity: info
|
||||
|
||||
4
.github/workflows/devcontainer-build.yml
vendored
4
.github/workflows/devcontainer-build.yml
vendored
@@ -28,13 +28,13 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Pre-build dev container image
|
||||
uses: devcontainers/ci@v0.3
|
||||
uses: devcontainers/ci@8bf61b26e9c3a98f69cb6ce2f88d24ff59b785c6 # v0.3.19
|
||||
with:
|
||||
subFolder: .github
|
||||
imageName: ghcr.io/${{ github.repository }}-devcontainer
|
||||
|
||||
2
.github/workflows/docker-update-readme.yml
vendored
2
.github/workflows/docker-update-readme.yml
vendored
@@ -16,7 +16,7 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Docker Hub Description
|
||||
uses: peter-evans/dockerhub-description@v4
|
||||
uses: peter-evans/dockerhub-description@432a30c9e07499fd01da9f8a49f0faf9e0ca5b77 # v4.0.2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
2
.github/workflows/pre-commit-update.yml
vendored
2
.github/workflows/pre-commit-update.yml
vendored
@@ -28,7 +28,7 @@ jobs:
|
||||
- name: Run auto-update
|
||||
run: pre-commit autoupdate
|
||||
|
||||
- uses: peter-evans/create-pull-request@v7
|
||||
- uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
token: ${{ secrets.REPO_SCOPED_TOKEN }}
|
||||
add-paths: .pre-commit-config.yaml
|
||||
|
||||
@@ -1,8 +1,20 @@
|
||||
# See https://pre-commit.com for more information
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
repos:
|
||||
|
||||
- repo: local
|
||||
# Keep json schema in sync with the config schema
|
||||
# This will write the files - and fail pre-commit if a file has been changed.
|
||||
hooks:
|
||||
- id: Extract config json schema
|
||||
name: extract-config-json-schema
|
||||
entry: "python build_helpers/extract_config_json_schema.py"
|
||||
language: python
|
||||
pass_filenames: false
|
||||
additional_dependencies: ["python-rapidjson", "jsonschema"]
|
||||
|
||||
- repo: https://github.com/pycqa/flake8
|
||||
rev: "7.1.2"
|
||||
rev: "7.2.0"
|
||||
hooks:
|
||||
- id: flake8
|
||||
additional_dependencies: [Flake8-pyproject]
|
||||
@@ -16,10 +28,10 @@ repos:
|
||||
additional_dependencies:
|
||||
- types-cachetools==5.5.0.20240820
|
||||
- types-filelock==3.2.7
|
||||
- types-requests==2.32.0.20250306
|
||||
- types-requests==2.32.0.20250328
|
||||
- types-tabulate==0.9.0.20241207
|
||||
- types-python-dateutil==2.9.0.20241206
|
||||
- SQLAlchemy==2.0.39
|
||||
- SQLAlchemy==2.0.40
|
||||
# stages: [push]
|
||||
|
||||
- repo: https://github.com/pycqa/isort
|
||||
@@ -31,7 +43,7 @@ repos:
|
||||
|
||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: 'v0.11.2'
|
||||
rev: 'v0.11.7'
|
||||
hooks:
|
||||
- id: ruff
|
||||
- id: ruff-format
|
||||
@@ -70,6 +82,6 @@ repos:
|
||||
|
||||
# Ensure github actions remain safe
|
||||
- repo: https://github.com/woodruffw/zizmor-pre-commit
|
||||
rev: v1.5.2
|
||||
rev: v1.6.0
|
||||
hooks:
|
||||
- id: zizmor
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM python:3.12.9-slim-bookworm as base
|
||||
FROM python:3.12.10-slim-bookworm as base
|
||||
|
||||
# Setup env
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
@@ -12,7 +12,12 @@ secret = os.environ.get("FREQTRADE__EXCHANGE__SECRET")
|
||||
proxy = os.environ.get("CI_WEB_PROXY")
|
||||
|
||||
exchange = ccxt.binance(
|
||||
{"apiKey": key, "secret": secret, "httpsProxy": proxy, "options": {"defaultType": "swap"}}
|
||||
{
|
||||
"apiKey": key,
|
||||
"secret": secret,
|
||||
"httpsProxy": proxy,
|
||||
"options": {"defaultType": "swap"},
|
||||
}
|
||||
)
|
||||
_ = exchange.load_markets()
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import subprocess
|
||||
import subprocess # noqa: S404, RUF100
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
|
||||
@@ -4,10 +4,23 @@ from pathlib import Path
|
||||
|
||||
import rapidjson
|
||||
|
||||
from freqtrade.configuration.config_schema import CONF_SCHEMA
|
||||
|
||||
|
||||
def extract_config_json_schema():
|
||||
try:
|
||||
# Try to import from the installed package
|
||||
from freqtrade.config_schema import CONF_SCHEMA
|
||||
except ImportError:
|
||||
# If freqtrade is not installed, add the parent directory to sys.path
|
||||
# to import directly from the source
|
||||
import sys
|
||||
|
||||
script_dir = Path(__file__).parent
|
||||
freqtrade_dir = script_dir.parent
|
||||
sys.path.insert(0, str(freqtrade_dir))
|
||||
|
||||
# Now try to import from the source
|
||||
from freqtrade.config_schema import CONF_SCHEMA
|
||||
|
||||
schema_filename = Path(__file__).parent / "schema.json"
|
||||
with schema_filename.open("w") as f:
|
||||
rapidjson.dump(CONF_SCHEMA, f, indent=2)
|
||||
|
||||
Binary file not shown.
@@ -1032,6 +1032,7 @@
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"running",
|
||||
"paused",
|
||||
"stopped"
|
||||
]
|
||||
},
|
||||
@@ -1515,6 +1516,14 @@
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"indicator_periods_candles": {
|
||||
"description": "Time periods to calculate indicators for. The indicators are added to the base indicator dataset.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "number",
|
||||
"minimum": 1
|
||||
}
|
||||
},
|
||||
"use_SVM_to_remove_outliers": {
|
||||
"description": "Use SVM to remove outliers from the features.",
|
||||
"type": "boolean",
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM python:3.11.11-slim-bookworm as base
|
||||
FROM python:3.11.12-slim-bookworm as base
|
||||
|
||||
# Setup env
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
@@ -177,7 +177,7 @@ sudo loginctl enable-linger "$USER"
|
||||
If you run the bot as a service, you can use systemd service manager as a software watchdog monitoring freqtrade bot
|
||||
state and restarting it in the case of failures. If the `internals.sd_notify` parameter is set to true in the
|
||||
configuration or the `--sd-notify` command line option is used, the bot will send keep-alive ping messages to systemd
|
||||
using the sd_notify (systemd notifications) protocol and will also tell systemd its current state (Running or Stopped)
|
||||
using the sd_notify (systemd notifications) protocol and will also tell systemd its current state (Running, Paused or Stopped)
|
||||
when it changes.
|
||||
|
||||
The `freqtrade.service.watchdog` file contains an example of the service unit configuration file which uses systemd
|
||||
@@ -189,13 +189,15 @@ as the watchdog.
|
||||
## Advanced Logging
|
||||
|
||||
Freqtrade uses the default logging module provided by python.
|
||||
Python allows for extensive [logging configuration](https://docs.python.org/3/library/logging.config.html#logging.config.dictConfig) in this regards - way more than what can be covered here.
|
||||
Python allows for extensive [logging configuration](https://docs.python.org/3/library/logging.config.html#logging.config.dictConfig) in this regard - way more than what can be covered here.
|
||||
|
||||
Default logging (Colored terminal output) is setup by default if no `log_config` is provided.
|
||||
Default logging format (coloured terminal output) is set up by default if no `log_config` is provided in your freqtrade configuration.
|
||||
Using `--logfile logfile.log` will enable the RotatingFileHandler.
|
||||
If you're not content with the log format - or with the default settings provided for the RotatingFileHandler, you can customize logging to your liking.
|
||||
|
||||
The default configuration looks roughly like the below - with the file handler being provided - but not enabled.
|
||||
If you're not content with the log format, or with the default settings provided for the RotatingFileHandler, you can customize logging to your liking by adding the `log_config` configuration to your freqtrade configuration file(s).
|
||||
|
||||
The default configuration looks roughly like the below, with the file handler being provided but not enabled as the `filename` is commented out.
|
||||
Uncomment this line and supply a valid path/filename to enable it.
|
||||
|
||||
``` json hl_lines="5-7 13-16 27"
|
||||
{
|
||||
@@ -237,12 +239,12 @@ The default configuration looks roughly like the below - with the file handler b
|
||||
Highlighted lines in the above code-block define the Rich handler and belong together.
|
||||
The formatter "standard" and "file" will belong to the FileHandler.
|
||||
|
||||
Each handler must use one of the defined formatters (by name) - and it's class must be available and a valid logging class.
|
||||
To actually use a handler - it must be in the "handlers" section inside the "root" segment.
|
||||
Each handler must use one of the defined formatters (by name), its class must be available, and must be a valid logging class.
|
||||
To actually use a handler, it must be in the "handlers" section inside the "root" segment.
|
||||
If this section is left out, freqtrade will provide no output (in the non-configured handler, anyway).
|
||||
|
||||
!!! Tip "Explicit log configuration"
|
||||
We recommend to extract the logging configuration from your main configuration, and provide it to your bot via [multiple configuration files](configuration.md#multiple-configuration-files) functionality. This will avoid unnecessary code duplication.
|
||||
We recommend to extract the logging configuration from your main freqtrade configuration file, and provide it to your bot via [multiple configuration files](configuration.md#multiple-configuration-files) functionality. This will avoid unnecessary code duplication.
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -59,7 +59,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -30,7 +30,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -89,7 +89,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -45,7 +45,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -34,7 +34,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -63,7 +63,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -50,7 +50,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -55,7 +55,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -37,7 +37,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -79,6 +79,7 @@ options:
|
||||
SortinoHyperOptLoss, SortinoHyperOptLossDaily,
|
||||
CalmarHyperOptLoss, MaxDrawDownHyperOptLoss,
|
||||
MaxDrawDownRelativeHyperOptLoss,
|
||||
MaxDrawDownPerPairHyperOptLoss,
|
||||
ProfitDrawDownHyperOptLoss, MultiMetricHyperOptLoss
|
||||
--disable-param-export
|
||||
Disable automatic hyperopt parameter export.
|
||||
@@ -102,7 +103,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
```
|
||||
usage: freqtrade install-ui [-h] [--erase] [--ui-version UI_VERSION]
|
||||
usage: freqtrade install-ui [-h] [--erase] [--prerelease]
|
||||
[--ui-version UI_VERSION]
|
||||
|
||||
options:
|
||||
-h, --help show this help message and exit
|
||||
--erase Clean UI folder, don't download new version.
|
||||
--prerelease Install the latest pre-release version of FreqUI. This
|
||||
is not recommended for production use.
|
||||
--ui-version UI_VERSION
|
||||
Specify a specific version of FreqUI to install. Not
|
||||
specifying this installs the latest version.
|
||||
|
||||
@@ -41,7 +41,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -22,7 +22,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -24,7 +24,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -24,7 +24,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -39,7 +39,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -39,7 +39,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -27,7 +27,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -23,7 +23,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -89,7 +89,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -63,7 +63,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -49,7 +49,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -41,7 +41,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -30,7 +30,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -34,7 +34,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -36,7 +36,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -41,7 +41,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -20,7 +20,9 @@ Common arguments:
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
Path to the base directory of the exchange with
|
||||
historical backtesting data. To see futures data, use
|
||||
trading-mode additionally.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
@@ -205,7 +205,7 @@ Mandatory parameters are marked as **Required**, which means that they are requi
|
||||
| `exit_pricing.use_order_book` | Enable exiting of open trades using [Order Book Exit](#exit-price-with-orderbook-enabled). <br> *Defaults to `true`.*<br> **Datatype:** Boolean
|
||||
| `exit_pricing.order_book_top` | Bot will use the top N rate in Order Book "price_side" to exit. I.e. a value of 2 will allow the bot to pick the 2nd ask rate in [Order Book Exit](#exit-price-with-orderbook-enabled)<br>*Defaults to `1`.* <br> **Datatype:** Positive Integer
|
||||
| `custom_price_max_distance_ratio` | Configure maximum distance ratio between current and custom entry or exit price. <br>*Defaults to `0.02` (2%).*<br> **Datatype:** Positive float
|
||||
| | **TODO**
|
||||
| | **Order/Signal handling**
|
||||
| `use_exit_signal` | Use exit signals produced by the strategy in addition to the `minimal_roi`. <br>Setting this to false disables the usage of `"exit_long"` and `"exit_short"` columns. Has no influence on other exit methods (Stoploss, ROI, callbacks). [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `true`.* <br> **Datatype:** Boolean
|
||||
| `exit_profit_only` | Wait until the bot reaches `exit_profit_offset` before taking an exit decision. [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `false`.* <br> **Datatype:** Boolean
|
||||
| `exit_profit_offset` | Exit-signal is only active above this value. Only active in combination with `exit_profit_only=True`. [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `0.0`.* <br> **Datatype:** Float (as ratio)
|
||||
@@ -266,7 +266,7 @@ Mandatory parameters are marked as **Required**, which means that they are requi
|
||||
| `bot_name` | Name of the bot. Passed via API to a client - can be shown to distinguish / name bots.<br> *Defaults to `freqtrade`*<br> **Datatype:** String
|
||||
| `external_message_consumer` | Enable [Producer/Consumer mode](producer-consumer.md) for more details. <br> **Datatype:** Dict
|
||||
| | **Other**
|
||||
| `initial_state` | Defines the initial application state. If set to stopped, then the bot has to be explicitly started via `/start` RPC command. <br>*Defaults to `stopped`.* <br> **Datatype:** Enum, either `stopped` or `running`
|
||||
| `initial_state` | Defines the initial application state. If set to stopped, then the bot has to be explicitly started via `/start` RPC command. <br>*Defaults to `stopped`.* <br> **Datatype:** Enum, either `running`, `paused` or `stopped`
|
||||
| `force_entry_enable` | Enables the RPC Commands to force a Trade entry. More information below. <br> **Datatype:** Boolean
|
||||
| `disable_dataframe_checks` | Disable checking the OHLCV dataframe returned from the strategy methods for correctness. Only use when intentionally changing the dataframe and understand what you are doing. [Strategy Override](#parameters-in-the-strategy).<br> *Defaults to `False`*. <br> **Datatype:** Boolean
|
||||
| `internals.process_throttle_secs` | Set the process throttle, or minimum loop duration for one bot iteration loop. Value in second. <br>*Defaults to `5` seconds.* <br> **Datatype:** Positive Integer
|
||||
@@ -281,7 +281,7 @@ Mandatory parameters are marked as **Required**, which means that they are requi
|
||||
| `add_config_files` | Additional config files. These files will be loaded and merged with the current config file. The files are resolved relative to the initial file.<br> *Defaults to `[]`*. <br> **Datatype:** List of strings
|
||||
| `dataformat_ohlcv` | Data format to use to store historical candle (OHLCV) data. <br> *Defaults to `feather`*. <br> **Datatype:** String
|
||||
| `dataformat_trades` | Data format to use to store historical trades data. <br> *Defaults to `feather`*. <br> **Datatype:** String
|
||||
| `reduce_df_footprint` | Recast all numeric columns to float32/int32, with the objective of reducing ram/disk usage (and decreasing train/inference timing in FreqAI). (Currently only affects FreqAI use-cases) <br> **Datatype:** Boolean. <br> Default: `False`.
|
||||
| `reduce_df_footprint` | Recast all numeric columns to float32/int32, with the objective of reducing ram/disk usage (and decreasing train/inference timing backtesting/hyperopt and in FreqAI). <br> **Datatype:** Boolean. <br> Default: `False`.
|
||||
| `log_config` | Dictionary containing the log config for python logging. [more info](advanced-setup.md#advanced-logging) <br> **Datatype:** dict. <br> Default: `FtRichHandler`
|
||||
|
||||
### Parameters in the strategy
|
||||
|
||||
@@ -363,6 +363,10 @@ Hyperliquid handles deposits and withdrawals on the Arbitrum One chain, a Layer
|
||||
* Create a different software wallet, only transfer the funds you want to trade with to that wallet, and use that wallet to trade on Hyperliquid.
|
||||
* If you have funds you don't want to use for trading (after making a profit for example), transfer them back to your hardware wallet.
|
||||
|
||||
### Historic Hyperliquid data
|
||||
|
||||
The Hyperliquid API does not provide historic data beyond the single call to fetch current data, so downloading data is not possible, as the downloaded data would not constitute proper historic data.
|
||||
|
||||
## All exchanges
|
||||
|
||||
Should you experience constant errors with Nonce (like `InvalidNonce`), it is best to regenerate the API keys. Resetting Nonce is difficult and it's usually easier to regenerate the API keys.
|
||||
|
||||
@@ -258,6 +258,8 @@ freqtrade trade --config config_examples/config_freqai.example.json --strategy F
|
||||
We do provide an explicit docker-compose file for this in `docker/docker-compose-freqai.yml` - which can be used via `docker compose -f docker/docker-compose-freqai.yml run ...` - or can be copied to replace the original docker file.
|
||||
This docker-compose file also contains a (disabled) section to enable GPU resources within docker containers. This obviously assumes the system has GPU resources available.
|
||||
|
||||
PyTorch dropped support for macOS x64 (intel based Apple devices) in version 2.3. Subsequently, freqtrade also dropped support for PyTorch on this platform.
|
||||
|
||||
### Structure
|
||||
|
||||
#### Model
|
||||
|
||||
@@ -471,6 +471,7 @@ Currently, the following loss functions are builtin:
|
||||
* `SortinoHyperOptLossDaily` - optimizes Sortino Ratio calculated on **daily** trade returns relative to **downside** standard deviation.
|
||||
* `MaxDrawDownHyperOptLoss` - Optimizes Maximum absolute drawdown.
|
||||
* `MaxDrawDownRelativeHyperOptLoss` - Optimizes both maximum absolute drawdown while also adjusting for maximum relative drawdown.
|
||||
* `MaxDrawDownPerPairHyperOptLoss` - Calculates the profit/drawdown ratio per pair and returns the worst result as objective, forcing hyperopt to optimize the parameters for all pairs in the pairlist. This way, we prevent one or more pairs with good results from inflating the metrics, while the pairs with poor results are not represented and therefore not optimized.
|
||||
* `CalmarHyperOptLoss` - Optimizes Calmar Ratio calculated on trade returns relative to max drawdown.
|
||||
* `ProfitDrawDownHyperOptLoss` - Optimizes by max Profit & min Drawdown objective. `DRAWDOWN_MULT` variable within the hyperoptloss file can be adjusted to be stricter or more flexible on drawdown purposes.
|
||||
* `MultiMetricHyperOptLoss` - Optimizes by several key metrics to achieve balanced performance. The primary focus is on maximizing Profit and minimizing Drawdown, while also considering additional metrics such as Profit Factor, Expectancy Ratio and Winrate. Moreover, it applies a penalty for epochs with a low number of trades, encouraging strategies with adequate trade frequency.
|
||||
|
||||
@@ -3,7 +3,7 @@ This section will highlight a few projects from members of the community.
|
||||
The projects below are for the most part not maintained by the freqtrade team, therefore use your own caution before using them.
|
||||
|
||||
- [Example freqtrade strategies](https://github.com/freqtrade/freqtrade-strategies/)
|
||||
- [FrequentHippo - Grafana dashboard with dry/live runs and backtests](http://frequenthippo.ddns.net:3000/) (by hippocritical).
|
||||
- [FrequentHippo - Statistics of dry/live runs and backtests](http://frequenthippo.ddns.net) (by hippocritical).
|
||||
- [Online pairlist generator](https://remotepairlist.com/) (by Blood4rc).
|
||||
- [Freqtrade Backtesting Project](https://strat.ninja/) (by Blood4rc).
|
||||
- [Freqtrade analysis notebook](https://github.com/froggleston/freqtrade_analysis_notebook) (by Froggleston).
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
markdown==3.7
|
||||
markdown==3.8
|
||||
mkdocs==1.6.1
|
||||
mkdocs-material==9.6.9
|
||||
mkdocs-material==9.6.12
|
||||
mdx_truly_sane_lists==1.3
|
||||
pymdown-extensions==10.14.3
|
||||
pymdown-extensions==10.15
|
||||
jinja2==3.1.6
|
||||
mike==2.1.3
|
||||
|
||||
@@ -268,6 +268,9 @@ show_config
|
||||
start
|
||||
Start the bot if it's in the stopped state.
|
||||
|
||||
pause
|
||||
Pause the bot if it's in the running state. If triggered in the stopped state, open positions will still be handled.
|
||||
|
||||
stats
|
||||
Return the stats report (durations, sell-reasons).
|
||||
|
||||
@@ -333,6 +336,7 @@ All endpoints in the below table need to be prefixed with the base URL of the AP
|
||||
|-----------|--------|--------------------------|
|
||||
| `/ping` | GET | Simple command testing the API Readiness - requires no authentication.
|
||||
| `/start` | POST | Starts the trader.
|
||||
| `/pause` | POST | Pause the trader. Gracefully handle open trades according to their rules. Do not enter new positions.
|
||||
| `/stop` | POST | Stops the trader.
|
||||
| `/stopbuy` | POST | Stops the trader from opening new trades. Gracefully closes open trades according to their rules.
|
||||
| `/reload_config` | POST | Reloads the configuration file.
|
||||
|
||||
@@ -165,18 +165,23 @@ If there is any significant difference, verify that your entry and exit signals
|
||||
|
||||
## Controlling or monitoring a running bot
|
||||
|
||||
Once your bot is running in dry or live mode, Freqtrade has five mechanisms to control or monitor a running bot:
|
||||
Once your bot is running in dry or live mode, Freqtrade has six mechanisms to control or monitor a running bot:
|
||||
|
||||
- **[FreqUI](freq-ui.md)**: The easiest to get started with, FreqUI is a web interface to see and control current activity of your bot.
|
||||
- **[Telegram](telegram-usage.md)**: On mobile devices, Telegram integration is available to get alerts about your bot activity and to control certain aspects.
|
||||
- **[FTUI](https://github.com/freqtrade/ftui)**: FTUI is a terminal (command line) interface to Freqtrade, and allows monitoring of a running bot only.
|
||||
- **[REST API](rest-api.md)**: The REST API allows programmers to develop their own tools to interact with a Freqtrade bot.
|
||||
- **[freqtrade-client](rest-api.md#consuming-the-api)**: A python implementation of the REST API, making it easy to make requests and consume bot responses from your python apps or the command line.
|
||||
- **[REST API endpoints](rest-api.md#available-endpoints)**: The REST API allows programmers to develop their own tools to interact with a Freqtrade bot.
|
||||
- **[Webhooks](webhook-config.md)**: Freqtrade can send information to other services, e.g. discord, by webhooks.
|
||||
|
||||
### Logs
|
||||
|
||||
Freqtrade generates extensive debugging logs to help you understand what's happening. Please familiarise yourself with the information and error messages you might see in your bot logs.
|
||||
|
||||
Logging by default occurs on standard out (the command line). If you want to write out to a file instead, many freqtrade commands, including the `trade` command, accept the `--logfile` option to write to a file.
|
||||
|
||||
Check the [FAQ](faq.md#how-do-i-search-the-bot-logs-for-something) for examples.
|
||||
|
||||
## Final Thoughts
|
||||
|
||||
Algo trading is difficult, and most public strategies are not good performers due to the time and effort to make a strategy work profitably in multiple scenarios.
|
||||
|
||||
@@ -188,7 +188,7 @@ You can create your own keyboard in `config.json`:
|
||||
!!! Note "Supported Commands"
|
||||
Only the following commands are allowed. Command arguments are not supported!
|
||||
|
||||
`/start`, `/stop`, `/status`, `/status table`, `/trades`, `/profit`, `/performance`, `/daily`, `/stats`, `/count`, `/locks`, `/balance`, `/stopentry`, `/reload_config`, `/show_config`, `/logs`, `/whitelist`, `/blacklist`, `/edge`, `/help`, `/version`, `/marketdir`
|
||||
`/start`, `/pause`, `/stop`, `/status`, `/status table`, `/trades`, `/profit`, `/performance`, `/daily`, `/stats`, `/count`, `/locks`, `/balance`, `/stopentry`, `/reload_config`, `/show_config`, `/logs`, `/whitelist`, `/blacklist`, `/edge`, `/help`, `/version`, `/marketdir`
|
||||
|
||||
## Telegram commands
|
||||
|
||||
@@ -200,8 +200,8 @@ official commands. You can ask at any moment for help with `/help`.
|
||||
|----------|-------------|
|
||||
| **System commands**
|
||||
| `/start` | Starts the trader
|
||||
| `/pause | /stopentry | /stopbuy` | Pause the trader. Gracefully handle open trades according to their rules. Do not enter new positions.
|
||||
| `/stop` | Stops the trader
|
||||
| `/stopbuy | /stopentry` | Stops the trader from opening new trades. Gracefully closes open trades according to their rules.
|
||||
| `/reload_config` | Reloads the configuration file
|
||||
| `/show_config` | Shows part of the current configuration with relevant settings to operation
|
||||
| `/logs [limit]` | Show last log messages.
|
||||
@@ -250,25 +250,27 @@ Below, example of Telegram message you will receive for each command.
|
||||
|
||||
> **Status:** `running`
|
||||
|
||||
### /pause | /stopentry | /stopbuy
|
||||
|
||||
> **Status:** `paused, no more entries will occur from now. Run /start to enable entries.`
|
||||
|
||||
Prevents the bot from opening new trades by changing the state to `paused`.
|
||||
Open trades will continue to be managed according to their regular rules (ROI/exit signals, stop-loss, etc.).
|
||||
Note that position adjustment remains active, but only on the exit side — meaning that when the bot is `paused`, it can only reduce the position size of open trades.
|
||||
|
||||
After this, give the bot time to close off open trades (can be checked via `/status table`).
|
||||
Once all positions are closed, run `/stop` to completely stop the bot.
|
||||
|
||||
Use `/start` to resume the bot to the `running` state, allowing it to open new positions.
|
||||
|
||||
!!! Warning
|
||||
The pause/stopentry signal is ONLY active while the bot is running, and is not persisted anywhere, so restarting the bot will cause this to reset.
|
||||
|
||||
### /stop
|
||||
|
||||
> `Stopping trader ...`
|
||||
> **Status:** `stopped`
|
||||
|
||||
### /stopbuy
|
||||
|
||||
> **status:** `Setting max_open_trades to 0. Run /reload_config to reset.`
|
||||
|
||||
Prevents the bot from opening new trades by temporarily setting "max_open_trades" to 0. Open trades will be handled via their regular rules (ROI / Sell-signal, stoploss, ...).
|
||||
|
||||
After this, give the bot time to close off open trades (can be checked via `/status table`).
|
||||
Once all positions are sold, run `/stop` to completely stop the bot.
|
||||
|
||||
`/reload_config` resets "max_open_trades" to the value set in the configuration and resets this command.
|
||||
|
||||
!!! Warning
|
||||
The stop-buy signal is ONLY active while the bot is running, and is not persisted anywhere, so restarting the bot will cause this to reset.
|
||||
|
||||
### /status
|
||||
|
||||
For each open trade, the bot will send you the following message.
|
||||
|
||||
@@ -25,6 +25,7 @@ The following attributes / properties are available for each individual trade -
|
||||
| `close_date_utc` | datetime | Timestamp when trade was closed - in UTC. |
|
||||
| `close_profit` | float | Relative profit at the time of trade closure. `0.01` == 1% |
|
||||
| `close_profit_abs` | float | Absolute profit (in stake currency) at the time of trade closure. |
|
||||
| `realized_profit` | float | Absolute already realized profit (in stake currency) while the trade is still open. |
|
||||
| `leverage` | float | Leverage used for this trade - defaults to 1.0 in spot markets. |
|
||||
| `enter_tag` | string | Tag provided on entry via the `enter_tag` column in the dataframe. |
|
||||
| `is_short` | boolean | True for short trades, False otherwise. |
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
"""Freqtrade bot"""
|
||||
|
||||
__version__ = "2025.3"
|
||||
__version__ = "2025.4"
|
||||
|
||||
if "dev" in __version__:
|
||||
from pathlib import Path
|
||||
|
||||
try:
|
||||
import subprocess # noqa: S404
|
||||
import subprocess # noqa: S404, RUF100
|
||||
|
||||
freqtrade_basedir = Path(__file__).parent
|
||||
|
||||
|
||||
@@ -21,6 +21,8 @@ ARGS_COMMON = [
|
||||
"user_data_dir",
|
||||
]
|
||||
|
||||
ARGS_MAIN = ["version_main"]
|
||||
|
||||
ARGS_STRATEGY = [
|
||||
"strategy",
|
||||
"strategy_path",
|
||||
@@ -43,7 +45,8 @@ ARGS_COMMON_OPTIMIZE = [
|
||||
"pairs",
|
||||
]
|
||||
|
||||
ARGS_BACKTEST = ARGS_COMMON_OPTIMIZE + [
|
||||
ARGS_BACKTEST = [
|
||||
*ARGS_COMMON_OPTIMIZE,
|
||||
"position_stacking",
|
||||
"enable_protections",
|
||||
"dry_run_wallet",
|
||||
@@ -56,7 +59,8 @@ ARGS_BACKTEST = ARGS_COMMON_OPTIMIZE + [
|
||||
"freqai_backtest_live_models",
|
||||
]
|
||||
|
||||
ARGS_HYPEROPT = ARGS_COMMON_OPTIMIZE + [
|
||||
ARGS_HYPEROPT = [
|
||||
*ARGS_COMMON_OPTIMIZE,
|
||||
"hyperopt",
|
||||
"hyperopt_path",
|
||||
"position_stacking",
|
||||
@@ -76,7 +80,7 @@ ARGS_HYPEROPT = ARGS_COMMON_OPTIMIZE + [
|
||||
"analyze_per_epoch",
|
||||
]
|
||||
|
||||
ARGS_EDGE = ARGS_COMMON_OPTIMIZE + ["stoploss_range"]
|
||||
ARGS_EDGE = [*ARGS_COMMON_OPTIMIZE, "stoploss_range"]
|
||||
|
||||
ARGS_LIST_STRATEGIES = [
|
||||
"strategy_path",
|
||||
@@ -125,7 +129,7 @@ ARGS_BUILD_STRATEGY = ["user_data_dir", "strategy", "strategy_path", "template"]
|
||||
|
||||
ARGS_CONVERT_DATA_TRADES = ["pairs", "format_from_trades", "format_to", "erase", "exchange"]
|
||||
ARGS_CONVERT_DATA = ["pairs", "format_from", "format_to", "erase", "exchange"]
|
||||
ARGS_CONVERT_DATA_OHLCV = ARGS_CONVERT_DATA + ["timeframes", "trading_mode", "candle_types"]
|
||||
ARGS_CONVERT_DATA_OHLCV = [*ARGS_CONVERT_DATA, "timeframes", "trading_mode", "candle_types"]
|
||||
|
||||
ARGS_CONVERT_TRADES = [
|
||||
"pairs",
|
||||
@@ -191,7 +195,7 @@ ARGS_PLOT_PROFIT = [
|
||||
|
||||
ARGS_CONVERT_DB = ["db_url", "db_url_from"]
|
||||
|
||||
ARGS_INSTALL_UI = ["erase_ui_only", "ui_version"]
|
||||
ARGS_INSTALL_UI = ["erase_ui_only", "ui_prerelease", "ui_version"]
|
||||
|
||||
ARGS_SHOW_TRADES = ["db_url", "trade_ids", "print_json"]
|
||||
|
||||
@@ -347,7 +351,7 @@ class Arguments:
|
||||
self.parser = ArgumentParser(
|
||||
prog="freqtrade", description="Free, open source crypto trading bot"
|
||||
)
|
||||
self._build_args(optionlist=["version_main"], parser=self.parser)
|
||||
self._build_args(optionlist=ARGS_MAIN, parser=self.parser)
|
||||
|
||||
from freqtrade.commands import (
|
||||
start_analysis_entries_exits,
|
||||
|
||||
@@ -83,7 +83,8 @@ AVAILABLE_CLI_OPTIONS = {
|
||||
"-d",
|
||||
"--datadir",
|
||||
"--data-dir",
|
||||
help="Path to directory with historical backtesting data.",
|
||||
help="Path to the base directory of the exchange with historical backtesting data. "
|
||||
"To see futures data, use trading-mode additionally.",
|
||||
metavar="PATH",
|
||||
),
|
||||
"user_data_dir": Arg(
|
||||
@@ -463,7 +464,7 @@ AVAILABLE_CLI_OPTIONS = {
|
||||
"format_from_trades": Arg(
|
||||
"--format-from",
|
||||
help="Source format for data conversion.",
|
||||
choices=constants.AVAILABLE_DATAHANDLERS + ["kraken_csv"],
|
||||
choices=[*constants.AVAILABLE_DATAHANDLERS, "kraken_csv"],
|
||||
required=True,
|
||||
),
|
||||
"format_from": Arg(
|
||||
@@ -527,6 +528,15 @@ AVAILABLE_CLI_OPTIONS = {
|
||||
),
|
||||
type=str,
|
||||
),
|
||||
"ui_prerelease": Arg(
|
||||
"--prerelease",
|
||||
help=(
|
||||
"Install the latest pre-release version of FreqUI. "
|
||||
"This is not recommended for production use."
|
||||
),
|
||||
action="store_true",
|
||||
default=False,
|
||||
),
|
||||
# Templating options
|
||||
"template": Arg(
|
||||
"--template",
|
||||
|
||||
@@ -23,8 +23,8 @@ def start_create_userdir(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
from freqtrade.configuration.directory_operations import copy_sample_files, create_userdata_dir
|
||||
|
||||
if "user_data_dir" in args and args["user_data_dir"]:
|
||||
userdir = create_userdata_dir(args["user_data_dir"], create_dir=True)
|
||||
if user_data_dir := args.get("user_data_dir"):
|
||||
userdir = create_userdata_dir(user_data_dir, create_dir=True)
|
||||
copy_sample_files(userdir, overwrite=args["reset"])
|
||||
else:
|
||||
logger.warning("`create-userdir` requires --userdir to be set.")
|
||||
@@ -85,22 +85,22 @@ def start_new_strategy(args: dict[str, Any]) -> None:
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
|
||||
if "strategy" in args and args["strategy"]:
|
||||
if "strategy_path" in args and args["strategy_path"]:
|
||||
strategy_dir = Path(args["strategy_path"])
|
||||
if strategy := args.get("strategy"):
|
||||
if strategy_path := args.get("strategy_path"):
|
||||
strategy_dir = Path(strategy_path)
|
||||
else:
|
||||
strategy_dir = config["user_data_dir"] / USERPATH_STRATEGIES
|
||||
if not strategy_dir.is_dir():
|
||||
logger.info(f"Creating strategy directory {strategy_dir}")
|
||||
strategy_dir.mkdir(parents=True)
|
||||
new_path = strategy_dir / (args["strategy"] + ".py")
|
||||
new_path = strategy_dir / (strategy + ".py")
|
||||
|
||||
if new_path.exists():
|
||||
raise OperationalException(
|
||||
f"`{new_path}` already exists. Please choose another Strategy Name."
|
||||
)
|
||||
|
||||
deploy_new_strategy(args["strategy"], new_path, args["template"])
|
||||
deploy_new_strategy(strategy, new_path, args["template"])
|
||||
|
||||
else:
|
||||
raise ConfigurationError("`new-strategy` requires --strategy to be set.")
|
||||
@@ -116,7 +116,9 @@ def start_install_ui(args: dict[str, Any]) -> None:
|
||||
|
||||
dest_folder = Path(__file__).parents[1] / "rpc/api_server/ui/installed/"
|
||||
# First make sure the assets are removed.
|
||||
dl_url, latest_version = get_ui_download_url(args.get("ui_version"))
|
||||
dl_url, latest_version = get_ui_download_url(
|
||||
args.get("ui_version"), args.get("ui_prerelease", False)
|
||||
)
|
||||
|
||||
curr_version = read_ui_version(dest_folder)
|
||||
if curr_version == latest_version and not args.get("erase_ui_only"):
|
||||
|
||||
@@ -51,7 +51,7 @@ def download_and_install_ui(dest_folder: Path, dl_url: str, version: str):
|
||||
f.write(version)
|
||||
|
||||
|
||||
def get_ui_download_url(version: str | None = None) -> tuple[str, str]:
|
||||
def get_ui_download_url(version: str | None, prerelease: bool) -> tuple[str, str]:
|
||||
base_url = "https://api.github.com/repos/freqtrade/frequi/"
|
||||
# Get base UI Repo path
|
||||
|
||||
@@ -61,14 +61,18 @@ def get_ui_download_url(version: str | None = None) -> tuple[str, str]:
|
||||
|
||||
if version:
|
||||
tmp = [x for x in r if x["name"] == version]
|
||||
if tmp:
|
||||
latest_version = tmp[0]["name"]
|
||||
assets = tmp[0].get("assets", [])
|
||||
else:
|
||||
raise ValueError("UI-Version not found.")
|
||||
else:
|
||||
latest_version = r[0]["name"]
|
||||
assets = r[0].get("assets", [])
|
||||
tmp = [x for x in r if prerelease or not x.get("prerelease")]
|
||||
|
||||
if tmp:
|
||||
# Ensure we have the latest version
|
||||
if version is None:
|
||||
tmp.sort(key=lambda x: x["created_at"], reverse=True)
|
||||
latest_version = tmp[0]["name"]
|
||||
assets = tmp[0].get("assets", [])
|
||||
else:
|
||||
raise ValueError("UI-Version not found.")
|
||||
|
||||
dl_url = ""
|
||||
if assets and len(assets) > 0:
|
||||
dl_url = assets[0]["browser_download_url"]
|
||||
|
||||
@@ -5,7 +5,6 @@ from typing import Any
|
||||
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
from freqtrade.ft_types import ValidExchangesType
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -21,6 +20,7 @@ def start_list_exchanges(args: dict[str, Any]) -> None:
|
||||
from rich.text import Text
|
||||
|
||||
from freqtrade.exchange import list_available_exchanges
|
||||
from freqtrade.ft_types import ValidExchangesType
|
||||
from freqtrade.loggers.rich_console import get_rich_console
|
||||
|
||||
available_exchanges: list[ValidExchangesType] = list_available_exchanges(
|
||||
|
||||
4
freqtrade/config_schema/__init__.py
Normal file
4
freqtrade/config_schema/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
from freqtrade.config_schema.config_schema import CONF_SCHEMA
|
||||
|
||||
|
||||
__all__ = ["CONF_SCHEMA"]
|
||||
@@ -689,7 +689,7 @@ CONF_SCHEMA = {
|
||||
"initial_state": {
|
||||
"description": "Initial state of the system.",
|
||||
"type": "string",
|
||||
"enum": ["running", "stopped"],
|
||||
"enum": ["running", "paused", "stopped"],
|
||||
},
|
||||
"force_entry_enable": {
|
||||
"description": "Force enable entry.",
|
||||
@@ -1146,6 +1146,14 @@ CONF_SCHEMA = {
|
||||
"type": "boolean",
|
||||
"default": False,
|
||||
},
|
||||
"indicator_periods_candles": {
|
||||
"description": (
|
||||
"Time periods to calculate indicators for. "
|
||||
"The indicators are added to the base indicator dataset."
|
||||
),
|
||||
"type": "array",
|
||||
"items": {"type": "number", "minimum": 1},
|
||||
},
|
||||
"use_SVM_to_remove_outliers": {
|
||||
"description": "Use SVM to remove outliers from the features.",
|
||||
"type": "boolean",
|
||||
@@ -1338,7 +1346,8 @@ SCHEMA_BACKTEST_REQUIRED = [
|
||||
"dataformat_ohlcv",
|
||||
"dataformat_trades",
|
||||
]
|
||||
SCHEMA_BACKTEST_REQUIRED_FINAL = SCHEMA_BACKTEST_REQUIRED + [
|
||||
SCHEMA_BACKTEST_REQUIRED_FINAL = [
|
||||
*SCHEMA_BACKTEST_REQUIRED,
|
||||
"stoploss",
|
||||
"minimal_roi",
|
||||
"max_open_trades",
|
||||
@@ -1350,6 +1359,4 @@ SCHEMA_MINIMAL_REQUIRED = [
|
||||
"dataformat_ohlcv",
|
||||
"dataformat_trades",
|
||||
]
|
||||
SCHEMA_MINIMAL_WEBSERVER = SCHEMA_MINIMAL_REQUIRED + [
|
||||
"api_server",
|
||||
]
|
||||
SCHEMA_MINIMAL_WEBSERVER = [*SCHEMA_MINIMAL_REQUIRED, "api_server"]
|
||||
@@ -6,7 +6,7 @@ from typing import Any
|
||||
from jsonschema import Draft4Validator, validators
|
||||
from jsonschema.exceptions import ValidationError, best_match
|
||||
|
||||
from freqtrade.configuration.config_schema import (
|
||||
from freqtrade.config_schema.config_schema import (
|
||||
CONF_SCHEMA,
|
||||
SCHEMA_BACKTEST_REQUIRED,
|
||||
SCHEMA_BACKTEST_REQUIRED_FINAL,
|
||||
@@ -361,7 +361,7 @@ def _validate_freqai_include_timeframes(conf: dict[str, Any], preliminary: bool)
|
||||
# Ensure that the base timeframe is included in the include_timeframes list
|
||||
if not preliminary and main_tf not in freqai_include_timeframes:
|
||||
feature_parameters = conf.get("freqai", {}).get("feature_parameters", {})
|
||||
include_timeframes = [main_tf] + freqai_include_timeframes
|
||||
include_timeframes = [main_tf, *freqai_include_timeframes]
|
||||
conf.get("freqai", {}).get("feature_parameters", {}).update(
|
||||
{**feature_parameters, "include_timeframes": include_timeframes}
|
||||
)
|
||||
|
||||
@@ -135,7 +135,7 @@ class Configuration:
|
||||
{"verbosity": safe_value_fallback(self.args, "verbosity", default_value=0)}
|
||||
)
|
||||
|
||||
if "logfile" in self.args and self.args["logfile"]:
|
||||
if self.args.get("logfile"):
|
||||
config.update({"logfile": self.args["logfile"]})
|
||||
|
||||
if "print_colorized" in self.args and not self.args["print_colorized"]:
|
||||
@@ -187,7 +187,7 @@ class Configuration:
|
||||
logger.warning("`force_entry_enable` RPC message enabled.")
|
||||
|
||||
# Support for sd_notify
|
||||
if "sd_notify" in self.args and self.args["sd_notify"]:
|
||||
if self.args.get("sd_notify"):
|
||||
config["internals"].update({"sd_notify": True})
|
||||
|
||||
def _process_datadir_options(self, config: Config) -> None:
|
||||
@@ -196,14 +196,14 @@ class Configuration:
|
||||
--user-data, --datadir
|
||||
"""
|
||||
# Check exchange parameter here - otherwise `datadir` might be wrong.
|
||||
if "exchange" in self.args and self.args["exchange"]:
|
||||
if self.args.get("exchange"):
|
||||
config["exchange"]["name"] = self.args["exchange"]
|
||||
logger.info(f"Using exchange {config['exchange']['name']}")
|
||||
|
||||
if "pair_whitelist" not in config["exchange"]:
|
||||
config["exchange"]["pair_whitelist"] = []
|
||||
|
||||
if "user_data_dir" in self.args and self.args["user_data_dir"]:
|
||||
if self.args.get("user_data_dir"):
|
||||
config.update({"user_data_dir": self.args["user_data_dir"]})
|
||||
elif "user_data_dir" not in config:
|
||||
# Default to cwd/user_data (legacy option ...)
|
||||
@@ -251,7 +251,7 @@ class Configuration:
|
||||
logstring="Parameter --enable-protections detected, enabling Protections. ...",
|
||||
)
|
||||
|
||||
if "max_open_trades" in self.args and self.args["max_open_trades"]:
|
||||
if self.args.get("max_open_trades"):
|
||||
config.update({"max_open_trades": self.args["max_open_trades"]})
|
||||
logger.info(
|
||||
"Parameter --max-open-trades detected, overriding max_open_trades to: %s ...",
|
||||
@@ -314,7 +314,7 @@ class Configuration:
|
||||
self._args_to_config_loop(config, configurations)
|
||||
|
||||
# Edge section:
|
||||
if "stoploss_range" in self.args and self.args["stoploss_range"]:
|
||||
if self.args.get("stoploss_range"):
|
||||
txt_range = ast.literal_eval(self.args["stoploss_range"])
|
||||
config["edge"].update({"stoploss_range_min": txt_range[0]})
|
||||
config["edge"].update({"stoploss_range_max": txt_range[1]})
|
||||
@@ -493,7 +493,7 @@ class Configuration:
|
||||
config["exchange"]["pair_whitelist"] = config["pairs"]
|
||||
return
|
||||
|
||||
if "pairs_file" in self.args and self.args["pairs_file"]:
|
||||
if self.args.get("pairs_file"):
|
||||
pairs_file = Path(self.args["pairs_file"])
|
||||
logger.info(f'Reading pairs file "{pairs_file}".')
|
||||
# Download pairs from the pairs file if no config is specified
|
||||
@@ -505,7 +505,7 @@ class Configuration:
|
||||
config["pairs"].sort()
|
||||
return
|
||||
|
||||
if "config" in self.args and self.args["config"]:
|
||||
if self.args.get("config"):
|
||||
logger.info("Using pairlist from configuration.")
|
||||
config["pairs"] = config.get("exchange", {}).get("pair_whitelist")
|
||||
else:
|
||||
|
||||
@@ -37,7 +37,7 @@ def chown_user_directory(directory: Path) -> None:
|
||||
"""
|
||||
if running_in_docker():
|
||||
try:
|
||||
import subprocess # noqa: S404
|
||||
import subprocess # noqa: S404, RUF100
|
||||
|
||||
subprocess.check_output(["sudo", "chown", "-R", "ftuser:", str(directory.resolve())])
|
||||
except Exception:
|
||||
|
||||
@@ -37,6 +37,7 @@ HYPEROPT_LOSS_BUILTIN = [
|
||||
"CalmarHyperOptLoss",
|
||||
"MaxDrawDownHyperOptLoss",
|
||||
"MaxDrawDownRelativeHyperOptLoss",
|
||||
"MaxDrawDownPerPairHyperOptLoss",
|
||||
"ProfitDrawDownHyperOptLoss",
|
||||
"MultiMetricHyperOptLoss",
|
||||
]
|
||||
@@ -99,7 +100,7 @@ DL_DATA_TIMEFRAMES = ["1m", "5m"]
|
||||
ENV_VAR_PREFIX = "FREQTRADE__"
|
||||
|
||||
CANCELED_EXCHANGE_STATES = ("cancelled", "canceled", "expired", "rejected")
|
||||
NON_OPEN_EXCHANGE_STATES = CANCELED_EXCHANGE_STATES + ("closed",)
|
||||
NON_OPEN_EXCHANGE_STATES = (*CANCELED_EXCHANGE_STATES, "closed")
|
||||
|
||||
# Define decimals per coin for outputs
|
||||
# Only used for outputs.
|
||||
|
||||
31
freqtrade/data/btanalysis/__init__.py
Normal file
31
freqtrade/data/btanalysis/__init__.py
Normal file
@@ -0,0 +1,31 @@
|
||||
# flake8: noqa: F401
|
||||
from .bt_fileutils import (
|
||||
BT_DATA_COLUMNS,
|
||||
delete_backtest_result,
|
||||
extract_trades_of_period,
|
||||
find_existing_backtest_stats,
|
||||
get_backtest_market_change,
|
||||
get_backtest_result,
|
||||
get_backtest_resultlist,
|
||||
get_latest_backtest_filename,
|
||||
get_latest_hyperopt_file,
|
||||
get_latest_hyperopt_filename,
|
||||
get_latest_optimize_filename,
|
||||
load_and_merge_backtest_result,
|
||||
load_backtest_analysis_data,
|
||||
load_backtest_data,
|
||||
load_backtest_metadata,
|
||||
load_backtest_stats,
|
||||
load_exit_signal_candles,
|
||||
load_file_from_zip,
|
||||
load_rejected_signals,
|
||||
load_signal_candles,
|
||||
load_trades,
|
||||
load_trades_from_db,
|
||||
trade_list_to_dataframe,
|
||||
update_backtest_metadata,
|
||||
)
|
||||
from .trade_parallelism import (
|
||||
analyze_trade_parallelism,
|
||||
evaluate_result_multi,
|
||||
)
|
||||
@@ -13,7 +13,7 @@ from typing import Any, Literal
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
|
||||
from freqtrade.constants import LAST_BT_RESULT_FN, IntOrInf
|
||||
from freqtrade.constants import LAST_BT_RESULT_FN
|
||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
from freqtrade.ft_types import BacktestHistoryEntryType, BacktestResultType
|
||||
from freqtrade.misc import file_dump_json, json_load
|
||||
@@ -376,7 +376,7 @@ def load_backtest_data(filename: Path | str, strategy: str | None = None) -> pd.
|
||||
|
||||
if not strategy:
|
||||
if len(data["strategy"]) == 1:
|
||||
strategy = list(data["strategy"].keys())[0]
|
||||
strategy = next(iter(data["strategy"].keys()))
|
||||
else:
|
||||
raise ValueError(
|
||||
"Detected backtest result with more than one strategy. "
|
||||
@@ -491,55 +491,6 @@ def load_exit_signal_candles(backtest_dir: Path) -> dict[str, dict[str, pd.DataF
|
||||
return load_backtest_analysis_data(backtest_dir, "exited")
|
||||
|
||||
|
||||
def analyze_trade_parallelism(results: pd.DataFrame, timeframe: str) -> pd.DataFrame:
|
||||
"""
|
||||
Find overlapping trades by expanding each trade once per period it was open
|
||||
and then counting overlaps.
|
||||
:param results: Results Dataframe - can be loaded
|
||||
:param timeframe: Timeframe used for backtest
|
||||
:return: dataframe with open-counts per time-period in timeframe
|
||||
"""
|
||||
from freqtrade.exchange import timeframe_to_resample_freq
|
||||
|
||||
timeframe_freq = timeframe_to_resample_freq(timeframe)
|
||||
dates = [
|
||||
pd.Series(
|
||||
pd.date_range(
|
||||
row[1]["open_date"],
|
||||
row[1]["close_date"],
|
||||
freq=timeframe_freq,
|
||||
# Exclude right boundary - the date is the candle open date.
|
||||
inclusive="left",
|
||||
)
|
||||
)
|
||||
for row in results[["open_date", "close_date"]].iterrows()
|
||||
]
|
||||
deltas = [len(x) for x in dates]
|
||||
dates = pd.Series(pd.concat(dates).values, name="date")
|
||||
df2 = pd.DataFrame(np.repeat(results.values, deltas, axis=0), columns=results.columns)
|
||||
|
||||
df2 = pd.concat([dates, df2], axis=1)
|
||||
df2 = df2.set_index("date")
|
||||
df_final = df2.resample(timeframe_freq)[["pair"]].count()
|
||||
df_final = df_final.rename({"pair": "open_trades"}, axis=1)
|
||||
return df_final
|
||||
|
||||
|
||||
def evaluate_result_multi(
|
||||
results: pd.DataFrame, timeframe: str, max_open_trades: IntOrInf
|
||||
) -> pd.DataFrame:
|
||||
"""
|
||||
Find overlapping trades by expanding each trade once per period it was open
|
||||
and then counting overlaps
|
||||
:param results: Results Dataframe - can be loaded
|
||||
:param timeframe: Frequency used for the backtest
|
||||
:param max_open_trades: parameter max_open_trades used during backtest run
|
||||
:return: dataframe with open-counts per time-period in freq
|
||||
"""
|
||||
df_final = analyze_trade_parallelism(results, timeframe)
|
||||
return df_final[df_final["open_trades"] > max_open_trades]
|
||||
|
||||
|
||||
def trade_list_to_dataframe(trades: list[Trade] | list[LocalTrade]) -> pd.DataFrame:
|
||||
"""
|
||||
Convert list of Trade objects to pandas Dataframe
|
||||
60
freqtrade/data/btanalysis/trade_parallelism.py
Normal file
60
freqtrade/data/btanalysis/trade_parallelism.py
Normal file
@@ -0,0 +1,60 @@
|
||||
import logging
|
||||
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
|
||||
from freqtrade.constants import IntOrInf
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def analyze_trade_parallelism(trades: pd.DataFrame, timeframe: str) -> pd.DataFrame:
|
||||
"""
|
||||
Find overlapping trades by expanding each trade once per period it was open
|
||||
and then counting overlaps.
|
||||
:param trades: Trades Dataframe - can be loaded from backtest, or created
|
||||
via trade_list_to_dataframe
|
||||
:param timeframe: Timeframe used for backtest
|
||||
:return: dataframe with open-counts per time-period in timeframe
|
||||
"""
|
||||
from freqtrade.exchange import timeframe_to_resample_freq
|
||||
|
||||
timeframe_freq = timeframe_to_resample_freq(timeframe)
|
||||
dates = [
|
||||
pd.Series(
|
||||
pd.date_range(
|
||||
row[1]["open_date"],
|
||||
row[1]["close_date"],
|
||||
freq=timeframe_freq,
|
||||
# Exclude right boundary - the date is the candle open date.
|
||||
inclusive="left",
|
||||
)
|
||||
)
|
||||
for row in trades[["open_date", "close_date"]].iterrows()
|
||||
]
|
||||
deltas = [len(x) for x in dates]
|
||||
dates = pd.Series(pd.concat(dates).values, name="date")
|
||||
df2 = pd.DataFrame(np.repeat(trades.values, deltas, axis=0), columns=trades.columns)
|
||||
|
||||
df2 = pd.concat([dates, df2], axis=1)
|
||||
df2 = df2.set_index("date")
|
||||
df_final = df2.resample(timeframe_freq)[["pair"]].count()
|
||||
df_final = df_final.rename({"pair": "open_trades"}, axis=1)
|
||||
return df_final
|
||||
|
||||
|
||||
def evaluate_result_multi(
|
||||
trades: pd.DataFrame, timeframe: str, max_open_trades: IntOrInf
|
||||
) -> pd.DataFrame:
|
||||
"""
|
||||
Find overlapping trades by expanding each trade once per period it was open
|
||||
and then counting overlaps
|
||||
:param trades: Trades Dataframe - can be loaded from backtest, or created
|
||||
via trade_list_to_dataframe
|
||||
:param timeframe: Frequency used for the backtest
|
||||
:param max_open_trades: parameter max_open_trades used during backtest run
|
||||
:return: dataframe with open-counts per time-period in freq
|
||||
"""
|
||||
df_final = analyze_trade_parallelism(trades, timeframe)
|
||||
return df_final[df_final["open_trades"] > max_open_trades]
|
||||
@@ -281,7 +281,7 @@ def _merge_dfs(
|
||||
):
|
||||
merge_on = ["pair", "open_date"]
|
||||
signal_wide_indicators = list(set(available_inds) - set(BT_DATA_COLUMNS))
|
||||
columns_to_keep = merge_on + ["enter_reason", "exit_reason"]
|
||||
columns_to_keep = [*merge_on, "enter_reason", "exit_reason"]
|
||||
|
||||
if exit_df is None or exit_df.empty or entry_only is True:
|
||||
return entry_df[columns_to_keep + available_inds]
|
||||
|
||||
@@ -116,7 +116,7 @@ def load_data(
|
||||
result[pair] = hist
|
||||
else:
|
||||
if candle_type is CandleType.FUNDING_RATE and user_futures_funding_rate is not None:
|
||||
logger.warn(f"{pair} using user specified [{user_futures_funding_rate}]")
|
||||
logger.warning(f"{pair} using user specified [{user_futures_funding_rate}]")
|
||||
elif candle_type not in (CandleType.SPOT, CandleType.FUTURES):
|
||||
result[pair] = DataFrame(columns=["date", "open", "close", "high", "low", "volume"])
|
||||
|
||||
|
||||
@@ -10,7 +10,9 @@ import pandas as pd
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def calculate_market_change(data: dict[str, pd.DataFrame], column: str = "close") -> float:
|
||||
def calculate_market_change(
|
||||
data: dict[str, pd.DataFrame], column: str = "close", min_date: datetime | None = None
|
||||
) -> float:
|
||||
"""
|
||||
Calculate market change based on "column".
|
||||
Calculation is done by taking the first non-null and the last non-null element of each column
|
||||
@@ -19,14 +21,24 @@ def calculate_market_change(data: dict[str, pd.DataFrame], column: str = "close"
|
||||
|
||||
:param data: Dict of Dataframes, dict key should be pair.
|
||||
:param column: Column in the original dataframes to use
|
||||
:param min_date: Minimum date to consider for calculations. Market change should only be
|
||||
calculated for data actually backtested, excluding startup periods.
|
||||
:return:
|
||||
"""
|
||||
tmp_means = []
|
||||
for pair, df in data.items():
|
||||
start = df[column].dropna().iloc[0]
|
||||
end = df[column].dropna().iloc[-1]
|
||||
df1 = df
|
||||
if min_date is not None:
|
||||
df1 = df1[df1["date"] >= min_date]
|
||||
if df1.empty:
|
||||
logger.warning(f"Pair {pair} has no data after {min_date}.")
|
||||
continue
|
||||
start = df1[column].dropna().iloc[0]
|
||||
end = df1[column].dropna().iloc[-1]
|
||||
tmp_means.append((end - start) / start)
|
||||
|
||||
if not tmp_means:
|
||||
return 0.0
|
||||
return float(np.mean(tmp_means))
|
||||
|
||||
|
||||
@@ -118,7 +130,7 @@ def _calc_drawdown_series(
|
||||
) -> pd.DataFrame:
|
||||
max_drawdown_df = pd.DataFrame()
|
||||
max_drawdown_df["cumulative"] = profit_results[value_col].cumsum()
|
||||
max_drawdown_df["high_value"] = max_drawdown_df["cumulative"].cummax()
|
||||
max_drawdown_df["high_value"] = np.maximum(0, max_drawdown_df["cumulative"].cummax())
|
||||
max_drawdown_df["drawdown"] = max_drawdown_df["cumulative"] - max_drawdown_df["high_value"]
|
||||
max_drawdown_df["date"] = profit_results.loc[:, date_col]
|
||||
if starting_balance:
|
||||
@@ -201,13 +213,11 @@ def calculate_max_drawdown(
|
||||
if relative
|
||||
else max_drawdown_df["drawdown"].idxmin()
|
||||
)
|
||||
if idxmin == 0:
|
||||
raise ValueError("No losing trade, therefore no drawdown.")
|
||||
high_date = profit_results.loc[max_drawdown_df.iloc[:idxmin]["high_value"].idxmax(), date_col]
|
||||
|
||||
high_idx = max_drawdown_df.iloc[: idxmin + 1]["high_value"].idxmax()
|
||||
high_date = profit_results.loc[high_idx, date_col]
|
||||
low_date = profit_results.loc[idxmin, date_col]
|
||||
high_val = max_drawdown_df.loc[
|
||||
max_drawdown_df.iloc[:idxmin]["high_value"].idxmax(), "cumulative"
|
||||
]
|
||||
high_val = max_drawdown_df.loc[high_idx, "cumulative"]
|
||||
low_val = max_drawdown_df.loc[idxmin, "cumulative"]
|
||||
max_drawdown_rel = max_drawdown_df.loc[idxmin, "drawdown_relative"]
|
||||
|
||||
|
||||
@@ -7,8 +7,9 @@ class State(Enum):
|
||||
"""
|
||||
|
||||
RUNNING = 1
|
||||
STOPPED = 2
|
||||
RELOAD_CONFIG = 3
|
||||
PAUSED = 2
|
||||
STOPPED = 3
|
||||
RELOAD_CONFIG = 4
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.name.lower()}"
|
||||
|
||||
@@ -143,7 +143,7 @@ class Binance(Exchange):
|
||||
Does not work for other exchanges, which don't return the earliest data when called with "0"
|
||||
:param candle_type: Any of the enum CandleType (must match trading mode!)
|
||||
"""
|
||||
if is_new_pair:
|
||||
if is_new_pair and candle_type in (CandleType.SPOT, CandleType.FUTURES, CandleType.MARK):
|
||||
with self._loop_lock:
|
||||
x = self.loop.run_until_complete(
|
||||
self._async_get_candle_history(pair, timeframe, candle_type, 0)
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -148,6 +148,7 @@ class Exchange:
|
||||
"trades_has_history": False,
|
||||
"l2_limit_range": None,
|
||||
"l2_limit_range_required": True, # Allow Empty L2 limit (kucoin)
|
||||
"l2_limit_upper": None, # Upper limit for L2 limit
|
||||
"mark_ohlcv_price": "mark",
|
||||
"mark_ohlcv_timeframe": "8h",
|
||||
"funding_fee_timeframe": "8h",
|
||||
@@ -960,7 +961,7 @@ class Exchange:
|
||||
return 1 / pow(10, precision)
|
||||
|
||||
def get_min_pair_stake_amount(
|
||||
self, pair: str, price: float, stoploss: float, leverage: float | None = 1.0
|
||||
self, pair: str, price: float, stoploss: float, leverage: float = 1.0
|
||||
) -> float | None:
|
||||
return self._get_stake_amount_limit(pair, price, stoploss, "min", leverage)
|
||||
|
||||
@@ -979,7 +980,7 @@ class Exchange:
|
||||
price: float,
|
||||
stoploss: float,
|
||||
limit: Literal["min", "max"],
|
||||
leverage: float | None = 1.0,
|
||||
leverage: float = 1.0,
|
||||
) -> float | None:
|
||||
isMin = limit == "min"
|
||||
|
||||
@@ -988,6 +989,8 @@ class Exchange:
|
||||
except KeyError:
|
||||
raise ValueError(f"Can't get market information for symbol {pair}")
|
||||
|
||||
stake_limits = []
|
||||
limits = market["limits"]
|
||||
if isMin:
|
||||
# reserve some percent defined in config (5% default) + stoploss
|
||||
margin_reserve: float = 1.0 + self._config.get(
|
||||
@@ -997,11 +1000,12 @@ class Exchange:
|
||||
# it should not be more than 50%
|
||||
stoploss_reserve = max(min(stoploss_reserve, 1.5), 1)
|
||||
else:
|
||||
# is_max
|
||||
margin_reserve = 1.0
|
||||
stoploss_reserve = 1.0
|
||||
if max_from_tiers := self._get_max_notional_from_tiers(pair, leverage=leverage):
|
||||
stake_limits.append(max_from_tiers)
|
||||
|
||||
stake_limits = []
|
||||
limits = market["limits"]
|
||||
if limits["cost"][limit] is not None:
|
||||
stake_limits.append(
|
||||
self._contracts_to_amount(pair, limits["cost"][limit]) * stoploss_reserve
|
||||
@@ -1365,8 +1369,8 @@ class Exchange:
|
||||
ordertype = available_order_Types[user_order_type]
|
||||
else:
|
||||
# Otherwise pick only one available
|
||||
ordertype = list(available_order_Types.values())[0]
|
||||
user_order_type = list(available_order_Types.keys())[0]
|
||||
ordertype = next(iter(available_order_Types.values()))
|
||||
user_order_type = next(iter(available_order_Types.keys()))
|
||||
return ordertype, user_order_type
|
||||
|
||||
def _get_stop_limit_rate(self, stop_price: float, order_types: dict, side: str) -> float:
|
||||
@@ -1955,14 +1959,18 @@ class Exchange:
|
||||
|
||||
@staticmethod
|
||||
def get_next_limit_in_list(
|
||||
limit: int, limit_range: list[int] | None, range_required: bool = True
|
||||
limit: int,
|
||||
limit_range: list[int] | None,
|
||||
range_required: bool = True,
|
||||
upper_limit: int | None = None,
|
||||
):
|
||||
"""
|
||||
Get next greater value in the list.
|
||||
Used by fetch_l2_order_book if the api only supports a limited range
|
||||
if both limit_range and upper_limit is provided, limit_range wins.
|
||||
"""
|
||||
if not limit_range:
|
||||
return limit
|
||||
return min(limit, upper_limit) if upper_limit else limit
|
||||
|
||||
result = min([x for x in limit_range if limit <= x] + [max(limit_range)])
|
||||
if not range_required and limit > result:
|
||||
@@ -1979,7 +1987,10 @@ class Exchange:
|
||||
{'asks': [price, volume], 'bids': [price, volume]}
|
||||
"""
|
||||
limit1 = self.get_next_limit_in_list(
|
||||
limit, self._ft_has["l2_limit_range"], self._ft_has["l2_limit_range_required"]
|
||||
limit,
|
||||
self._ft_has["l2_limit_range"],
|
||||
self._ft_has["l2_limit_range_required"],
|
||||
self._ft_has["l2_limit_upper"],
|
||||
)
|
||||
try:
|
||||
return self._api.fetch_l2_order_book(pair, limit1)
|
||||
@@ -2790,7 +2801,7 @@ class Exchange:
|
||||
pair, timeframe, candle_type = pairwt
|
||||
since_ms = None
|
||||
new_ticks: list = []
|
||||
all_stored_ticks_df = DataFrame(columns=DEFAULT_TRADES_COLUMNS + ["date"])
|
||||
all_stored_ticks_df = DataFrame(columns=[*DEFAULT_TRADES_COLUMNS, "date"])
|
||||
first_candle_ms = self.needed_candle_for_trades_ms(timeframe, candle_type)
|
||||
# refresh, if
|
||||
# a. not in _trades
|
||||
@@ -2835,7 +2846,7 @@ class Exchange:
|
||||
else:
|
||||
# Skip cache, it's too old
|
||||
all_stored_ticks_df = DataFrame(
|
||||
columns=DEFAULT_TRADES_COLUMNS + ["date"]
|
||||
columns=[*DEFAULT_TRADES_COLUMNS, "date"]
|
||||
)
|
||||
|
||||
# from_id overrules with exchange set to id paginate
|
||||
@@ -3353,42 +3364,22 @@ class Exchange:
|
||||
pair_tiers = self._leverage_tiers[pair]
|
||||
|
||||
if stake_amount == 0:
|
||||
return self._leverage_tiers[pair][0]["maxLeverage"] # Max lev for lowest amount
|
||||
return pair_tiers[0]["maxLeverage"] # Max lev for lowest amount
|
||||
|
||||
for tier_index in range(len(pair_tiers)):
|
||||
tier = pair_tiers[tier_index]
|
||||
lev = tier["maxLeverage"]
|
||||
# Find the appropriate tier based on stake_amount
|
||||
prior_max_lev = None
|
||||
for tier in pair_tiers:
|
||||
min_stake = tier["minNotional"] / (prior_max_lev or tier["maxLeverage"])
|
||||
max_stake = tier["maxNotional"] / tier["maxLeverage"]
|
||||
prior_max_lev = tier["maxLeverage"]
|
||||
# Adjust notional by leverage to do a proper comparison
|
||||
if min_stake <= stake_amount <= max_stake:
|
||||
return tier["maxLeverage"]
|
||||
|
||||
if tier_index < len(pair_tiers) - 1:
|
||||
next_tier = pair_tiers[tier_index + 1]
|
||||
next_floor = next_tier["minNotional"] / next_tier["maxLeverage"]
|
||||
if next_floor > stake_amount: # Next tier min too high for stake amount
|
||||
return min((tier["maxNotional"] / stake_amount), lev)
|
||||
#
|
||||
# With the two leverage tiers below,
|
||||
# - a stake amount of 150 would mean a max leverage of (10000 / 150) = 66.66
|
||||
# - stakes below 133.33 = max_lev of 75
|
||||
# - stakes between 133.33-200 = max_lev of 10000/stake = 50.01-74.99
|
||||
# - stakes from 200 + 1000 = max_lev of 50
|
||||
#
|
||||
# {
|
||||
# "min": 0, # stake = 0.0
|
||||
# "max": 10000, # max_stake@75 = 10000/75 = 133.33333333333334
|
||||
# "lev": 75,
|
||||
# },
|
||||
# {
|
||||
# "min": 10000, # stake = 200.0
|
||||
# "max": 50000, # max_stake@50 = 50000/50 = 1000.0
|
||||
# "lev": 50,
|
||||
# }
|
||||
#
|
||||
|
||||
else: # if on the last tier
|
||||
if stake_amount > tier["maxNotional"]:
|
||||
# If stake is > than max tradeable amount
|
||||
raise InvalidOrderException(f"Amount {stake_amount} too high for {pair}")
|
||||
else:
|
||||
return tier["maxLeverage"]
|
||||
# else: # if on the last tier
|
||||
if stake_amount > max_stake:
|
||||
# If stake is > than max tradeable amount
|
||||
raise InvalidOrderException(f"Amount {stake_amount} too high for {pair}")
|
||||
|
||||
raise OperationalException(
|
||||
"Looped through all tiers without finding a max leverage. Should never be reached"
|
||||
@@ -3403,6 +3394,23 @@ class Exchange:
|
||||
else:
|
||||
return 1.0
|
||||
|
||||
def _get_max_notional_from_tiers(self, pair: str, leverage: float) -> float | None:
|
||||
"""
|
||||
get max_notional from leverage_tiers
|
||||
:param pair: The base/quote currency pair being traded
|
||||
:param leverage: The leverage to be used
|
||||
:return: The maximum notional value for the given leverage or None if not found
|
||||
"""
|
||||
if self.trading_mode != TradingMode.FUTURES:
|
||||
return None
|
||||
if pair not in self._leverage_tiers:
|
||||
return None
|
||||
pair_tiers = self._leverage_tiers[pair]
|
||||
for tier in reversed(pair_tiers):
|
||||
if leverage <= tier["maxLeverage"]:
|
||||
return tier["maxNotional"]
|
||||
return None
|
||||
|
||||
@retrier
|
||||
def _set_leverage(
|
||||
self,
|
||||
|
||||
@@ -37,6 +37,7 @@ class FtHas(TypedDict, total=False):
|
||||
# Orderbook
|
||||
l2_limit_range: list[int] | None
|
||||
l2_limit_range_required: bool
|
||||
l2_limit_upper: int | None
|
||||
# Futures
|
||||
ccxt_futures_name: str # usually swap
|
||||
mark_ohlcv_price: str
|
||||
@@ -44,6 +45,7 @@ class FtHas(TypedDict, total=False):
|
||||
funding_fee_timeframe: str
|
||||
funding_fee_candle_limit: int
|
||||
floor_leverage: bool
|
||||
uses_leverage_tiers: bool
|
||||
needs_trading_fees: bool
|
||||
order_props_in_contracts: list[Literal["amount", "cost", "filled", "remaining"]]
|
||||
|
||||
|
||||
@@ -35,6 +35,7 @@ class Gate(Exchange):
|
||||
"stoploss_order_types": {"limit": "limit"},
|
||||
"stop_price_param": "stopPrice",
|
||||
"stop_price_prop": "stopPrice",
|
||||
"l2_limit_upper": 1000,
|
||||
"marketOrderRequiresPrice": True,
|
||||
"trades_has_history": False, # Endpoint would support this - but ccxt doesn't.
|
||||
}
|
||||
@@ -44,6 +45,7 @@ class Gate(Exchange):
|
||||
"marketOrderRequiresPrice": False,
|
||||
"funding_fee_candle_limit": 90,
|
||||
"stop_price_type_field": "price_type",
|
||||
"l2_limit_upper": 300,
|
||||
"stop_price_type_value_mapping": {
|
||||
PriceType.LAST: 0,
|
||||
PriceType.MARK: 1,
|
||||
|
||||
@@ -35,6 +35,7 @@ class Hyperliquid(Exchange):
|
||||
"stop_price_prop": "stopPrice",
|
||||
"funding_fee_timeframe": "1h",
|
||||
"funding_fee_candle_limit": 500,
|
||||
"uses_leverage_tiers": False,
|
||||
}
|
||||
|
||||
_supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
|
||||
|
||||
@@ -69,7 +69,7 @@ class Kraken(Exchange):
|
||||
consolidated: CcxtBalances = {}
|
||||
for currency, balance in balances.items():
|
||||
base_currency = currency[:-2] if currency.endswith(".F") else currency
|
||||
base_currency = self._api.commonCurrencies.get(base_currency, base_currency)
|
||||
|
||||
if base_currency in consolidated:
|
||||
consolidated[base_currency]["free"] += balance["free"]
|
||||
consolidated[base_currency]["used"] += balance["used"]
|
||||
|
||||
@@ -569,7 +569,7 @@ class FreqaiDataDrawer:
|
||||
dk.training_features_list = dk.data["training_features_list"]
|
||||
dk.label_list = dk.data["label_list"]
|
||||
|
||||
def load_data(self, coin: str, dk: FreqaiDataKitchen) -> Any: # noqa: C901
|
||||
def load_data(self, coin: str, dk: FreqaiDataKitchen) -> Any:
|
||||
"""
|
||||
loads all data required to make a prediction on a sub-train time range
|
||||
:returns:
|
||||
|
||||
@@ -4,8 +4,8 @@ from typing import Any
|
||||
from xgboost import XGBRegressor
|
||||
|
||||
from freqtrade.freqai.base_models.BaseRegressionModel import BaseRegressionModel
|
||||
from freqtrade.freqai.base_models.FreqaiMultiOutputRegressor import FreqaiMultiOutputRegressor
|
||||
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
|
||||
from freqtrade.freqai.tensorboard import TBCallback
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -19,6 +19,7 @@ class XGBoostRegressorMultiTarget(BaseRegressionModel):
|
||||
`predict()` methods to add their custom data handling tools or change
|
||||
various aspects of the training that cannot be configured via the
|
||||
top level config.json file.
|
||||
This is an exact copy of XGBoostRegressor kept for compatibility reasons.
|
||||
"""
|
||||
|
||||
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
|
||||
@@ -29,45 +30,32 @@ class XGBoostRegressorMultiTarget(BaseRegressionModel):
|
||||
:param dk: The datakitchen object for the current coin/model
|
||||
"""
|
||||
|
||||
xgb = XGBRegressor(**self.model_training_parameters)
|
||||
|
||||
X = data_dictionary["train_features"]
|
||||
y = data_dictionary["train_labels"]
|
||||
|
||||
if self.freqai_info.get("data_split_parameters", {}).get("test_size", 0.1) == 0:
|
||||
eval_set = None
|
||||
eval_weights = None
|
||||
else:
|
||||
eval_set = [(data_dictionary["test_features"], data_dictionary["test_labels"]), (X, y)]
|
||||
eval_weights = [data_dictionary["test_weights"], data_dictionary["train_weights"]]
|
||||
|
||||
sample_weight = data_dictionary["train_weights"]
|
||||
|
||||
eval_weights = None
|
||||
eval_sets = [None] * y.shape[1]
|
||||
xgb_model = self.get_init_model(dk.pair)
|
||||
|
||||
if self.freqai_info.get("data_split_parameters", {}).get("test_size", 0.1) != 0:
|
||||
eval_weights = [data_dictionary["test_weights"]]
|
||||
for i in range(data_dictionary["test_labels"].shape[1]):
|
||||
eval_sets[i] = [ # type: ignore
|
||||
(
|
||||
data_dictionary["test_features"],
|
||||
data_dictionary["test_labels"].iloc[:, i],
|
||||
)
|
||||
]
|
||||
model = XGBRegressor(**self.model_training_parameters)
|
||||
|
||||
init_model = self.get_init_model(dk.pair)
|
||||
if init_model:
|
||||
init_models = init_model.estimators_
|
||||
else:
|
||||
init_models = [None] * y.shape[1]
|
||||
|
||||
fit_params = []
|
||||
for i in range(len(eval_sets)):
|
||||
fit_params.append(
|
||||
{
|
||||
"eval_set": eval_sets[i],
|
||||
"sample_weight_eval_set": eval_weights,
|
||||
"xgb_model": init_models[i],
|
||||
}
|
||||
)
|
||||
|
||||
model = FreqaiMultiOutputRegressor(estimator=xgb)
|
||||
thread_training = self.freqai_info.get("multitarget_parallel_training", False)
|
||||
if thread_training:
|
||||
model.n_jobs = y.shape[1]
|
||||
model.fit(X=X, y=y, sample_weight=sample_weight, fit_params=fit_params)
|
||||
model.set_params(callbacks=[TBCallback(dk.data_path)])
|
||||
model.fit(
|
||||
X=X,
|
||||
y=y,
|
||||
sample_weight=sample_weight,
|
||||
eval_set=eval_set,
|
||||
sample_weight_eval_set=eval_weights,
|
||||
xgb_model=xgb_model,
|
||||
)
|
||||
# set the callbacks to empty so that we can serialize to disk later
|
||||
model.set_params(callbacks=[])
|
||||
|
||||
return model
|
||||
|
||||
@@ -293,6 +293,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
trades = Trade.get_open_trades()
|
||||
# First process current opened trades (positions)
|
||||
self.exit_positions(trades)
|
||||
Trade.commit()
|
||||
|
||||
# Check if we need to adjust our current positions before attempting to enter new trades.
|
||||
if self.strategy.position_adjustment_enable:
|
||||
@@ -300,7 +301,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
self.process_open_trade_positions()
|
||||
|
||||
# Then looking for entry opportunities
|
||||
if self.get_free_open_trades():
|
||||
if self.state == State.RUNNING and self.get_free_open_trades():
|
||||
self.enter_positions()
|
||||
self._schedule.run_pending()
|
||||
Trade.commit()
|
||||
@@ -759,12 +760,14 @@ class FreqtradeBot(LoggingMixin):
|
||||
current_exit_profit = trade.calc_profit_ratio(current_exit_rate)
|
||||
|
||||
min_entry_stake = self.exchange.get_min_pair_stake_amount(
|
||||
trade.pair, current_entry_rate, 0.0
|
||||
trade.pair, current_entry_rate, 0.0, trade.leverage
|
||||
)
|
||||
min_exit_stake = self.exchange.get_min_pair_stake_amount(
|
||||
trade.pair, current_exit_rate, self.strategy.stoploss
|
||||
trade.pair, current_exit_rate, self.strategy.stoploss, trade.leverage
|
||||
)
|
||||
max_entry_stake = self.exchange.get_max_pair_stake_amount(
|
||||
trade.pair, current_entry_rate, trade.leverage
|
||||
)
|
||||
max_entry_stake = self.exchange.get_max_pair_stake_amount(trade.pair, current_entry_rate)
|
||||
stake_available = self.wallets.get_available_stake_amount()
|
||||
logger.debug(f"Calling adjust_trade_position for pair {trade.pair}")
|
||||
stake_amount, order_tag = self.strategy._adjust_trade_position_internal(
|
||||
@@ -781,6 +784,10 @@ class FreqtradeBot(LoggingMixin):
|
||||
)
|
||||
|
||||
if stake_amount is not None and stake_amount > 0.0:
|
||||
if self.state == State.PAUSED:
|
||||
logger.debug("Position adjustment aborted because the bot is in PAUSED state")
|
||||
return
|
||||
|
||||
# We should increase our position
|
||||
if self.strategy.max_entry_position_adjustment > -1:
|
||||
count_of_entries = trade.nr_of_successful_entries
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
# flake8: noqa: F401
|
||||
from freqtrade.ft_types.backtest_result_type import (
|
||||
BacktestContentType,
|
||||
BacktestContentTypeIcomplete,
|
||||
BacktestHistoryEntryType,
|
||||
BacktestMetadataType,
|
||||
BacktestResultType,
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
from copy import deepcopy
|
||||
from typing import Any, cast
|
||||
|
||||
from pandas import DataFrame
|
||||
from typing_extensions import TypedDict
|
||||
|
||||
from freqtrade.constants import Config
|
||||
|
||||
|
||||
class BacktestMetadataType(TypedDict):
|
||||
run_id: str
|
||||
@@ -36,3 +39,23 @@ class BacktestHistoryEntryType(BacktestMetadataType):
|
||||
backtest_end_ts: int | None
|
||||
timeframe: str | None
|
||||
timeframe_detail: str | None
|
||||
|
||||
|
||||
class BacktestContentTypeIcomplete(TypedDict, total=False):
|
||||
results: DataFrame
|
||||
config: Config
|
||||
locks: Any
|
||||
rejected_signals: int
|
||||
timedout_entry_orders: int
|
||||
timedout_exit_orders: int
|
||||
canceled_trade_entries: int
|
||||
canceled_entry_orders: int
|
||||
replaced_entry_orders: int
|
||||
final_balance: float
|
||||
backtest_start_time: int
|
||||
backtest_end_time: int
|
||||
run_id: str
|
||||
|
||||
|
||||
class BacktestContentType(BacktestContentTypeIcomplete, total=True):
|
||||
pass
|
||||
|
||||
@@ -123,7 +123,7 @@ def _add_formatter(log_config: dict[str, Any], format_name: str, format_: str):
|
||||
|
||||
def _create_log_config(config: Config) -> dict[str, Any]:
|
||||
# Get log_config from user config or use default
|
||||
log_config = config.get("log_config", deepcopy(FT_LOGGING_CONFIG))
|
||||
log_config = deepcopy(config.get("log_config", FT_LOGGING_CONFIG))
|
||||
|
||||
if logfile := config.get("logfile"):
|
||||
s = logfile.split(":")
|
||||
|
||||
@@ -20,12 +20,13 @@ class LoggingMixin:
|
||||
self.refresh_period = refresh_period
|
||||
self._log_cache: TTLCache = TTLCache(maxsize=1024, ttl=self.refresh_period)
|
||||
|
||||
def log_once(self, message: str, logmethod: Callable) -> None:
|
||||
def log_once(self, message: str, logmethod: Callable, force_show: bool = False) -> None:
|
||||
"""
|
||||
Logs message - not more often than "refresh_period" to avoid log spamming
|
||||
Logs the log-message as debug as well to simplify debugging.
|
||||
:param message: String containing the message to be sent to the function.
|
||||
:param logmethod: Function that'll be called. Most likely `logger.info`.
|
||||
:param force_show: If True, sends the message regardless of show_output value.
|
||||
:return: None.
|
||||
"""
|
||||
|
||||
@@ -35,6 +36,7 @@ class LoggingMixin:
|
||||
|
||||
# Log as debug first
|
||||
self.logger.debug(message)
|
||||
# Call hidden function.
|
||||
if self.show_output:
|
||||
|
||||
# Call hidden function if show_output is True or force_show is True
|
||||
if self.show_output or force_show:
|
||||
_log_once(message)
|
||||
|
||||
@@ -8,7 +8,6 @@ import logging
|
||||
from collections import defaultdict
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any
|
||||
|
||||
from numpy import nan
|
||||
from pandas import DataFrame
|
||||
@@ -37,7 +36,12 @@ from freqtrade.exchange import (
|
||||
timeframe_to_seconds,
|
||||
)
|
||||
from freqtrade.exchange.exchange import Exchange
|
||||
from freqtrade.ft_types import BacktestResultType, get_BacktestResultType_default
|
||||
from freqtrade.ft_types import (
|
||||
BacktestContentType,
|
||||
BacktestContentTypeIcomplete,
|
||||
BacktestResultType,
|
||||
get_BacktestResultType_default,
|
||||
)
|
||||
from freqtrade.leverage.liquidation_price import update_liquidation_prices
|
||||
from freqtrade.mixins import LoggingMixin
|
||||
from freqtrade.optimize.backtest_caching import get_strategy_run_id
|
||||
@@ -119,7 +123,7 @@ class Backtesting:
|
||||
config["dry_run"] = True
|
||||
self.run_ids: dict[str, str] = {}
|
||||
self.strategylist: list[IStrategy] = []
|
||||
self.all_results: dict[str, dict] = {}
|
||||
self.all_bt_content: dict[str, BacktestContentType] = {}
|
||||
self.analysis_results: dict[str, dict[str, DataFrame]] = {
|
||||
"signals": {},
|
||||
"rejected": {},
|
||||
@@ -360,8 +364,9 @@ class Backtesting:
|
||||
)
|
||||
# Combine data to avoid combining the data per trade.
|
||||
unavailable_pairs = []
|
||||
uses_leverage_tiers = self.exchange.get_option("uses_leverage_tiers", True)
|
||||
for pair in self.pairlists.whitelist:
|
||||
if pair not in self.exchange._leverage_tiers:
|
||||
if uses_leverage_tiers and pair not in self.exchange._leverage_tiers:
|
||||
unavailable_pairs.append(pair)
|
||||
continue
|
||||
|
||||
@@ -1610,7 +1615,9 @@ class Backtesting:
|
||||
yield current_time_det, pair, row, is_last_row, trade_dir
|
||||
self.progress.increment()
|
||||
|
||||
def backtest(self, processed: dict, start_date: datetime, end_date: datetime) -> dict[str, Any]:
|
||||
def backtest(
|
||||
self, processed: dict, start_date: datetime, end_date: datetime
|
||||
) -> BacktestContentTypeIcomplete:
|
||||
"""
|
||||
Implement backtesting functionality
|
||||
|
||||
@@ -1710,7 +1717,7 @@ class Backtesting:
|
||||
"backtest_end_time": int(backtest_end_time.timestamp()),
|
||||
}
|
||||
)
|
||||
self.all_results[strategy_name] = results
|
||||
self.all_bt_content[strategy_name] = results
|
||||
|
||||
if (
|
||||
self.config.get("export", "none") == "signals"
|
||||
@@ -1773,9 +1780,9 @@ class Backtesting:
|
||||
min_date, max_date = self.backtest_one_strategy(strat, data, timerange)
|
||||
|
||||
# Update old results with new ones.
|
||||
if len(self.all_results) > 0:
|
||||
if len(self.all_bt_content) > 0:
|
||||
results = generate_backtest_stats(
|
||||
data, self.all_results, min_date=min_date, max_date=max_date
|
||||
data, self.all_bt_content, min_date=min_date, max_date=max_date
|
||||
)
|
||||
if self.results:
|
||||
self.results["metadata"].update(results["metadata"])
|
||||
|
||||
@@ -19,6 +19,7 @@ from freqtrade.data.history import get_timerange
|
||||
from freqtrade.data.metrics import calculate_market_change
|
||||
from freqtrade.enums import HyperoptState
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.ft_types import BacktestContentType
|
||||
from freqtrade.misc import deep_merge_dicts
|
||||
from freqtrade.optimize.backtesting import Backtesting
|
||||
|
||||
@@ -324,7 +325,7 @@ class HyperOptimizer:
|
||||
|
||||
def _get_results_dict(
|
||||
self,
|
||||
backtesting_results: dict[str, Any],
|
||||
backtesting_results: BacktestContentType,
|
||||
min_date: datetime,
|
||||
max_date: datetime,
|
||||
params_dict: dict[str, Any],
|
||||
|
||||
@@ -0,0 +1,59 @@
|
||||
"""
|
||||
MaxDrawDownPerPairHyperOptLoss
|
||||
|
||||
This module defines the alternative HyperOptLoss class which can be used for
|
||||
Hyperoptimization.
|
||||
"""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.optimize.hyperopt import IHyperOptLoss
|
||||
|
||||
|
||||
class MaxDrawDownPerPairHyperOptLoss(IHyperOptLoss):
|
||||
"""
|
||||
Defines the loss function for hyperopt.
|
||||
|
||||
This implementation calculates the profit/drawdown ratio per pair and
|
||||
returns the worst result as objective, forcing hyperopt to optimize
|
||||
the parameters for all pairs in the pairlist.
|
||||
|
||||
This way, we prevent one or more pairs with good results from inflating
|
||||
the metrics, while the rest of the pairs with poor results are not
|
||||
represented and therefore not optimized.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def hyperopt_loss_function(backtest_stats: dict[str, Any], *args, **kwargs) -> float:
|
||||
"""
|
||||
Objective function, returns smaller number for better results.
|
||||
"""
|
||||
|
||||
##############################################
|
||||
# Configurable parameters
|
||||
##############################################
|
||||
# Minimum acceptable profit/drawdown per pair
|
||||
min_acceptable_profit_dd = 1.0
|
||||
# Penalty when acceptable minimum are not met
|
||||
penalty = 20
|
||||
##############################################
|
||||
|
||||
score_per_pair = []
|
||||
for p in backtest_stats["results_per_pair"]:
|
||||
if p["key"] != "TOTAL":
|
||||
profit = p.get("profit_total_abs", 0)
|
||||
drawdown = p.get("max_drawdown_abs", 0)
|
||||
|
||||
if drawdown != 0 and profit != 0:
|
||||
profit_dd = profit / drawdown
|
||||
else:
|
||||
profit_dd = profit
|
||||
|
||||
if profit_dd < min_acceptable_profit_dd:
|
||||
score = profit_dd - penalty
|
||||
else:
|
||||
score = profit_dd
|
||||
|
||||
score_per_pair.append(score)
|
||||
|
||||
return -min(score_per_pair)
|
||||
@@ -102,7 +102,9 @@ def text_table_tags(
|
||||
[
|
||||
*(
|
||||
(
|
||||
(t["key"] if isinstance(t["key"], list) else [t["key"], ""])
|
||||
list(t["key"])
|
||||
if isinstance(t["key"], list | tuple)
|
||||
else [t["key"], ""]
|
||||
if is_list
|
||||
else [t["key"]]
|
||||
)
|
||||
|
||||
@@ -18,7 +18,11 @@ from freqtrade.data.metrics import (
|
||||
calculate_sortino,
|
||||
calculate_sqn,
|
||||
)
|
||||
from freqtrade.ft_types import BacktestResultType, get_BacktestResultType_default
|
||||
from freqtrade.ft_types import (
|
||||
BacktestContentType,
|
||||
BacktestResultType,
|
||||
get_BacktestResultType_default,
|
||||
)
|
||||
from freqtrade.util import decimals_per_coin, fmt_coin, get_dry_run_wallet
|
||||
|
||||
|
||||
@@ -26,7 +30,7 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def generate_trade_signal_candles(
|
||||
preprocessed_df: dict[str, DataFrame], bt_results: dict[str, Any], date_col: str
|
||||
preprocessed_df: dict[str, DataFrame], bt_results: BacktestContentType, date_col: str
|
||||
) -> dict[str, DataFrame]:
|
||||
signal_candles_only = {}
|
||||
for pair in preprocessed_df.keys():
|
||||
@@ -70,7 +74,11 @@ def generate_rejected_signals(
|
||||
|
||||
|
||||
def _generate_result_line(
|
||||
result: DataFrame, starting_balance: float, first_column: str | list[str]
|
||||
result: DataFrame,
|
||||
min_date: datetime,
|
||||
max_date: datetime,
|
||||
starting_balance: float,
|
||||
first_column: str | list[str],
|
||||
) -> dict:
|
||||
"""
|
||||
Generate one result dict, with "first_column" as key.
|
||||
@@ -78,6 +86,20 @@ def _generate_result_line(
|
||||
profit_sum = result["profit_ratio"].sum()
|
||||
# (end-capital - starting capital) / starting capital
|
||||
profit_total = result["profit_abs"].sum() / starting_balance
|
||||
backtest_days = (max_date - min_date).days or 1
|
||||
final_balance = starting_balance + result["profit_abs"].sum()
|
||||
expectancy, expectancy_ratio = calculate_expectancy(result)
|
||||
winning_profit = result.loc[result["profit_abs"] > 0, "profit_abs"].sum()
|
||||
losing_profit = result.loc[result["profit_abs"] < 0, "profit_abs"].sum()
|
||||
profit_factor = winning_profit / abs(losing_profit) if losing_profit else 0.0
|
||||
|
||||
try:
|
||||
drawdown = calculate_max_drawdown(
|
||||
result, value_col="profit_abs", starting_balance=starting_balance
|
||||
)
|
||||
|
||||
except ValueError:
|
||||
drawdown = None
|
||||
|
||||
return {
|
||||
"key": first_column,
|
||||
@@ -106,6 +128,16 @@ def _generate_result_line(
|
||||
"draws": len(result[result["profit_abs"] == 0]),
|
||||
"losses": len(result[result["profit_abs"] < 0]),
|
||||
"winrate": len(result[result["profit_abs"] > 0]) / len(result) if len(result) else 0.0,
|
||||
"cagr": calculate_cagr(backtest_days, starting_balance, final_balance),
|
||||
"expectancy": expectancy,
|
||||
"expectancy_ratio": expectancy_ratio,
|
||||
"sortino": calculate_sortino(result, min_date, max_date, starting_balance),
|
||||
"sharpe": calculate_sharpe(result, min_date, max_date, starting_balance),
|
||||
"calmar": calculate_calmar(result, min_date, max_date, starting_balance),
|
||||
"sqn": calculate_sqn(result, starting_balance),
|
||||
"profit_factor": profit_factor,
|
||||
"max_drawdown_account": drawdown.relative_account_drawdown if drawdown else 0.0,
|
||||
"max_drawdown_abs": drawdown.drawdown_abs if drawdown else 0.0,
|
||||
}
|
||||
|
||||
|
||||
@@ -121,6 +153,8 @@ def generate_pair_metrics( #
|
||||
stake_currency: str,
|
||||
starting_balance: float,
|
||||
results: DataFrame,
|
||||
min_date: datetime,
|
||||
max_date: datetime,
|
||||
skip_nan: bool = False,
|
||||
) -> list[dict]:
|
||||
"""
|
||||
@@ -140,13 +174,18 @@ def generate_pair_metrics( #
|
||||
if skip_nan and result["profit_abs"].isnull().all():
|
||||
continue
|
||||
|
||||
tabular_data.append(_generate_result_line(result, starting_balance, pair))
|
||||
tabular_data.append(
|
||||
_generate_result_line(result, min_date, max_date, starting_balance, pair)
|
||||
)
|
||||
|
||||
# Sort by total profit %:
|
||||
tabular_data = sorted(tabular_data, key=lambda k: k["profit_total_abs"], reverse=True)
|
||||
|
||||
# Append Total
|
||||
tabular_data.append(_generate_result_line(results, starting_balance, "TOTAL"))
|
||||
tabular_data.append(
|
||||
_generate_result_line(results, min_date, max_date, starting_balance, "TOTAL")
|
||||
)
|
||||
|
||||
return tabular_data
|
||||
|
||||
|
||||
@@ -154,6 +193,8 @@ def generate_tag_metrics(
|
||||
tag_type: Literal["enter_tag", "exit_reason"] | list[Literal["enter_tag", "exit_reason"]],
|
||||
starting_balance: float,
|
||||
results: DataFrame,
|
||||
min_date: datetime,
|
||||
max_date: datetime,
|
||||
skip_nan: bool = False,
|
||||
) -> list[dict]:
|
||||
"""
|
||||
@@ -173,13 +214,17 @@ def generate_tag_metrics(
|
||||
if skip_nan and group["profit_abs"].isnull().all():
|
||||
continue
|
||||
|
||||
tabular_data.append(_generate_result_line(group, starting_balance, tags))
|
||||
tabular_data.append(
|
||||
_generate_result_line(group, min_date, max_date, starting_balance, tags)
|
||||
)
|
||||
|
||||
# Sort by total profit %:
|
||||
tabular_data = sorted(tabular_data, key=lambda k: k["profit_total_abs"], reverse=True)
|
||||
|
||||
# Append Total
|
||||
tabular_data.append(_generate_result_line(results, starting_balance, "TOTAL"))
|
||||
tabular_data.append(
|
||||
_generate_result_line(results, min_date, max_date, starting_balance, "TOTAL")
|
||||
)
|
||||
return tabular_data
|
||||
else:
|
||||
return []
|
||||
@@ -366,7 +411,7 @@ def generate_daily_stats(results: DataFrame) -> dict[str, Any]:
|
||||
def generate_strategy_stats(
|
||||
pairlist: list[str],
|
||||
strategy: str,
|
||||
content: dict[str, Any],
|
||||
content: BacktestContentType,
|
||||
min_date: datetime,
|
||||
max_date: datetime,
|
||||
market_change: float,
|
||||
@@ -395,19 +440,33 @@ def generate_strategy_stats(
|
||||
stake_currency=stake_currency,
|
||||
starting_balance=start_balance,
|
||||
results=results,
|
||||
min_date=min_date,
|
||||
max_date=max_date,
|
||||
skip_nan=False,
|
||||
)
|
||||
|
||||
enter_tag_stats = generate_tag_metrics(
|
||||
"enter_tag", starting_balance=start_balance, results=results, skip_nan=False
|
||||
"enter_tag",
|
||||
starting_balance=start_balance,
|
||||
results=results,
|
||||
min_date=min_date,
|
||||
max_date=max_date,
|
||||
skip_nan=False,
|
||||
)
|
||||
exit_reason_stats = generate_tag_metrics(
|
||||
"exit_reason", starting_balance=start_balance, results=results, skip_nan=False
|
||||
"exit_reason",
|
||||
starting_balance=start_balance,
|
||||
results=results,
|
||||
min_date=min_date,
|
||||
max_date=max_date,
|
||||
skip_nan=False,
|
||||
)
|
||||
mix_tag_stats = generate_tag_metrics(
|
||||
["enter_tag", "exit_reason"],
|
||||
starting_balance=start_balance,
|
||||
results=results,
|
||||
min_date=min_date,
|
||||
max_date=max_date,
|
||||
skip_nan=False,
|
||||
)
|
||||
left_open_results = generate_pair_metrics(
|
||||
@@ -415,6 +474,8 @@ def generate_strategy_stats(
|
||||
stake_currency=stake_currency,
|
||||
starting_balance=start_balance,
|
||||
results=results.loc[results["exit_reason"] == "force_exit"],
|
||||
min_date=min_date,
|
||||
max_date=max_date,
|
||||
skip_nan=True,
|
||||
)
|
||||
|
||||
@@ -575,7 +636,7 @@ def generate_strategy_stats(
|
||||
|
||||
def generate_backtest_stats(
|
||||
btdata: dict[str, DataFrame],
|
||||
all_results: dict[str, dict[str, DataFrame | dict]],
|
||||
all_results: dict[str, BacktestContentType],
|
||||
min_date: datetime,
|
||||
max_date: datetime,
|
||||
) -> BacktestResultType:
|
||||
@@ -588,7 +649,7 @@ def generate_backtest_stats(
|
||||
:return: Dictionary containing results per strategy and a strategy summary.
|
||||
"""
|
||||
result: BacktestResultType = get_BacktestResultType_default()
|
||||
market_change = calculate_market_change(btdata, "close")
|
||||
market_change = calculate_market_change(btdata, "close", min_date=min_date)
|
||||
metadata = {}
|
||||
pairlist = list(btdata.keys())
|
||||
for strategy, content in all_results.items():
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from datetime import datetime, timezone
|
||||
from enum import Enum
|
||||
from typing import ClassVar
|
||||
from typing import ClassVar, Literal
|
||||
|
||||
from sqlalchemy import String
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
@@ -18,9 +18,11 @@ class ValueTypesEnum(str, Enum):
|
||||
INT = "int"
|
||||
|
||||
|
||||
class KeyStoreKeys(str, Enum):
|
||||
BOT_START_TIME = "bot_start_time"
|
||||
STARTUP_TIME = "startup_time"
|
||||
KeyStoreKeys = Literal[
|
||||
"bot_start_time",
|
||||
"startup_time",
|
||||
"binance_migration",
|
||||
]
|
||||
|
||||
|
||||
class _KeyValueStoreModel(ModelBase):
|
||||
@@ -192,7 +194,7 @@ class KeyValueStore:
|
||||
return kv.int_value
|
||||
|
||||
|
||||
def set_startup_time():
|
||||
def set_startup_time() -> None:
|
||||
"""
|
||||
sets bot_start_time to the first trade open date - or "now" on new databases.
|
||||
sets startup_time to "now"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import logging
|
||||
|
||||
from sqlalchemy import inspect, select, text, update
|
||||
from sqlalchemy import Engine, inspect, select, text, update
|
||||
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.persistence.trade_model import Order, Trade
|
||||
@@ -9,7 +9,7 @@ from freqtrade.persistence.trade_model import Order, Trade
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_table_names_for_table(inspector, tabletype) -> list[str]:
|
||||
def get_table_names_for_table(inspector, tabletype: str) -> list[str]:
|
||||
return [t for t in inspector.get_table_names() if t.startswith(tabletype)]
|
||||
|
||||
|
||||
@@ -350,7 +350,7 @@ def fix_wrong_max_stake_amount(engine):
|
||||
connection.execute(stmt)
|
||||
|
||||
|
||||
def check_migrate(engine, decl_base, previous_tables) -> None:
|
||||
def check_migrate(engine: Engine, decl_base, previous_tables: list[str]) -> None:
|
||||
"""
|
||||
Checks if migration is necessary and migrates if necessary
|
||||
"""
|
||||
|
||||
@@ -1155,18 +1155,21 @@ class LocalTrade:
|
||||
profit_ratio = 0.0
|
||||
|
||||
total_profit_abs = profit_abs + self.realized_profit
|
||||
total_profit_ratio = (
|
||||
(total_profit_abs / self.max_stake_amount) * self.leverage
|
||||
if self.max_stake_amount
|
||||
else 0.0
|
||||
)
|
||||
total_profit_ratio = float(f"{total_profit_ratio:.8f}")
|
||||
if self.max_stake_amount:
|
||||
max_stake = self.max_stake_amount * (
|
||||
(1 - self.fee_open) if self.is_short else (1 + self.fee_open)
|
||||
)
|
||||
total_profit_ratio = total_profit_abs / max_stake
|
||||
total_profit_ratio = float(f"{total_profit_ratio:.8f}")
|
||||
else:
|
||||
total_profit_ratio = 0.0
|
||||
profit_abs = float(f"{profit_abs:.8f}")
|
||||
total_profit_abs = float(f"{total_profit_abs:.8f}")
|
||||
|
||||
return ProfitStruct(
|
||||
profit_abs=profit_abs,
|
||||
profit_ratio=profit_ratio,
|
||||
total_profit=profit_abs + self.realized_profit,
|
||||
total_profit=total_profit_abs,
|
||||
total_profit_ratio=total_profit_ratio,
|
||||
)
|
||||
|
||||
|
||||
@@ -460,7 +460,7 @@ def generate_candlestick_graph(
|
||||
rows=rows,
|
||||
cols=1,
|
||||
shared_xaxes=True,
|
||||
row_width=row_widths + [1, 4],
|
||||
row_width=[*row_widths, 1, 4],
|
||||
vertical_spacing=0.0001,
|
||||
)
|
||||
fig["layout"].update(title=pair)
|
||||
|
||||
@@ -251,6 +251,7 @@ class IPairList(LoggingMixin, ABC):
|
||||
f"Pair {pair} is not compatible with exchange "
|
||||
f"{self._exchange.name}. Removing it from whitelist..",
|
||||
logger.warning,
|
||||
True,
|
||||
)
|
||||
continue
|
||||
|
||||
@@ -258,6 +259,7 @@ class IPairList(LoggingMixin, ABC):
|
||||
self.log_once(
|
||||
f"Pair {pair} is not tradable with Freqtrade. Removing it from whitelist..",
|
||||
logger.warning,
|
||||
True,
|
||||
)
|
||||
continue
|
||||
|
||||
@@ -266,13 +268,18 @@ class IPairList(LoggingMixin, ABC):
|
||||
f"Pair {pair} is not compatible with your stake currency "
|
||||
f"{self._config['stake_currency']}. Removing it from whitelist..",
|
||||
logger.warning,
|
||||
True,
|
||||
)
|
||||
continue
|
||||
|
||||
# Check if market is active
|
||||
market = markets[pair]
|
||||
if not market_is_active(market):
|
||||
self.log_once(f"Ignoring {pair} from whitelist. Market is not active.", logger.info)
|
||||
self.log_once(
|
||||
f"Ignoring {pair} from whitelist. Market is not active.",
|
||||
logger.info,
|
||||
True,
|
||||
)
|
||||
continue
|
||||
if pair not in sanitized_whitelist:
|
||||
sanitized_whitelist.append(pair)
|
||||
|
||||
@@ -96,7 +96,10 @@ def __run_backtest_bg(btconfig: Config):
|
||||
)
|
||||
|
||||
ApiBG.bt["bt"].results = generate_backtest_stats(
|
||||
ApiBG.bt["data"], ApiBG.bt["bt"].all_results, min_date=min_date, max_date=max_date
|
||||
ApiBG.bt["data"],
|
||||
ApiBG.bt["bt"].all_bt_content,
|
||||
min_date=min_date,
|
||||
max_date=max_date,
|
||||
)
|
||||
|
||||
if btconfig.get("export", "none") == "trades":
|
||||
|
||||
@@ -200,9 +200,12 @@ def status(rpc: RPC = Depends(get_rpc)):
|
||||
def trades(
|
||||
limit: int = Query(500, ge=1, description="Maximum number of different trades to return data"),
|
||||
offset: int = Query(0, ge=0, description="Number of trades to skip for pagination"),
|
||||
order_by_id: bool = Query(
|
||||
True, description="Sort trades by id (default: True). If False, sorts by latest timestamp"
|
||||
),
|
||||
rpc: RPC = Depends(get_rpc),
|
||||
):
|
||||
return rpc._rpc_trade_history(limit, offset=offset, order_by_id=True)
|
||||
return rpc._rpc_trade_history(limit, offset=offset, order_by_id=order_by_id)
|
||||
|
||||
|
||||
@router.get("/trade/{tradeid}", response_model=OpenTradeSchema, tags=["info", "trading"])
|
||||
@@ -369,10 +372,11 @@ def stop(rpc: RPC = Depends(get_rpc)):
|
||||
return rpc._rpc_stop()
|
||||
|
||||
|
||||
@router.post("/pause", response_model=StatusMsg, tags=["botcontrol"])
|
||||
@router.post("/stopentry", response_model=StatusMsg, tags=["botcontrol"])
|
||||
@router.post("/stopbuy", response_model=StatusMsg, tags=["botcontrol"])
|
||||
def stop_buy(rpc: RPC = Depends(get_rpc)):
|
||||
return rpc._rpc_stopentry()
|
||||
def pause(rpc: RPC = Depends(get_rpc)):
|
||||
return rpc._rpc_pause()
|
||||
|
||||
|
||||
@router.post("/reload_config", response_model=StatusMsg, tags=["botcontrol"])
|
||||
|
||||
@@ -41,7 +41,7 @@ class UvicornServer(uvicorn.Server):
|
||||
but we need to create uvloop event loop manually
|
||||
"""
|
||||
try:
|
||||
import uvloop # noqa
|
||||
import uvloop
|
||||
except ImportError: # pragma: no cover
|
||||
asyncio_setup()
|
||||
else:
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user