Mirror of https://github.com/freqtrade/freqtrade.git (synced 2025-12-01 09:33:05 +00:00)

Compare commits: 387 commits
.github/workflows/binance-lev-tier-update.yml (vendored, new file, 47 lines)

@@ -0,0 +1,47 @@
name: Binance Leverage tiers update

on:
  schedule:
    - cron: "0 3 * * 4"
  # on demand
  workflow_dispatch:

permissions:
  contents: read

jobs:
  auto-update:
    runs-on: ubuntu-latest
    environment:
      name: develop
    steps:
      - uses: actions/checkout@v4

      - uses: actions/setup-python@v5
        with:
          python-version: "3.11"

      - name: Install ccxt
        run: pip install ccxt

      - name: Run leverage tier update
        env:
          CI_WEB_PROXY: ${{ secrets.CI_WEB_PROXY }}
          FREQTRADE__EXCHANGE__KEY: ${{ secrets.BINANCE_EXCHANGE_KEY }}
          FREQTRADE__EXCHANGE__SECRET: ${{ secrets.BINANCE_EXCHANGE_SECRET }}
        run: python build_helpers/binance_update_lev_tiers.py

      - uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.REPO_SCOPED_TOKEN }}
          add-paths: freqtrade/exchange/binance_leverage_tiers.json
          labels: |
            Tech maintenance
            Dependencies
          branch: update/binance-leverage-tiers
          title: Update Binance Leverage Tiers
          commit-message: "chore: update pre-commit hooks"
          committer: Freqtrade Bot <noreply@github.com>
          body: Update binance leverage tiers.
          delete-branch: true
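Besides the weekly cron schedule, the `workflow_dispatch` trigger lets the update run on demand. A minimal sketch of such a manual run with the GitHub CLI (not part of the diff; assumes the workflow file name above and the `develop` branch):

``` bash
# Start the leverage-tier update manually (requires write access to the repository)
gh workflow run binance-lev-tier-update.yml --ref develop
# Follow the run that was just started
gh run watch
```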
.github/workflows/ci.yml (vendored, 58 lines changed)

@@ -11,7 +11,7 @@ on:
    types: [published]
  pull_request:
  schedule:
    - cron: '0 5 * * 4'
    - cron: '0 3 * * 4'

concurrency:
  group: "${{ github.workflow }}-${{ github.ref }}-${{ github.event_name }}"
@@ -19,7 +19,7 @@ concurrency:
permissions:
  repository-projects: read
jobs:
  build_linux:
  build-linux:

    runs-on: ${{ matrix.os }}
    strategy:
@@ -60,11 +60,16 @@ jobs:
        export TA_LIBRARY_PATH=${HOME}/dependencies/lib
        export TA_INCLUDE_PATH=${HOME}/dependencies/include
        pip install -r requirements-dev.txt
        pip install -e ft_client/
        pip install -e .

    - name: Check for version alignment
      run: |
        python build_helpers/freqtrade_client_version_align.py

    - name: Tests
      run: |
        pytest --random-order --cov=freqtrade --cov-config=.coveragerc
        pytest --random-order --cov=freqtrade --cov=freqtrade_client --cov-config=.coveragerc

    - name: Coveralls
      if: (runner.os == 'Linux' && matrix.python-version == '3.10' && matrix.os == 'ubuntu-22.04')
@@ -124,8 +129,11 @@ jobs:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ "macos-latest", "macos-13" ]
        os: [ "macos-latest", "macos-13", "macos-14" ]
        python-version: ["3.9", "3.10", "3.11", "3.12"]
        exclude:
          - os: "macos-14"
            python-version: "3.9"

    steps:
    - uses: actions/checkout@v4
@@ -154,7 +162,7 @@ jobs:
      run: |
        cd build_helpers && ./install_ta-lib.sh ${HOME}/dependencies/; cd ..

    - name: Installation - macOS
    - name: Installation - macOS (Brew)
      run: |
        # brew update
        # TODO: Should be the brew upgrade
@@ -177,11 +185,15 @@ jobs:
        rm /usr/local/bin/python3.12-config || true

        brew install hdf5 c-blosc libomp

    - name: Installation (python)
      run: |
        python -m pip install --upgrade pip wheel
        export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
        export TA_LIBRARY_PATH=${HOME}/dependencies/lib
        export TA_INCLUDE_PATH=${HOME}/dependencies/include
        pip install -r requirements-dev.txt
        pip install -e ft_client/
        pip install -e .

    - name: Tests
@@ -356,7 +368,7 @@ jobs:
        webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}


  build_linux_online:
  build-linux-online:
    # Run pytest with "live" checks
    runs-on: ubuntu-22.04
    steps:
@@ -392,29 +404,30 @@ jobs:
        export TA_LIBRARY_PATH=${HOME}/dependencies/lib
        export TA_INCLUDE_PATH=${HOME}/dependencies/include
        pip install -r requirements-dev.txt
        pip install -e ft_client/
        pip install -e .

    - name: Tests incl. ccxt compatibility tests
      env:
        CI_WEB_PROXY: http://152.67.78.211:13128
      run: |
        pytest --random-order --longrun --durations 20 -n auto --dist loadscope
        pytest --random-order --longrun --durations 20 -n auto


  # Notify only once - when CI completes (and after deploy) in case it's successfull
  notify-complete:
    needs: [
      build_linux,
      build-linux,
      build-macos,
      build-windows,
      docs-check,
      mypy-version-check,
      pre-commit,
      build_linux_online
      build-linux-online
    ]
    runs-on: ubuntu-22.04
    # Discord notification can't handle schedule events
    if: (github.event_name != 'schedule')
    if: github.event_name != 'schedule' && github.repository == 'freqtrade/freqtrade'
    permissions:
      repository-projects: read
    steps:
@@ -437,7 +450,7 @@ jobs:

  build:
    name: "Build"
    needs: [ build_linux, build-macos, build-windows, docs-check, mypy-version-check, pre-commit ]
    needs: [ build-linux, build-macos, build-windows, docs-check, mypy-version-check, pre-commit ]
    runs-on: ubuntu-22.04

    steps:
@@ -461,6 +474,19 @@ jobs:
          dist
        retention-days: 10

    - name: Build Client distribution
      run: |
        pip install -U build
        python -m build --sdist --wheel ft_client

    - name: Upload artifacts 📦
      uses: actions/upload-artifact@v4
      with:
        name: freqtrade-client-build
        path: |
          ft_client/dist
        retention-days: 10

  deploy-pypi:
    name: "Deploy to PyPI"
    needs: [ build ]
@@ -478,20 +504,22 @@ jobs:
    - name: Download artifact 📦
      uses: actions/download-artifact@v4
      with:
        name: freqtrade-build
        pattern: freqtrade*-build
        path: dist
        merge-multiple: true


    - name: Publish to PyPI (Test)
      uses: pypa/gh-action-pypi-publish@v1.8.11
      uses: pypa/gh-action-pypi-publish@v1.8.14
      with:
        repository-url: https://test.pypi.org/legacy/

    - name: Publish to PyPI
      uses: pypa/gh-action-pypi-publish@v1.8.11
      uses: pypa/gh-action-pypi-publish@v1.8.14


  deploy-docker:
    needs: [ build_linux, build-macos, build-windows, docs-check, mypy-version-check, pre-commit ]
    needs: [ build-linux, build-macos, build-windows, docs-check, mypy-version-check, pre-commit ]
    runs-on: ubuntu-22.04

    if: (github.event_name == 'push' || github.event_name == 'schedule' || github.event_name == 'release') && github.repository == 'freqtrade/freqtrade'
@@ -9,9 +9,10 @@ jobs:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v4

    - name: Docker Hub Description
      uses: peter-evans/dockerhub-description@v4
      env:
        DOCKERHUB_USERNAME: ${{ secrets.DOCKER_USERNAME }}
        DOCKERHUB_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
        DOCKERHUB_REPOSITORY: freqtradeorg/freqtrade
      with:
        username: ${{ secrets.DOCKER_USERNAME }}
        password: ${{ secrets.DOCKER_PASSWORD }}
        repository: freqtradeorg/freqtrade
.github/workflows/pre-commit-update.yml (vendored, 47 lines changed; the steps block itself was only re-indented)

@@ -1,7 +1,6 @@
name: Pre-commit auto-update

on:
  # every day at midnight
  schedule:
    - cron: "0 3 * * 2"
  # on demand
@@ -14,32 +13,32 @@ jobs:
  auto-update:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - uses: actions/setup-python@v5
        with:
          python-version: "3.11"

      - name: Install pre-commit
        run: pip install pre-commit

      - name: Run auto-update
        run: pre-commit autoupdate

      - name: Run pre-commit
        run: pre-commit run --all-files

      - uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.REPO_SCOPED_TOKEN }}
          add-paths: .pre-commit-config.yaml
          labels: |
            Tech maintenance
            Dependencies
          branch: update/pre-commit-hooks
          title: Update pre-commit hooks
          commit-message: "chore: update pre-commit hooks"
          committer: Freqtrade Bot <noreply@github.com>
          body: Update versions of pre-commit hooks to latest version.
          delete-branch: true
@@ -9,17 +9,17 @@ repos:
    # stages: [push]

  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: "v1.8.0"
    rev: "v1.9.0"
    hooks:
      - id: mypy
        exclude: build_helpers
        additional_dependencies:
          - types-cachetools==5.3.0.7
          - types-filelock==3.2.7
          - types-requests==2.31.0.20240218
          - types-requests==2.31.0.20240311
          - types-tabulate==0.9.0.20240106
          - types-python-dateutil==2.8.19.20240106
          - SQLAlchemy==2.0.27
          - types-python-dateutil==2.9.0.20240316
          - SQLAlchemy==2.0.29
        # stages: [push]

  - repo: https://github.com/pycqa/isort
@@ -31,7 +31,7 @@ repos:

  - repo: https://github.com/charliermarsh/ruff-pre-commit
    # Ruff version.
    rev: 'v0.2.2'
    rev: 'v0.3.4'
    hooks:
      - id: ruff
@@ -48,7 +48,7 @@ pytest tests/test_<file_name>.py::test_<method_name>
#### Run Ruff

```bash
ruff .
ruff check .
```

We receive a lot of code that fails the `ruff` checks.
@@ -1,4 +1,4 @@
FROM python:3.11.8-slim-bookworm as base
FROM python:3.12.2-slim-bookworm as base

# Setup env
ENV LANG C.UTF-8
build_helpers/binance_update_lev_tiers.py (new file, 26 lines)

@@ -0,0 +1,26 @@
#!/usr/bin/env python3
import json
import os
from pathlib import Path

import ccxt


key = os.environ.get('FREQTRADE__EXCHANGE__KEY')
secret = os.environ.get('FREQTRADE__EXCHANGE__SECRET')

proxy = os.environ.get('CI_WEB_PROXY')

exchange = ccxt.binance({
    'apiKey': key,
    'secret': secret,
    'httpsProxy': proxy,
    'options': {'defaultType': 'swap'}
})
_ = exchange.load_markets()

lev_tiers = exchange.fetch_leverage_tiers()

# Assumes this is running in the root of the repository.
file = Path('freqtrade/exchange/binance_leverage_tiers.json')
json.dump(dict(sorted(lev_tiers.items())), file.open('w'), indent=2)
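A minimal local run of this helper might look like the following sketch (assumes valid Binance API credentials in the environment; `CI_WEB_PROXY` is only needed when a proxy is required):

``` bash
export FREQTRADE__EXCHANGE__KEY="<your-binance-key>"
export FREQTRADE__EXCHANGE__SECRET="<your-binance-secret>"
# Run from the repository root so the JSON lands in freqtrade/exchange/
python build_helpers/binance_update_lev_tiers.py
# Inspect what changed before committing
git diff --stat freqtrade/exchange/binance_leverage_tiers.json
```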
build_helpers/freqtrade_client_version_align.py (new executable file, 18 lines)

@@ -0,0 +1,18 @@
#!/usr/bin/env python3
from freqtrade_client import __version__ as client_version

from freqtrade import __version__ as ft_version


def main():
    if ft_version != client_version:
        print(f"Versions do not match: \n"
              f"ft: {ft_version} \n"
              f"client: {client_version}")
        exit(1)
    print(f"Versions match: ft: {ft_version}, client: {client_version}")
    exit(0)


if __name__ == '__main__':
    main()
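The script reports purely through its exit code, which is what the new "Check for version alignment" CI step relies on. A local check could be run the same way (sketch; assumes both packages are installed in the active environment):

``` bash
python build_helpers/freqtrade_client_version_align.py || echo "freqtrade and freqtrade-client versions differ"
```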
Binary file not shown.
Binary file not shown.
@@ -109,12 +109,12 @@ automatically accessible by including them on the indicator-list, and these incl
- **open_date :** trade open datetime
- **close_date :** trade close datetime
- **min_rate :** minimum price seen throughout the position
- **max_rate :** maxiumum price seen throughout the position
- **max_rate :** maximum price seen throughout the position
- **open :** signal candle open price
- **close :** signal candle close price
- **high :** signal candle high price
- **low :** signal candle low price
- **volume :** signal candle volumne
- **volume :** signal candle volume
- **profit_ratio :** trade profit ratio
- **profit_abs :** absolute profit return of the trade
BIN docs/assets/show-config-output.png (new file, 29 KiB): binary file not shown.
@@ -252,34 +252,34 @@ The most important in the backtesting is to understand the result.
A backtesting result will look like that:

```
========================================================= BACKTESTING REPORT =========================================================
| Pair | Entries | Avg Profit % | Cum Profit % | Tot Profit BTC | Tot Profit % | Avg Duration | Wins Draws Loss Win% |
|:---------|--------:|---------------:|---------------:|-----------------:|---------------:|:-------------|-------------------------:|
| ADA/BTC | 35 | -0.11 | -3.88 | -0.00019428 | -1.94 | 4:35:00 | 14 0 21 40.0 |
| ARK/BTC | 11 | -0.41 | -4.52 | -0.00022647 | -2.26 | 2:03:00 | 3 0 8 27.3 |
| BTS/BTC | 32 | 0.31 | 9.78 | 0.00048938 | 4.89 | 5:05:00 | 18 0 14 56.2 |
| DASH/BTC | 13 | -0.08 | -1.07 | -0.00005343 | -0.53 | 4:39:00 | 6 0 7 46.2 |
| ENG/BTC | 18 | 1.36 | 24.54 | 0.00122807 | 12.27 | 2:50:00 | 8 0 10 44.4 |
| EOS/BTC | 36 | 0.08 | 3.06 | 0.00015304 | 1.53 | 3:34:00 | 16 0 20 44.4 |
| ETC/BTC | 26 | 0.37 | 9.51 | 0.00047576 | 4.75 | 6:14:00 | 11 0 15 42.3 |
| ETH/BTC | 33 | 0.30 | 9.96 | 0.00049856 | 4.98 | 7:31:00 | 16 0 17 48.5 |
| IOTA/BTC | 32 | 0.03 | 1.09 | 0.00005444 | 0.54 | 3:12:00 | 14 0 18 43.8 |
| LSK/BTC | 15 | 1.75 | 26.26 | 0.00131413 | 13.13 | 2:58:00 | 6 0 9 40.0 |
| LTC/BTC | 32 | -0.04 | -1.38 | -0.00006886 | -0.69 | 4:49:00 | 11 0 21 34.4 |
| NANO/BTC | 17 | 1.26 | 21.39 | 0.00107058 | 10.70 | 1:55:00 | 10 0 7 58.5 |
| NEO/BTC | 23 | 0.82 | 18.97 | 0.00094936 | 9.48 | 2:59:00 | 10 0 13 43.5 |
| REQ/BTC | 9 | 1.17 | 10.54 | 0.00052734 | 5.27 | 3:47:00 | 4 0 5 44.4 |
| XLM/BTC | 16 | 1.22 | 19.54 | 0.00097800 | 9.77 | 3:15:00 | 7 0 9 43.8 |
| XMR/BTC | 23 | -0.18 | -4.13 | -0.00020696 | -2.07 | 5:30:00 | 12 0 11 52.2 |
| XRP/BTC | 35 | 0.66 | 22.96 | 0.00114897 | 11.48 | 3:49:00 | 12 0 23 34.3 |
| ZEC/BTC | 22 | -0.46 | -10.18 | -0.00050971 | -5.09 | 2:22:00 | 7 0 15 31.8 |
| TOTAL | 429 | 0.36 | 152.41 | 0.00762792 | 76.20 | 4:12:00 | 186 0 243 43.4 |
====================================================== LEFT OPEN TRADES REPORT ======================================================
| Pair | Entries | Avg Profit % | Cum Profit % | Tot Profit BTC | Tot Profit % | Avg Duration | Win Draw Loss Win% |
|:---------|---------:|---------------:|---------------:|-----------------:|---------------:|:---------------|--------------------:|
| ADA/BTC | 1 | 0.89 | 0.89 | 0.00004434 | 0.44 | 6:00:00 | 1 0 0 100 |
| LTC/BTC | 1 | 0.68 | 0.68 | 0.00003421 | 0.34 | 2:00:00 | 1 0 0 100 |
| TOTAL | 2 | 0.78 | 1.57 | 0.00007855 | 0.78 | 4:00:00 | 2 0 0 100 |
================================================ BACKTESTING REPORT =================================================
| Pair | Entries | Avg Profit % | Tot Profit BTC | Tot Profit % | Avg Duration | Wins Draws Loss Win% |
|:---------|--------:|---------------:|-----------------:|---------------:|:-------------|-------------------------:|
| ADA/BTC | 35 | -0.11 | -0.00019428 | -1.94 | 4:35:00 | 14 0 21 40.0 |
| ARK/BTC | 11 | -0.41 | -0.00022647 | -2.26 | 2:03:00 | 3 0 8 27.3 |
| BTS/BTC | 32 | 0.31 | 0.00048938 | 4.89 | 5:05:00 | 18 0 14 56.2 |
| DASH/BTC | 13 | -0.08 | -0.00005343 | -0.53 | 4:39:00 | 6 0 7 46.2 |
| ENG/BTC | 18 | 1.36 | 0.00122807 | 12.27 | 2:50:00 | 8 0 10 44.4 |
| EOS/BTC | 36 | 0.08 | 0.00015304 | 1.53 | 3:34:00 | 16 0 20 44.4 |
| ETC/BTC | 26 | 0.37 | 0.00047576 | 4.75 | 6:14:00 | 11 0 15 42.3 |
| ETH/BTC | 33 | 0.30 | 0.00049856 | 4.98 | 7:31:00 | 16 0 17 48.5 |
| IOTA/BTC | 32 | 0.03 | 0.00005444 | 0.54 | 3:12:00 | 14 0 18 43.8 |
| LSK/BTC | 15 | 1.75 | 0.00131413 | 13.13 | 2:58:00 | 6 0 9 40.0 |
| LTC/BTC | 32 | -0.04 | -0.00006886 | -0.69 | 4:49:00 | 11 0 21 34.4 |
| NANO/BTC | 17 | 1.26 | 0.00107058 | 10.70 | 1:55:00 | 10 0 7 58.5 |
| NEO/BTC | 23 | 0.82 | 0.00094936 | 9.48 | 2:59:00 | 10 0 13 43.5 |
| REQ/BTC | 9 | 1.17 | 0.00052734 | 5.27 | 3:47:00 | 4 0 5 44.4 |
| XLM/BTC | 16 | 1.22 | 0.00097800 | 9.77 | 3:15:00 | 7 0 9 43.8 |
| XMR/BTC | 23 | -0.18 | -0.00020696 | -2.07 | 5:30:00 | 12 0 11 52.2 |
| XRP/BTC | 35 | 0.66 | 0.00114897 | 11.48 | 3:49:00 | 12 0 23 34.3 |
| ZEC/BTC | 22 | -0.46 | -0.00050971 | -5.09 | 2:22:00 | 7 0 15 31.8 |
| TOTAL | 429 | 0.36 | 0.00762792 | 76.20 | 4:12:00 | 186 0 243 43.4 |
============================================= LEFT OPEN TRADES REPORT =============================================
| Pair | Entries | Avg Profit % | Tot Profit BTC | Tot Profit % | Avg Duration | Win Draw Loss Win% |
|:---------|---------:|---------------:|-----------------:|---------------:|:---------------|--------------------:|
| ADA/BTC | 1 | 0.89 | 0.00004434 | 0.44 | 6:00:00 | 1 0 0 100 |
| LTC/BTC | 1 | 0.68 | 0.00003421 | 0.34 | 2:00:00 | 1 0 0 100 |
| TOTAL | 2 | 0.78 | 0.00007855 | 0.78 | 4:00:00 | 2 0 0 100 |
==================== EXIT REASON STATS ====================
| Exit Reason | Exits | Wins | Draws | Losses |
|:-------------------|--------:|------:|-------:|--------:|
@@ -358,7 +358,7 @@ here:
The bot has made `429` trades for an average duration of `4:12:00`, with a performance of `76.20%` (profit), that means it has
earned a total of `0.00762792 BTC` starting with a capital of 0.01 BTC.

The column `Avg Profit %` shows the average profit for all trades made while the column `Cum Profit %` sums up all the profits/losses.
The column `Avg Profit %` shows the average profit for all trades made.
The column `Tot Profit %` shows instead the total profit % in relation to the starting balance.
In the above results, we have a starting balance of 0.01 BTC and an absolute profit of 0.00762792 BTC - so the `Tot Profit %` will be `(0.00762792 / 0.01) * 100 ~= 76.2%`.

@@ -464,7 +464,7 @@ It contains some useful key metrics about performance of your strategy on backte
- `Profit factor`: profit / loss.
- `Avg. stake amount`: Average stake amount, either `stake_amount` or the average when using dynamic stake amount.
- `Total trade volume`: Volume generated on the exchange to reach the above profit.
- `Best Pair` / `Worst Pair`: Best and worst performing pair, and it's corresponding `Cum Profit %`.
- `Best Pair` / `Worst Pair`: Best and worst performing pair, and it's corresponding `Tot Profit %`.
- `Best Trade` / `Worst Trade`: Biggest single winning trade and biggest single losing trade.
- `Best day` / `Worst day`: Best and worst day based on daily profit.
- `Days win/draw/lose`: Winning / Losing days (draws are usually days without closed trade).
@@ -629,11 +629,11 @@ There will be an additional table comparing win/losses of the different strategi
Detailed output for all strategies one after the other will be available, so make sure to scroll up to see the details per strategy.

```
=========================================================== STRATEGY SUMMARY ===========================================================================
| Strategy | Entries | Avg Profit % | Cum Profit % | Tot Profit BTC | Tot Profit % | Avg Duration | Wins | Draws | Losses | Drawdown % |
|:------------|---------:|---------------:|---------------:|-----------------:|---------------:|:---------------|------:|-------:|-------:|-----------:|
| Strategy1 | 429 | 0.36 | 152.41 | 0.00762792 | 76.20 | 4:12:00 | 186 | 0 | 243 | 45.2 |
| Strategy2 | 1487 | -0.13 | -197.58 | -0.00988917 | -98.79 | 4:43:00 | 662 | 0 | 825 | 241.68 |
================================================== STRATEGY SUMMARY ===================================================================
| Strategy | Entries | Avg Profit % | Tot Profit BTC | Tot Profit % | Avg Duration | Wins | Draws | Losses | Drawdown % |
|:------------|---------:|---------------:|-----------------:|---------------:|:---------------|------:|-------:|-------:|-----------:|
| Strategy1 | 429 | 0.36 | 0.00762792 | 76.20 | 4:12:00 | 186 | 0 | 243 | 45.2 |
| Strategy2 | 1487 | -0.13 | -0.00988917 | -98.79 | 4:43:00 | 662 | 0 | 825 | 241.68 |
```

## Next step
@@ -33,7 +33,6 @@ For spot pairs, naming will be `base/quote` (e.g. `ETH/USDT`).

For futures pairs, naming will be `base/quote:settle` (e.g. `ETH/USDT:USDT`).

## Bot execution logic

Starting freqtrade in dry-run or live mode (using `freqtrade trade`) will start the bot and start the bot iteration loop.
@@ -50,10 +49,12 @@ By default, the bot loop runs every few seconds (`internals.process_throttle_sec
* Call `populate_indicators()`
* Call `populate_entry_trend()`
* Call `populate_exit_trend()`
* Check timeouts for open orders.
  * Calls `check_entry_timeout()` strategy callback for open entry orders.
  * Calls `check_exit_timeout()` strategy callback for open exit orders.
  * Calls `adjust_entry_price()` strategy callback for open entry orders.
* Update trades open order state from exchange.
  * Call `order_filled()` strategy callback for filled orders.
* Check timeouts for open orders.
  * Calls `check_entry_timeout()` strategy callback for open entry orders.
  * Calls `check_exit_timeout()` strategy callback for open exit orders.
  * Calls `adjust_entry_price()` strategy callback for open entry orders.
* Verifies existing positions and eventually places exit orders.
  * Considers stoploss, ROI and exit-signal, `custom_exit()` and `custom_stoploss()`.
  * Determine exit-price based on `exit_pricing` configuration setting or by using the `custom_exit_price()` callback.
@@ -86,8 +87,10 @@ This loop will be repeated again and again until the bot is stopped.
* In Margin and Futures mode, `leverage()` strategy callback is called to determine the desired leverage.
* Determine stake size by calling the `custom_stake_amount()` callback.
* Check position adjustments for open trades if enabled and call `adjust_trade_position()` to determine if an additional order is requested.
* Call `order_filled()` strategy callback for filled entry orders.
* Call `custom_stoploss()` and `custom_exit()` to find custom exit points.
* For exits based on exit-signal, custom-exit and partial exits: Call `custom_exit_price()` to determine exit price (Prices are moved to be within the closing candle).
* Call `order_filled()` strategy callback for filled exit orders.
* Generate backtest report output

!!! Note
@@ -49,6 +49,13 @@ FREQTRADE__EXCHANGE__SECRET=<yourExchangeSecret>
!!! Note
    Environment variables detected are logged at startup - so if you can't find why a value is not what you think it should be based on the configuration, make sure it's not loaded from an environment variable.

!!! Tip "Validate combined result"
    You can use the [show-config subcommand](utils.md#show-config) to see the final, combined configuration.

??? Warning "Loading sequence"
    Environment variables are loaded after the initial configuration. As such, you cannot provide the path to the configuration through environment variables. Please use `--config path/to/config.json` for that.
    This also applies to user_dir to some degree. While the user directory can be set through environment variables - the configuration will **not** be loaded from that location.
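As a quick illustration of the variable naming used above (a sketch, not part of the diff): each `__` descends one level into the configuration, and the merged result can be checked with the `show-config` subcommand mentioned in the tip.

``` bash
# Equivalent to setting exchange.key / exchange.secret in config.json
export FREQTRADE__EXCHANGE__KEY="<yourExchangeKey>"
export FREQTRADE__EXCHANGE__SECRET="<yourExchangeSecret>"
# Inspect the final, combined configuration
freqtrade show-config --config config.json
```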
### Multiple configuration files

Multiple configuration files can be specified and used by the bot or the bot can read its configuration parameters from the process standard input stream.
@@ -56,6 +63,9 @@ Multiple configuration files can be specified and used by the bot or the bot can
You can specify additional configuration files in `add_config_files`. Files specified in this parameter will be loaded and merged with the initial config file. The files are resolved relative to the initial configuration file.
This is similar to using multiple `--config` parameters, but simpler in usage as you don't have to specify all files for all commands.

!!! Tip "Validate combined result"
    You can use the [show-config subcommand](utils.md#show-config) to see the final, combined configuration.

!!! Tip "Use multiple configuration files to keep secrets secret"
    You can use a 2nd configuration file containing your secrets. That way you can share your "primary" configuration file, while still keeping your API keys for yourself.
    The 2nd file should only specify what you intend to override.
@@ -326,6 +336,8 @@ You'd set `available_capital=5000` - granting each bot an initial capital of 500
The bot will then split this starting balance equally into `max_open_trades` buckets.
Profitable trades will result in increased stake-sizes for this bot - without affecting the stake-sizes of the other bot.

Adjusting `available_capital` requires reloading the configuration to take effect. Adjusting the `available_capital` adds the difference between the previous `available_capital` and the new `available_capital`. Decreasing the available capital when trades are open doesn't exit the trades. The difference is returned to the wallet when the trades conclude. The outcome of this differs depending on the price movement between the adjustment and exiting the trades.

!!! Warning "Incompatible with `tradable_balance_ratio`"
    Setting this option will replace any configuration of `tradable_balance_ratio`.
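A minimal sketch of the per-bot settings described above (values are illustrative; each bot sharing the account would set its own `available_capital`):

``` json
{
    "max_open_trades": 5,
    "stake_amount": "unlimited",
    "available_capital": 5000
}
```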
@@ -503,13 +515,13 @@ Configuration:
Please carefully read the [Market order pricing](#market-order-pricing) section when using market orders.

!!! Note "Stoploss on exchange"
    `stoploss_on_exchange_interval` is not mandatory. Do not change its value if you are
    `order_types.stoploss_on_exchange_interval` is not mandatory. Do not change its value if you are
    unsure of what you are doing. For more information about how stoploss works please
    refer to [the stoploss documentation](stoploss.md).

If `stoploss_on_exchange` is enabled and the stoploss is cancelled manually on the exchange, then the bot will create a new stoploss order.
If `order_types.stoploss_on_exchange` is enabled and the stoploss is cancelled manually on the exchange, then the bot will create a new stoploss order.

!!! Warning "Warning: stoploss_on_exchange failures"
!!! Warning "Warning: order_types.stoploss_on_exchange failures"
    If stoploss on exchange creation fails for some reason, then an "emergency exit" is initiated. By default, this will exit the trade using a market order. The order-type for the emergency-exit can be changed by setting the `emergency_exit` value in the `order_types` dictionary - however, this is not advised.

### Understand order_time_in_force
@@ -129,6 +129,8 @@ Below is an outline of exception inheritance hierarchy:
+ FreqtradeException
|
+---+ OperationalException
|   |
|   +---+ ConfigurationError
|
+---+ DependencyException
|   |
@@ -376,7 +378,7 @@ from pathlib import Path

exchange = ccxt.binance({
    'apiKey': '<apikey>',
    'secret': '<secret>'
    'secret': '<secret>',
    'options': {'defaultType': 'swap'}
})
_ = exchange.load_markets()
@@ -68,6 +68,8 @@ Binance supports [time_in_force](configuration.md#understand-order_time_in_force
For Binance, it is suggested to add `"BNB/<STAKE>"` to your blacklist to avoid issues, unless you are willing to maintain enough extra `BNB` on the account or unless you're willing to disable using `BNB` for fees.
Binance accounts may use `BNB` for fees, and if a trade happens to be on `BNB`, further trades may consume this position and make the initial BNB trade unsellable as the expected amount is not there anymore.

If not enough `BNB` is available to cover transaction fees, then fees will not be covered by `BNB` and no fee reduction will occur. Freqtrade will never buy BNB to cover for fees. BNB needs to be bought and monitored manually to this end.

### Binance sites

Binance has been split into 2, and users must use the correct ccxt exchange ID for their exchange, otherwise API keys are not recognized.
@@ -32,6 +32,9 @@ FreqAI is configured through the typical [Freqtrade config file](configuration.m

A full example config is available in `config_examples/config_freqai.example.json`.

!!! Note
    The `identifier` is commonly overlooked by newcomers, however, this value plays an important role in your configuration. This value is a unique ID that you choose to describe one of your runs. Keeping it the same allows you to maintain crash resilience as well as faster backtesting. As soon as you want to try a new run (new features, new model, etc.), you should change this value (or delete the `user_data/models/unique-id` folder). More details are available in the [parameter table](freqai-parameter-table.md#feature-parameters).
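For orientation, the `identifier` referenced in the note lives inside the `freqai` section of the configuration; a minimal sketch (keys other than `identifier` are illustrative):

``` json
{
    "freqai": {
        "enabled": true,
        "identifier": "my-first-run"
    }
}
```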
## Building a FreqAI strategy

The FreqAI strategy requires including the following lines of code in the standard [Freqtrade strategy](strategy-customization.md):
@@ -75,7 +75,7 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
| `rl_config` | A dictionary containing the control parameters for a Reinforcement Learning model. <br> **Datatype:** Dictionary.
| `train_cycles` | Training time steps will be set based on the `train_cycles * number of training data points. <br> **Datatype:** Integer.
| `max_trade_duration_candles`| Guides the agent training to keep trades below desired length. Example usage shown in `prediction_models/ReinforcementLearner.py` within the customizable `calculate_reward()` function. <br> **Datatype:** int.
| `model_type` | Model string from stable_baselines3 or SBcontrib. Available strings include: `'TRPO', 'ARS', 'RecurrentPPO', 'MaskablePPO', 'PPO', 'A2C', 'DQN'`. User should ensure that `model_training_parameters` match those available to the corresponding stable_baselines3 model by visiting their documentaiton. [PPO doc](https://stable-baselines3.readthedocs.io/en/master/modules/ppo.html) (external website) <br> **Datatype:** string.
| `model_type` | Model string from stable_baselines3 or SBcontrib. Available strings include: `'TRPO', 'ARS', 'RecurrentPPO', 'MaskablePPO', 'PPO', 'A2C', 'DQN'`. User should ensure that `model_training_parameters` match those available to the corresponding stable_baselines3 model by visiting their documentation. [PPO doc](https://stable-baselines3.readthedocs.io/en/master/modules/ppo.html) (external website) <br> **Datatype:** string.
| `policy_type` | One of the available policy types from stable_baselines3 <br> **Datatype:** string.
| `max_training_drawdown_pct` | The maximum drawdown that the agent is allowed to experience during training. <br> **Datatype:** float. <br> Default: 0.8
| `cpu_count` | Number of threads/cpus to dedicate to the Reinforcement Learning training process (depending on if `ReinforcementLearning_multiproc` is selected or not). Recommended to leave this untouched, by default, this value is set to the total number of physical cores minus 1. <br> **Datatype:** int.
@@ -142,7 +142,7 @@ Parameter details can be found [here](freqai-parameter-table.md), but in general
As you begin to modify the strategy and the prediction model, you will quickly realize some important differences between the Reinforcement Learner and the Regressors/Classifiers. Firstly, the strategy does not set a target value (no labels!). Instead, you set the `calculate_reward()` function inside the `MyRLEnv` class (see below). A default `calculate_reward()` is provided inside `prediction_models/ReinforcementLearner.py` to demonstrate the necessary building blocks for creating rewards, but this is *not* designed for production. Users *must* create their own custom reinforcement learning model class or use a pre-built one from outside the Freqtrade source code and save it to `user_data/freqaimodels`. It is inside the `calculate_reward()` where creative theories about the market can be expressed. For example, you can reward your agent when it makes a winning trade, and penalize the agent when it makes a losing trade. Or perhaps, you wish to reward the agent for entering trades, and penalize the agent for sitting in trades too long. Below we show examples of how these rewards are all calculated:

!!! note "Hint"
    The best reward functions are ones that are continuously differentiable, and well scaled. In other words, adding a single large negative penalty to a rare event is not a good idea, and the neural net will not be able to learn that function. Instead, it is better to add a small negative penalty to a common event. This will help the agent learn faster. Not only this, but you can help improve the continuity of your rewards/penalties by having them scale with severity according to some linear/exponential functions. In other words, you'd slowly scale the penalty as the duration of the trade increases. This is better than a single large penalty occuring at a single point in time.
    The best reward functions are ones that are continuously differentiable, and well scaled. In other words, adding a single large negative penalty to a rare event is not a good idea, and the neural net will not be able to learn that function. Instead, it is better to add a small negative penalty to a common event. This will help the agent learn faster. Not only this, but you can help improve the continuity of your rewards/penalties by having them scale with severity according to some linear/exponential functions. In other words, you'd slowly scale the penalty as the duration of the trade increases. This is better than a single large penalty occurring at a single point in time.

```python
from freqtrade.freqai.prediction_models.ReinforcementLearner import ReinforcementLearner
@@ -81,12 +81,14 @@ Filtering instances (not the first position in the list) will not apply any cach
        "number_assets": 20,
        "sort_key": "quoteVolume",
        "min_value": 0,
        "max_value": 8000000,
        "refresh_period": 1800
    }
],
```

You can define a minimum volume with `min_value` - which will filter out pairs with a volume lower than the specified value in the specified timerange.
In addition to that, you can also define a maximum volume with `max_value` - which will filter out pairs with a volume higher than the specified value in the specified timerange.

##### VolumePairList Advanced mode
@@ -369,6 +371,11 @@ As this Filter uses past performance of the bot, it'll have some startup-period

Filters low-value coins which would not allow setting stoplosses.

Namely, pairs are blacklisted if a variance of one percent or more in the stop price would be caused by precision rounding on the exchange, i.e. `rounded(stop_price) <= rounded(stop_price * 0.99)`. The idea is to avoid coins with a value VERY close to their lower trading boundary, not allowing setting of proper stoploss.

!!! Tip "PerformanceFilter is pointless for futures trading"
    The above does not apply to shorts. And for longs, in theory the trade will be liquidated first.

!!! Warning "Backtesting"
    `PrecisionFilter` does not support backtesting mode using multiple strategies.
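As an illustration of the rule quoted above, here is a small sketch of the check (not the actual PrecisionFilter implementation; the `price_to_precision` helper below is a simplified stand-in that rounds to a fixed tick size):

``` python
def price_to_precision(price: float, tick_size: float) -> float:
    """Round a price to the exchange's tick size (simplified stand-in)."""
    return round(price / tick_size) * tick_size

def would_be_filtered(stop_price: float, tick_size: float) -> bool:
    """Pair is removed if rounding eats 1% or more of the stop price."""
    return price_to_precision(stop_price, tick_size) <= price_to_precision(stop_price * 0.99, tick_size)

# A coin quoted right at its minimum tick: a 1% stop move vanishes after rounding.
print(would_be_filtered(stop_price=0.00000012, tick_size=1e-8))  # True -> filtered
print(would_be_filtered(stop_price=0.012345, tick_size=1e-8))    # False -> kept
```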
@@ -450,6 +457,8 @@ If the trading range over the last 10 days is <1% or >99%, remove the pair from
    ]
```

Adding `"sort_direction": "asc"` or `"sort_direction": "desc"` enables sorting for this pairlist.

!!! Tip
    This Filter can be used to automatically remove stable coin pairs, which have a very low trading range, and are therefore extremely difficult to trade with profit.
    Additionally, it can also be used to automatically remove pairs with extreme high/low variance over a given amount of time.
@@ -460,7 +469,7 @@ Volatility is the degree of historical variation of a pairs over time, it is mea

This filter removes pairs if the average volatility over a `lookback_days` days is below `min_volatility` or above `max_volatility`. Since this is a filter that requires additional data, the results are cached for `refresh_period`.

This filter can be used to narrow down your pairs to a certain volatility or avoid very volatile pairs.

In the below example:
If the volatility over the last 10 days is not in the range of 0.05-0.50, remove the pair from the whitelist. The filter is applied every 24h.
@@ -477,6 +486,8 @@ If the volatility over the last 10 days is not in the range of 0.05-0.50, remove
    ]
```

Adding `"sort_direction": "asc"` or `"sort_direction": "desc"` enables sorting mode for this pairlist.

### Full example of Pairlist Handlers

The below example blacklists `BNB/BTC`, uses `VolumePairList` with `20` assets, sorting pairs by `quoteVolume` and applies [`PrecisionFilter`](#precisionfilter) and [`PriceFilter`](#pricefilter), filtering all assets where 1 price unit is > 1%. Then the [`SpreadFilter`](#spreadfilter) and [`VolatilityFilter`](#volatilityfilter) is applied and pairs are finally shuffled with the random seed set to some predefined value.
@@ -23,6 +23,7 @@ It also supports the lookahead-analysis of freqai strategies.
- `--max-open-trades` is forced to be at least equal to the number of pairs.
- `--dry-run-wallet` is forced to be basically infinite (1 billion).
- `--stake-amount` is forced to be a static 10000 (10k).
- `--enable-protections` is forced to be off.

Those are set to avoid users accidentally generating false positives.

@@ -40,7 +41,6 @@ usage: freqtrade lookahead-analysis [-h] [-v] [--logfile FILE] [-V] [-c PATH]
                                    [--max-open-trades INT]
                                    [--stake-amount STAKE_AMOUNT]
                                    [--fee FLOAT] [-p PAIRS [PAIRS ...]]
                                    [--enable-protections]
                                    [--dry-run-wallet DRY_RUN_WALLET]
                                    [--timeframe-detail TIMEFRAME_DETAIL]
                                    [--strategy-list STRATEGY_LIST [STRATEGY_LIST ...]]
@@ -1,6 +1,6 @@
markdown==3.5.2
markdown==3.6
mkdocs==1.5.3
mkdocs-material==9.5.11
mkdocs-material==9.5.15
mdx_truly_sane_lists==1.3
pymdown-extensions==10.7
pymdown-extensions==10.7.1
jinja2==3.1.3
@@ -95,11 +95,13 @@ Make sure that the following 2 lines are available in your docker-compose file:

### Consuming the API

You can consume the API by using the script `scripts/rest_client.py`.
The client script only requires the `requests` module, so Freqtrade does not need to be installed on the system.
You can consume the API by using `freqtrade-client` (also available as `scripts/rest_client.py`).
This command can be installed independent of the bot by using `pip install freqtrade-client`.

This module is designed to be lightweight, and only depends on the `requests` and `python-rapidjson` modules, skipping all heavy dependencies freqtrade otherwise needs.

``` bash
python3 scripts/rest_client.py <command> [optional parameters]
freqtrade-client <command> [optional parameters]
```

By default, the script assumes `127.0.0.1` (localhost) and port `8080` to be used, however you can specify a configuration file to override this behaviour.
@@ -120,9 +122,27 @@ By default, the script assumes `127.0.0.1` (localhost) and port `8080` to be use
```

``` bash
python3 scripts/rest_client.py --config rest_config.json <command> [optional parameters]
freqtrade-client --config rest_config.json <command> [optional parameters]
```

??? Note "Programmatic use"
    The `freqtrade-client` package (installable independent of freqtrade) can be used in your own scripts to interact with the freqtrade API.
    To do so, please use the following:

    ``` python
    from freqtrade_client import FtRestClient


    client = FtRestClient(server_url, username, password)

    # Get the status of the bot
    ping = client.ping()
    print(ping)
    # ...
    ```

For a full list of available commands, please refer to the list below.

### Available endpoints

| Command | Description |
@@ -176,7 +196,7 @@ python3 scripts/rest_client.py --config rest_config.json <command> [optional par
Possible commands can be listed from the rest-client script using the `help` command.

``` bash
python3 scripts/rest_client.py help
freqtrade-client help
```

``` output
@@ -11,34 +11,129 @@ The call sequence of the methods described here is covered under [bot execution
|
||||
!!! Tip
|
||||
Start off with a strategy template containing all available callback methods by running `freqtrade new-strategy --strategy MyAwesomeStrategy --template advanced`
|
||||
|
||||
## Storing information
|
||||
## Storing information (Persistent)
|
||||
|
||||
Storing information can be accomplished by creating a new dictionary within the strategy class.
|
||||
Freqtrade allows storing/retrieving user custom information associated with a specific trade in the database.
|
||||
|
||||
The name of the variable can be chosen at will, but should be prefixed with `custom_` to avoid naming collisions with predefined strategy variables.
|
||||
Using a trade object, information can be stored using `trade.set_custom_data(key='my_key', value=my_value)` and retrieved using `trade.get_custom_data(key='my_key')`. Each data entry is associated with a trade and a user supplied key (of type `string`). This means that this can only be used in callbacks that also provide a trade object.
|
||||
|
||||
For the data to be able to be stored within the database, freqtrade must serialized the data. This is done by converting the data to a JSON formatted string.
|
||||
Freqtrade will attempt to reverse this action on retrieval, so from a strategy perspective, this should not be relevant.
|
||||
|
||||
```python
|
||||
from freqtrade.persistence import Trade
|
||||
from datetime import timedelta
|
||||
|
||||
class AwesomeStrategy(IStrategy):
|
||||
# Create custom dictionary
|
||||
custom_info = {}
|
||||
|
||||
def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
|
||||
# Check if the entry already exists
|
||||
if not metadata["pair"] in self.custom_info:
|
||||
# Create empty entry for this pair
|
||||
self.custom_info[metadata["pair"]] = {}
|
||||
def bot_loop_start(self, **kwargs) -> None:
|
||||
for trade in Trade.get_open_order_trades():
|
||||
fills = trade.select_filled_orders(trade.entry_side)
|
||||
if trade.pair == 'ETH/USDT':
|
||||
trade_entry_type = trade.get_custom_data(key='entry_type')
|
||||
if trade_entry_type is None:
|
||||
trade_entry_type = 'breakout' if 'entry_1' in trade.enter_tag else 'dip'
|
||||
elif fills > 1:
|
||||
trade_entry_type = 'buy_up'
|
||||
trade.set_custom_data(key='entry_type', value=trade_entry_type)
|
||||
return super().bot_loop_start(**kwargs)
|
||||
|
||||
if "crosstime" in self.custom_info[metadata["pair"]]:
|
||||
self.custom_info[metadata["pair"]]["crosstime"] += 1
|
||||
else:
|
||||
self.custom_info[metadata["pair"]]["crosstime"] = 1
|
||||
def adjust_entry_price(self, trade: Trade, order: Optional[Order], pair: str,
|
||||
current_time: datetime, proposed_rate: float, current_order_rate: float,
|
||||
entry_tag: Optional[str], side: str, **kwargs) -> float:
|
||||
# Limit orders to use and follow SMA200 as price target for the first 10 minutes since entry trigger for BTC/USDT pair.
|
||||
if (
|
||||
pair == 'BTC/USDT'
|
||||
and entry_tag == 'long_sma200'
|
||||
and side == 'long'
|
||||
and (current_time - timedelta(minutes=10)) > trade.open_date_utc
|
||||
and order.filled == 0.0
|
||||
):
|
||||
dataframe, _ = self.dp.get_analyzed_dataframe(pair=pair, timeframe=self.timeframe)
|
||||
current_candle = dataframe.iloc[-1].squeeze()
|
||||
# store information about entry adjustment
|
||||
existing_count = trade.get_custom_data('num_entry_adjustments', default=0)
|
||||
if not existing_count:
|
||||
existing_count = 1
|
||||
else:
|
||||
existing_count += 1
|
||||
trade.set_custom_data(key='num_entry_adjustments', value=existing_count)
|
||||
|
||||
# adjust order price
|
||||
return current_candle['sma_200']
|
||||
|
||||
# default: maintain existing order
|
||||
return current_order_rate
|
||||
|
||||
def custom_exit(self, pair: str, trade: Trade, current_time: datetime, current_rate: float, current_profit: float, **kwargs):
|
||||
|
||||
entry_adjustment_count = trade.get_custom_data(key='num_entry_adjustments')
|
||||
trade_entry_type = trade.get_custom_data(key='entry_type')
|
||||
if entry_adjustment_count is None:
|
||||
if current_profit > 0.01 and (current_time - timedelta(minutes=100) > trade.open_date_utc):
|
||||
return True, 'exit_1'
|
||||
else
|
||||
if entry_adjustment_count > 0 and if current_profit > 0.05:
|
||||
return True, 'exit_2'
|
||||
if trade_entry_type == 'breakout' and current_profit > 0.1:
|
||||
return True, 'exit_3
|
||||
|
||||
return False, None
|
||||
```

!!! Warning
    The data is not persisted after a bot-restart (or config-reload). Also, the amount of data should be kept smallish (no DataFrames and such), otherwise the bot will start to consume a lot of memory and eventually run out of memory and crash.
    The above is a simple example - there are simpler ways to retrieve trade data like entry-adjustments.

!!! Note
    If the data is pair-specific, make sure to use pair as one of the keys in the dictionary.
    It is recommended that simple data types are used `[bool, int, float, str]` to ensure no issues when serializing the data that needs to be stored.
    Storing big chunks of data may lead to unintended side-effects, like a database becoming big (and as a consequence, also slow).

!!! Warning "Non-serializable data"
    If supplied data cannot be serialized a warning is logged and the entry for the specified `key` will contain `None` as data.

??? Note "All attributes"
    custom-data has the following accessors through the Trade object (assumed as `trade` below):

    * `trade.get_custom_data(key='something', default=0)` - Returns the actual value given in the type provided.
    * `trade.get_custom_data_entry(key='something')` - Returns the entry - including metadata. The value is accessible via `.value` property.
    * `trade.set_custom_data(key='something', value={'some': 'value'})` - set or update the corresponding key for this trade. Value must be serializable - and we recommend to keep the stored data relatively small.

    "value" can be any type (both in setting and receiving) - but must be json serializable.

## Storing information (Non-Persistent)

!!! Warning "Deprecated"
    This method of storing information is deprecated and we do advise against using non-persistent storage.
    Please use [Persistent Storage](#storing-information-persistent) instead.

    Its content has therefore been collapsed.

??? Abstract "Storing information"
    Storing information can be accomplished by creating a new dictionary within the strategy class.

    The name of the variable can be chosen at will, but should be prefixed with `custom_` to avoid naming collisions with predefined strategy variables.

    ```python
    class AwesomeStrategy(IStrategy):
        # Create custom dictionary
        custom_info = {}

        def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
            # Check if the entry already exists
            if metadata["pair"] not in self.custom_info:
                # Create empty entry for this pair
                self.custom_info[metadata["pair"]] = {}

            if "crosstime" in self.custom_info[metadata["pair"]]:
                self.custom_info[metadata["pair"]]["crosstime"] += 1
            else:
                self.custom_info[metadata["pair"]]["crosstime"] = 1
            return dataframe
    ```

!!! Warning
    The data is not persisted after a bot-restart (or config-reload). Also, the amount of data should be kept smallish (no DataFrames and such), otherwise the bot will start to consume a lot of memory and eventually run out of memory and crash.

!!! Note
    If the data is pair-specific, make sure to use pair as one of the keys in the dictionary.

## Dataframe access

@@ -19,6 +19,7 @@ Currently available callbacks:

* [`adjust_trade_position()`](#adjust-trade-position)
* [`adjust_entry_price()`](#adjust-entry-price)
* [`leverage()`](#leverage-callback)
* [`order_filled()`](#order-filled-callback)

!!! Tip "Callback calling sequence"
    You can find the callback calling sequence in [bot-basics](bot-basics.md#bot-execution-logic)

@@ -783,7 +784,7 @@ Additional entries are ignored once you have reached the maximum amount of extra

### Decrease position

The strategy is expected to return a negative stake_amount (in stake currency) for a partial exit.
Returning the full owned stake at that point (based on the current price) (`-(trade.amount / trade.leverage) * current_exit_rate`) results in a full exit.
Returning the full owned stake at that point (`-trade.stake_amount`) results in a full exit.
Returning a value more than the above (so remaining stake_amount would become negative) will result in the bot ignoring the signal.
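
A rough sketch of a partial exit built on this rule (the 5% threshold and the half-position size are purely illustrative, and the callback signature should be checked against your freqtrade version):

``` python
    def adjust_trade_position(self, trade: Trade, current_time: datetime,
                              current_rate: float, current_profit: float,
                              min_stake: Optional[float], max_stake: float,
                              current_entry_rate: float, current_exit_rate: float,
                              current_entry_profit: float, current_exit_profit: float,
                              **kwargs) -> Optional[float]:
        # Sell half of the position once the trade is 5% in profit and nothing
        # has been sold yet. Returning -trade.stake_amount instead would exit fully.
        if current_profit > 0.05 and trade.nr_of_successful_exits == 0:
            return -(trade.stake_amount / 2)
        # Returning None keeps the position unchanged.
        return None
```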

!!! Note "About stake size"

@@ -1022,3 +1023,33 @@ class AwesomeStrategy(IStrategy):

All profit calculations include leverage. Stoploss / ROI also include leverage in their calculation.
Defining a stoploss of 10% at 10x leverage would trigger the stoploss with a 1% move to the downside.
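
As a quick sanity check of that arithmetic (purely illustrative numbers):

```python
stoploss = 0.10            # 10% stoploss, calculated on the leveraged position
leverage = 10
adverse_price_move = stoploss / leverage
print(adverse_price_move)  # 0.01 -> a 1% move against the position triggers the stop
```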

## Order filled Callback

The `order_filled()` callback may be used to perform specific actions based on the current trade state after an order is filled.
It will be called independently of the order type (entry, exit, stoploss or position adjustment).

Assuming that your strategy needs to store the high value of the candle at trade entry, this is possible with this callback, as the following example shows.

``` python
class AwesomeStrategy(IStrategy):
    def order_filled(self, pair: str, trade: Trade, order: Order, current_time: datetime, **kwargs) -> None:
        """
        Called right after an order fills.
        Will be called for all order types (entry, exit, stoploss, position adjustment).
        :param pair: Pair for trade
        :param trade: trade object.
        :param order: Order object.
        :param current_time: datetime object, containing the current datetime
        :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
        """
        # Obtain pair dataframe (just to show how to access it)
        dataframe, _ = self.dp.get_analyzed_dataframe(trade.pair, self.timeframe)
        last_candle = dataframe.iloc[-1].squeeze()

        if (trade.nr_of_successful_entries == 1) and (order.ft_order_side == trade.entry_side):
            trade.set_custom_data(key='entry_candle_high', value=last_candle['high'])

        return None
```
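
The stored value can later be read back from any other callback. A minimal, illustrative sketch (the 5% threshold and the exit reason string are made up for the example; returning a string from `custom_exit()` uses it as the exit reason):

``` python
    def custom_exit(self, pair: str, trade: Trade, current_time: datetime,
                    current_rate: float, current_profit: float, **kwargs):
        # Read back the high stored by order_filled(); returns None if it was never set.
        entry_candle_high = trade.get_custom_data(key='entry_candle_high')
        if entry_candle_high is not None and current_rate > entry_candle_high * 1.05:
            return 'above_entry_candle_high'
        return None
```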

@@ -19,7 +19,7 @@ from pathlib import Path
project_root = "somedir/freqtrade"
i = 0
try:
    os.chdirdir(project_root)
    os.chdir(project_root)
    assert Path('LICENSE').is_file()
except:
    while i < 4 and (not Path('LICENSE').is_file()):

@@ -59,7 +59,7 @@ For the Freqtrade configuration, you can then use the full value (including
    "chat_id": "-1001332619709"
```
!!! Warning "Using telegram groups"
    When using telegram groups, you're giving every member of the telegram group access to your freqtrade bot and to all commands possible via telegram. Please make sure that you can trust everyone in the telegram group to avoid unpleasent surprises.
    When using telegram groups, you're giving every member of the telegram group access to your freqtrade bot and to all commands possible via telegram. Please make sure that you can trust everyone in the telegram group to avoid unpleasant surprises.

## Control telegram noise

@@ -181,6 +181,7 @@ official commands. You can ask at any moment for help with `/help`.
| `/locks` | Show currently locked pairs.
| `/unlock <pair or lock_id>` | Remove the lock for this pair (or for this lock id).
| `/marketdir [long | short | even | none]` | Updates the user managed variable that represents the current market direction. If no direction is provided, the currently set direction will be displayed.
| `/list_custom_data <trade_id> [key]` | List custom_data for Trade ID & Key combination. If no Key is supplied it will list all key-value pairs found for that Trade ID.
| **Modify Trade states** |
| `/forceexit <trade_id> | /fx <tradeid>` | Instantly exits the given trade (Ignoring `minimum_roi`).
| `/forceexit all | /fx all` | Instantly exits all open trades (Ignoring `minimum_roi`).

@@ -6,7 +6,7 @@ To update your freqtrade installation, please use one of the below methods, corr
Breaking changes / changed behavior will be documented in the changelog that is posted alongside every release.
For the develop branch, please follow PR's to avoid being surprised by changes.

## docker
## Docker

!!! Note "Legacy installations using the `master` image"
    We're switching from master to stable for the release Images - please adjust your docker-file and replace `freqtradeorg/freqtrade:master` with `freqtradeorg/freqtrade:stable`

docs/utils.md
@@ -66,6 +66,53 @@ $ freqtrade new-config --config user_data/config_binance.json
? Do you want to enable Telegram? No
```

## Show config

Show configuration file (with sensitive values redacted by default).
Especially useful with [split configuration files](configuration.md#multiple-configuration-files) or [environment variables](configuration.md#environment-variables), where this command will show the merged configuration.

```
usage: freqtrade show-config [-h] [--userdir PATH] [-c PATH]
                             [--show-sensitive]

options:
  -h, --help            show this help message and exit
  --userdir PATH, --user-data-dir PATH
                        Path to userdata directory.
  -c PATH, --config PATH
                        Specify configuration file (default:
                        `userdir/config.json` or `config.json` whichever
                        exists). Multiple --config options may be used. Can be
                        set to `-` to read config from stdin.
  --show-sensitive      Show secrets in the output.
```

``` output
Your combined configuration is:
{
  "exit_pricing": {
    "price_side": "other",
    "use_order_book": true,
    "order_book_top": 1
  },
  "stake_currency": "USDT",
  "exchange": {
    "name": "binance",
    "key": "REDACTED",
    "secret": "REDACTED",
    "ccxt_config": {},
    "ccxt_async_config": {},
  }
  // ...
}
```

!!! Warning "Sharing information provided by this command"
    We try to remove all known sensitive information from the default output (without `--show-sensitive`).
    Yet, please do double-check for sensitive values in your output to make sure you're not accidentally exposing some private info.

## Create new strategy

Creates a new strategy from a template similar to SampleStrategy.

@@ -219,207 +266,49 @@ optional arguments:
|
||||
-a, --all Print all exchanges known to the ccxt library.
|
||||
```
|
||||
|
||||
* Example: see exchanges available for the bot:
|
||||
Example: see exchanges available for the bot:
|
||||
|
||||
```
|
||||
$ freqtrade list-exchanges
|
||||
Exchanges available for Freqtrade:
|
||||
Exchange name Valid reason
|
||||
--------------- ------- --------------------------------------------
|
||||
aax True
|
||||
ascendex True missing opt: fetchMyTrades
|
||||
bequant True
|
||||
bibox True
|
||||
bigone True
|
||||
binance True
|
||||
binanceus True
|
||||
bitbank True missing opt: fetchTickers
|
||||
bitcoincom True
|
||||
bitfinex True
|
||||
bitforex True missing opt: fetchMyTrades, fetchTickers
|
||||
bitget True
|
||||
bithumb True missing opt: fetchMyTrades
|
||||
bitkk True missing opt: fetchMyTrades
|
||||
bitmart True
|
||||
bitmax True missing opt: fetchMyTrades
|
||||
bitpanda True
|
||||
bitvavo True
|
||||
bitz True missing opt: fetchMyTrades
|
||||
btcalpha True missing opt: fetchTicker, fetchTickers
|
||||
btcmarkets True missing opt: fetchTickers
|
||||
buda True missing opt: fetchMyTrades, fetchTickers
|
||||
bw True missing opt: fetchMyTrades, fetchL2OrderBook
|
||||
bybit True
|
||||
bytetrade True
|
||||
cdax True
|
||||
cex True missing opt: fetchMyTrades
|
||||
coinbaseprime True missing opt: fetchTickers
|
||||
coinbasepro True missing opt: fetchTickers
|
||||
coinex True
|
||||
crex24 True
|
||||
deribit True
|
||||
digifinex True
|
||||
equos True missing opt: fetchTicker, fetchTickers
|
||||
eterbase True
|
||||
fcoin True missing opt: fetchMyTrades, fetchTickers
|
||||
fcoinjp True missing opt: fetchMyTrades, fetchTickers
|
||||
gateio True
|
||||
gemini True
|
||||
gopax True
|
||||
hbtc True
|
||||
hitbtc True
|
||||
huobijp True
|
||||
huobipro True
|
||||
idex True
|
||||
kraken True
|
||||
kucoin True
|
||||
lbank True missing opt: fetchMyTrades
|
||||
mercado True missing opt: fetchTickers
|
||||
ndax True missing opt: fetchTickers
|
||||
novadax True
|
||||
okcoin True
|
||||
okex True
|
||||
probit True
|
||||
qtrade True
|
||||
stex True
|
||||
timex True
|
||||
upbit True missing opt: fetchMyTrades
|
||||
vcc True
|
||||
zb True missing opt: fetchMyTrades
|
||||
|
||||
Exchange name Supported Markets Reason
|
||||
------------------ ----------- ---------------------- ------------------------------------------------------------------------
|
||||
binance Official spot, isolated futures
|
||||
bitmart Official spot
|
||||
bybit spot, isolated futures
|
||||
gate Official spot, isolated futures
|
||||
htx Official spot
|
||||
huobi spot
|
||||
kraken Official spot
|
||||
okx Official spot, isolated futures
|
||||
```
|
||||
|
||||
!!! info ""
|
||||
Output reduced for clarity - supported and available exchanges may change over time.
|
||||
|
||||
!!! Note "missing opt exchanges"
|
||||
Values with "missing opt:" might need special configuration (e.g. using orderbook if `fetchTickers` is missing) - but should in theory work (although we cannot guarantee they will).
|
||||
|
||||
* Example: see all exchanges supported by the ccxt library (including 'bad' ones, i.e. those that are known to not work with Freqtrade):
|
||||
Example: see all exchanges supported by the ccxt library (including 'bad' ones, i.e. those that are known to not work with Freqtrade)
|
||||
|
||||
```
|
||||
$ freqtrade list-exchanges -a
|
||||
All exchanges supported by the ccxt library:
|
||||
Exchange name Valid reason
|
||||
------------------ ------- ---------------------------------------------------------------------------------------
|
||||
aax True
|
||||
aofex False missing: fetchOrder
|
||||
ascendex True missing opt: fetchMyTrades
|
||||
bequant True
|
||||
bibox True
|
||||
bigone True
|
||||
binance True
|
||||
binanceus True
|
||||
bit2c False missing: fetchOrder, fetchOHLCV
|
||||
bitbank True missing opt: fetchTickers
|
||||
bitbay False missing: fetchOrder
|
||||
bitcoincom True
|
||||
bitfinex True
|
||||
bitfinex2 False missing: fetchOrder
|
||||
bitflyer False missing: fetchOrder, fetchOHLCV
|
||||
bitforex True missing opt: fetchMyTrades, fetchTickers
|
||||
bitget True
|
||||
bithumb True missing opt: fetchMyTrades
|
||||
bitkk True missing opt: fetchMyTrades
|
||||
bitmart True
|
||||
bitmax True missing opt: fetchMyTrades
|
||||
bitmex False Various reasons.
|
||||
bitpanda True
|
||||
bitso False missing: fetchOHLCV
|
||||
bitstamp True missing opt: fetchTickers
|
||||
bitstamp1 False missing: fetchOrder, fetchOHLCV
|
||||
bitvavo True
|
||||
bitz True missing opt: fetchMyTrades
|
||||
bl3p False missing: fetchOrder, fetchOHLCV
|
||||
bleutrade False missing: fetchOrder
|
||||
braziliex False missing: fetchOHLCV
|
||||
btcalpha True missing opt: fetchTicker, fetchTickers
|
||||
btcbox False missing: fetchOHLCV
|
||||
btcmarkets True missing opt: fetchTickers
|
||||
btctradeua False missing: fetchOrder, fetchOHLCV
|
||||
btcturk False missing: fetchOrder
|
||||
buda True missing opt: fetchMyTrades, fetchTickers
|
||||
bw True missing opt: fetchMyTrades, fetchL2OrderBook
|
||||
bybit True
|
||||
bytetrade True
|
||||
cdax True
|
||||
cex True missing opt: fetchMyTrades
|
||||
chilebit False missing: fetchOrder, fetchOHLCV
|
||||
coinbase False missing: fetchOrder, cancelOrder, createOrder, fetchOHLCV
|
||||
coinbaseprime True missing opt: fetchTickers
|
||||
coinbasepro True missing opt: fetchTickers
|
||||
coincheck False missing: fetchOrder, fetchOHLCV
|
||||
coinegg False missing: fetchOHLCV
|
||||
coinex True
|
||||
coinfalcon False missing: fetchOHLCV
|
||||
coinfloor False missing: fetchOrder, fetchOHLCV
|
||||
coingi False missing: fetchOrder, fetchOHLCV
|
||||
coinmarketcap False missing: fetchOrder, cancelOrder, createOrder, fetchBalance, fetchOHLCV
|
||||
coinmate False missing: fetchOHLCV
|
||||
coinone False missing: fetchOHLCV
|
||||
coinspot False missing: fetchOrder, cancelOrder, fetchOHLCV
|
||||
crex24 True
|
||||
currencycom False missing: fetchOrder
|
||||
delta False missing: fetchOrder
|
||||
deribit True
|
||||
digifinex True
|
||||
equos True missing opt: fetchTicker, fetchTickers
|
||||
eterbase True
|
||||
exmo False missing: fetchOrder
|
||||
exx False missing: fetchOHLCV
|
||||
fcoin True missing opt: fetchMyTrades, fetchTickers
|
||||
fcoinjp True missing opt: fetchMyTrades, fetchTickers
|
||||
flowbtc False missing: fetchOrder, fetchOHLCV
|
||||
foxbit False missing: fetchOrder, fetchOHLCV
|
||||
gateio True
|
||||
gemini True
|
||||
gopax True
|
||||
hbtc True
|
||||
hitbtc True
|
||||
hollaex False missing: fetchOrder
|
||||
huobijp True
|
||||
huobipro True
|
||||
idex True
|
||||
independentreserve False missing: fetchOHLCV
|
||||
indodax False missing: fetchOHLCV
|
||||
itbit False missing: fetchOHLCV
|
||||
kraken True
|
||||
kucoin True
|
||||
kuna False missing: fetchOHLCV
|
||||
lakebtc False missing: fetchOrder, fetchOHLCV
|
||||
latoken False missing: fetchOrder, fetchOHLCV
|
||||
lbank True missing opt: fetchMyTrades
|
||||
liquid False missing: fetchOHLCV
|
||||
luno False missing: fetchOHLCV
|
||||
lykke False missing: fetchOHLCV
|
||||
mercado True missing opt: fetchTickers
|
||||
mixcoins False missing: fetchOrder, fetchOHLCV
|
||||
ndax True missing opt: fetchTickers
|
||||
novadax True
|
||||
oceanex False missing: fetchOHLCV
|
||||
okcoin True
|
||||
okex True
|
||||
paymium False missing: fetchOrder, fetchOHLCV
|
||||
phemex False Does not provide history.
|
||||
poloniex False missing: fetchOrder
|
||||
probit True
|
||||
qtrade True
|
||||
rightbtc False missing: fetchOrder
|
||||
ripio False missing: fetchOHLCV
|
||||
southxchange False missing: fetchOrder, fetchOHLCV
|
||||
stex True
|
||||
surbitcoin False missing: fetchOrder, fetchOHLCV
|
||||
therock False missing: fetchOHLCV
|
||||
tidebit False missing: fetchOrder
|
||||
tidex False missing: fetchOHLCV
|
||||
timex True
|
||||
upbit True missing opt: fetchMyTrades
|
||||
vbtc False missing: fetchOrder, fetchOHLCV
|
||||
vcc True
|
||||
wavesexchange False missing: fetchOrder
|
||||
whitebit False missing: fetchOrder, cancelOrder, createOrder, fetchBalance
|
||||
xbtce False missing: fetchOrder, fetchOHLCV
|
||||
xena False missing: fetchOrder
|
||||
yobit False missing: fetchOHLCV
|
||||
zaif False missing: fetchOrder, fetchOHLCV
|
||||
zb True missing opt: fetchMyTrades
|
||||
Exchange name Valid Supported Markets Reason
|
||||
------------------ ------- ----------- ---------------------- ---------------------------------------------------------------------------------
|
||||
binance True Official spot, isolated futures
|
||||
bitflyer False spot missing: fetchOrder. missing opt: fetchTickers.
|
||||
bitmart True Official spot
|
||||
bybit True spot, isolated futures
|
||||
gate True Official spot, isolated futures
|
||||
htx True Official spot
|
||||
kraken True Official spot
|
||||
okx True Official spot, isolated futures
|
||||
```

!!! info ""
    Reduced output - supported and available exchanges may change over time.

## List Timeframes

Use the `list-timeframes` subcommand to see the list of timeframes available for the exchange.

@@ -65,7 +65,7 @@ You can set the POST body format to Form-Encoded (default), JSON-Encoded, or raw

The result would be a POST request with e.g. `{"text":"Status: running"}` body and `Content-Type: application/json` header which results in a `Status: running` message in the Mattermost channel.

When using the Form-Encoded or JSON-Encoded configuration you can configure any number of payload values, and both the key and value will be ouput in the POST request. However, when using the raw data format you can only configure one value and it **must** be named `"data"`. In this instance the data key will not be output in the POST request, only the value. For example:
When using the Form-Encoded or JSON-Encoded configuration you can configure any number of payload values, and both the key and value will be output in the POST request. However, when using the raw data format you can only configure one value and it **must** be named `"data"`. In this instance the data key will not be output in the POST request, only the value. For example:

```json
"webhook": {

@@ -1,5 +1,5 @@
""" Freqtrade bot """
__version__ = '2024.2'
__version__ = '2024.3'

if 'dev' in __version__:
    from pathlib import Path

@@ -8,7 +8,7 @@ Note: Be careful with file-scoped imports in these subfiles.
|
||||
"""
|
||||
from freqtrade.commands.analyze_commands import start_analysis_entries_exits
|
||||
from freqtrade.commands.arguments import Arguments
|
||||
from freqtrade.commands.build_config_commands import start_new_config
|
||||
from freqtrade.commands.build_config_commands import start_new_config, start_show_config
|
||||
from freqtrade.commands.data_commands import (start_convert_data, start_convert_trades,
|
||||
start_download_data, start_list_data)
|
||||
from freqtrade.commands.db_commands import start_convert_db
|
||||
|
||||
@@ -4,7 +4,7 @@ from typing import Any, Dict
|
||||
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -34,9 +34,9 @@ def setup_analyze_configuration(args: Dict[str, Any], method: RunMode) -> Dict[s
|
||||
btfile = Path(config['exportfilename'])
|
||||
signals_file = f"{btfile.parent}/{btfile.stem}_signals.pkl"
|
||||
else:
|
||||
raise OperationalException(f"{config['exportfilename']} does not exist.")
|
||||
raise ConfigurationError(f"{config['exportfilename']} does not exist.")
|
||||
else:
|
||||
raise OperationalException('exportfilename not in config.')
|
||||
raise ConfigurationError('exportfilename not in config.')
|
||||
|
||||
if (not Path(signals_file).exists()):
|
||||
raise OperationalException(
|
||||
|
||||
@@ -62,6 +62,7 @@ ARGS_TEST_PAIRLIST = ["user_data_dir", "verbosity", "config", "quote_currencies"
|
||||
ARGS_CREATE_USERDIR = ["user_data_dir", "reset"]
|
||||
|
||||
ARGS_BUILD_CONFIG = ["config"]
|
||||
ARGS_SHOW_CONFIG = ["user_data_dir", "config", "show_sensitive"]
|
||||
|
||||
ARGS_BUILD_STRATEGY = ["user_data_dir", "strategy", "template"]
|
||||
|
||||
@@ -69,7 +70,8 @@ ARGS_CONVERT_DATA_TRADES = ["pairs", "format_from_trades", "format_to", "erase",
|
||||
ARGS_CONVERT_DATA = ["pairs", "format_from", "format_to", "erase", "exchange"]
|
||||
ARGS_CONVERT_DATA_OHLCV = ARGS_CONVERT_DATA + ["timeframes", "trading_mode", "candle_types"]
|
||||
|
||||
ARGS_CONVERT_TRADES = ["pairs", "timeframes", "exchange", "dataformat_ohlcv", "dataformat_trades"]
|
||||
ARGS_CONVERT_TRADES = ["pairs", "timeframes", "exchange", "dataformat_ohlcv", "dataformat_trades",
|
||||
"trading_mode"]
|
||||
|
||||
ARGS_LIST_DATA = ["exchange", "dataformat_ohlcv", "pairs", "trading_mode", "show_timerange"]
|
||||
|
||||
@@ -208,9 +210,9 @@ class Arguments:
|
||||
start_list_strategies, start_list_timeframes,
|
||||
start_lookahead_analysis, start_new_config,
|
||||
start_new_strategy, start_plot_dataframe, start_plot_profit,
|
||||
start_recursive_analysis, start_show_trades,
|
||||
start_strategy_update, start_test_pairlist, start_trading,
|
||||
start_webserver)
|
||||
start_recursive_analysis, start_show_config,
|
||||
start_show_trades, start_strategy_update,
|
||||
start_test_pairlist, start_trading, start_webserver)
|
||||
|
||||
subparsers = self.parser.add_subparsers(dest='command',
|
||||
# Use custom message when no subhandler is added
|
||||
@@ -243,6 +245,14 @@ class Arguments:
|
||||
build_config_cmd.set_defaults(func=start_new_config)
|
||||
self._build_args(optionlist=ARGS_BUILD_CONFIG, parser=build_config_cmd)
|
||||
|
||||
# add show-config subcommand
|
||||
show_config_cmd = subparsers.add_parser(
|
||||
'show-config',
|
||||
help="Show resolved config",
|
||||
)
|
||||
show_config_cmd.set_defaults(func=start_show_config)
|
||||
self._build_args(optionlist=ARGS_SHOW_CONFIG, parser=show_config_cmd)
|
||||
|
||||
# add new-strategy subcommand
|
||||
build_strategy_cmd = subparsers.add_parser(
|
||||
'new-strategy',
|
||||
|
||||
@@ -5,9 +5,12 @@ from typing import Any, Dict, List
|
||||
|
||||
from questionary import Separator, prompt
|
||||
|
||||
from freqtrade.configuration import sanitize_config
|
||||
from freqtrade.configuration.config_setup import setup_utils_configuration
|
||||
from freqtrade.configuration.detect_environment import running_in_docker
|
||||
from freqtrade.configuration.directory_operations import chown_user_directory
|
||||
from freqtrade.constants import UNLIMITED_STAKE_AMOUNT
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exchange import MAP_EXCHANGE_CHILDCLASS, available_exchanges
|
||||
from freqtrade.util import render_template
|
||||
@@ -264,3 +267,19 @@ def start_new_config(args: Dict[str, Any]) -> None:
|
||||
"Please delete it or use a different configuration file name.")
|
||||
selections = ask_user_config()
|
||||
deploy_new_config(config_path, selections)
|
||||
|
||||
|
||||
def start_show_config(args: Dict[str, Any]) -> None:
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE, set_dry=False)
|
||||
|
||||
# TODO: Sanitize from sensitive info before printing
|
||||
|
||||
print("Your combined configuration is:")
|
||||
config_sanitized = sanitize_config(
|
||||
config['original_config'],
|
||||
show_sensitive=args.get('show_sensitive', False)
|
||||
)
|
||||
|
||||
from rich import print_json
|
||||
print_json(data=config_sanitized)
|
||||
|
||||
@@ -716,4 +716,10 @@ AVAILABLE_CLI_OPTIONS = {
|
||||
help='Specify startup candles to be checked (`199`, `499`, `999`, `1999`).',
|
||||
nargs='+',
|
||||
),
|
||||
"show_sensitive": Arg(
|
||||
'--show-sensitive',
|
||||
help='Show secrets in the output.',
|
||||
action='store_true',
|
||||
default=False,
|
||||
),
|
||||
}
|
||||
|
||||
@@ -8,9 +8,10 @@ from freqtrade.constants import DATETIME_PRINT_FORMAT, DL_DATA_TIMEFRAMES, Confi
|
||||
from freqtrade.data.converter import (convert_ohlcv_format, convert_trades_format,
|
||||
convert_trades_to_ohlcv)
|
||||
from freqtrade.data.history import download_data_main
|
||||
from freqtrade.enums import RunMode, TradingMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.enums import CandleType, RunMode, TradingMode
|
||||
from freqtrade.exceptions import ConfigurationError
|
||||
from freqtrade.exchange import timeframe_to_minutes
|
||||
from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist
|
||||
from freqtrade.resolvers import ExchangeResolver
|
||||
from freqtrade.util.migrations import migrate_data
|
||||
|
||||
@@ -20,11 +21,11 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
def _check_data_config_download_sanity(config: Config) -> None:
|
||||
if 'days' in config and 'timerange' in config:
|
||||
raise OperationalException("--days and --timerange are mutually exclusive. "
|
||||
"You can only specify one or the other.")
|
||||
raise ConfigurationError("--days and --timerange are mutually exclusive. "
|
||||
"You can only specify one or the other.")
|
||||
|
||||
if 'pairs' not in config:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
"Downloading data requires a list of pairs. "
|
||||
"Please check the documentation on how to configure this.")
|
||||
|
||||
@@ -62,13 +63,21 @@ def start_convert_trades(args: Dict[str, Any]) -> None:
|
||||
|
||||
for timeframe in config['timeframes']:
|
||||
exchange.validate_timeframes(timeframe)
|
||||
available_pairs = [
|
||||
p for p in exchange.get_markets(
|
||||
tradable_only=True, active_only=not config.get('include_inactive')
|
||||
).keys()
|
||||
]
|
||||
|
||||
expanded_pairs = dynamic_expand_pairlist(config, available_pairs)
|
||||
|
||||
# Convert downloaded trade data to different timeframes
|
||||
convert_trades_to_ohlcv(
|
||||
pairs=config.get('pairs', []), timeframes=config['timeframes'],
|
||||
pairs=expanded_pairs, timeframes=config['timeframes'],
|
||||
datadir=config['datadir'], timerange=timerange, erase=bool(config.get('erase')),
|
||||
data_format_ohlcv=config['dataformat_ohlcv'],
|
||||
data_format_trades=config['dataformat_trades'],
|
||||
candle_type=config.get('candle_type_def', CandleType.SPOT)
|
||||
)
|
||||
|
||||
|
||||
@@ -98,7 +107,7 @@ def start_list_data(args: Dict[str, Any]) -> None:
|
||||
|
||||
from tabulate import tabulate
|
||||
|
||||
from freqtrade.data.history.idatahandler import get_datahandler
|
||||
from freqtrade.data.history import get_datahandler
|
||||
dhc = get_datahandler(config['datadir'], config['dataformat_ohlcv'])
|
||||
|
||||
paircombs = dhc.ohlcv_get_available_data(
|
||||
|
||||
@@ -9,7 +9,7 @@ from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.configuration.directory_operations import copy_sample_files, create_userdata_dir
|
||||
from freqtrade.constants import USERPATH_STRATEGIES
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
from freqtrade.util import render_template, render_template_with_fallback
|
||||
|
||||
|
||||
@@ -89,7 +89,7 @@ def start_new_strategy(args: Dict[str, Any]) -> None:
|
||||
deploy_new_strategy(args['strategy'], new_path, args['template'])
|
||||
|
||||
else:
|
||||
raise OperationalException("`new-strategy` requires --strategy to be set.")
|
||||
raise ConfigurationError("`new-strategy` requires --strategy to be set.")
|
||||
|
||||
|
||||
def clean_ui_subdir(directory: Path):
|
||||
|
||||
@@ -10,7 +10,7 @@ from tabulate import tabulate
|
||||
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
from freqtrade.exchange import list_available_exchanges, market_is_active
|
||||
from freqtrade.misc import parse_db_uri_for_logging, plural
|
||||
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
|
||||
@@ -246,7 +246,7 @@ def start_show_trades(args: Dict[str, Any]) -> None:
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
|
||||
if 'db_url' not in config:
|
||||
raise OperationalException("--db-url is required for this command.")
|
||||
raise ConfigurationError("--db-url is required for this command.")
|
||||
|
||||
logger.info(f'Using DB: "{parse_db_uri_for_logging(config["db_url"])}"')
|
||||
init_db(config['db_url'])
|
||||
|
||||
@@ -4,7 +4,7 @@ from typing import Any, Dict
|
||||
from freqtrade import constants
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
from freqtrade.util import fmt_coin
|
||||
|
||||
|
||||
@@ -31,7 +31,7 @@ def setup_optimize_configuration(args: Dict[str, Any], method: RunMode) -> Dict[
|
||||
and config['stake_amount'] > wallet_size):
|
||||
wallet = fmt_coin(wallet_size, config['stake_currency'])
|
||||
stake = fmt_coin(config['stake_amount'], config['stake_currency'])
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f"Starting balance ({wallet}) is smaller than stake_amount {stake}. "
|
||||
f"Wallet is calculated as `dry_run_wallet * tradable_balance_ratio`."
|
||||
)
|
||||
|
||||
@@ -2,12 +2,12 @@ from typing import Any, Dict
|
||||
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exceptions import ConfigurationError
|
||||
|
||||
|
||||
def validate_plot_args(args: Dict[str, Any]) -> None:
|
||||
if not args.get('datadir') and not args.get('config'):
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
"You need to specify either `--datadir` or `--config` "
|
||||
"for plot-profit and plot-dataframe.")
|
||||
|
||||
|
||||
@@ -23,11 +23,6 @@ def start_trading(args: Dict[str, Any]) -> int:
|
||||
signal.signal(signal.SIGTERM, term_handler)
|
||||
worker = Worker(args)
|
||||
worker.run()
|
||||
except Exception as e:
|
||||
logger.error(str(e))
|
||||
logger.exception("Fatal exception!")
|
||||
except (KeyboardInterrupt):
|
||||
logger.info('SIGINT received, aborting ...')
|
||||
finally:
|
||||
if worker:
|
||||
logger.info("worker found ... calling exit")
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
# flake8: noqa: F401
|
||||
|
||||
from freqtrade.configuration.config_secrets import sanitize_config
|
||||
from freqtrade.configuration.config_setup import setup_utils_configuration
|
||||
from freqtrade.configuration.config_validation import validate_config_consistency
|
||||
from freqtrade.configuration.configuration import Configuration
|
||||
|
||||
36
freqtrade/configuration/config_secrets.py
Normal file
36
freqtrade/configuration/config_secrets.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from copy import deepcopy
|
||||
|
||||
from freqtrade.constants import Config
|
||||
|
||||
|
||||
def sanitize_config(config: Config, *, show_sensitive: bool = False) -> Config:
|
||||
"""
|
||||
Remove sensitive information from the config.
|
||||
:param config: Configuration
|
||||
:param show_sensitive: Show sensitive information
|
||||
:return: Configuration
|
||||
"""
|
||||
if show_sensitive:
|
||||
return config
|
||||
keys_to_remove = [
|
||||
"exchange.key",
|
||||
"exchange.secret",
|
||||
"exchange.password",
|
||||
"exchange.uid",
|
||||
"telegram.token",
|
||||
"telegram.chat_id",
|
||||
"discord.webhook_url",
|
||||
"api_server.password",
|
||||
]
|
||||
config = deepcopy(config)
|
||||
for key in keys_to_remove:
|
||||
if '.' in key:
|
||||
nested_keys = key.split('.')
|
||||
nested_config = config
|
||||
for nested_key in nested_keys[:-1]:
|
||||
nested_config = nested_config.get(nested_key, {})
|
||||
nested_config[nested_keys[-1]] = 'REDACTED'
|
||||
else:
|
||||
config[key] = 'REDACTED'
|
||||
|
||||
return config
|
||||
@@ -10,7 +10,8 @@ from .configuration import Configuration
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def setup_utils_configuration(args: Dict[str, Any], method: RunMode) -> Dict[str, Any]:
|
||||
def setup_utils_configuration(
|
||||
args: Dict[str, Any], method: RunMode, *, set_dry: bool = True) -> Dict[str, Any]:
|
||||
"""
|
||||
Prepare the configuration for utils subcommands
|
||||
:param args: Cli args from Arguments()
|
||||
@@ -21,7 +22,8 @@ def setup_utils_configuration(args: Dict[str, Any], method: RunMode) -> Dict[str
|
||||
config = configuration.get_config()
|
||||
|
||||
# Ensure these modes are using Dry-run
|
||||
config['dry_run'] = True
|
||||
if set_dry:
|
||||
config['dry_run'] = True
|
||||
validate_config_consistency(config, preliminary=True)
|
||||
|
||||
return config
|
||||
|
||||
@@ -9,7 +9,7 @@ from jsonschema.exceptions import ValidationError, best_match
|
||||
from freqtrade import constants
|
||||
from freqtrade.configuration.deprecated_settings import process_deprecated_setting
|
||||
from freqtrade.enums import RunMode, TradingMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exceptions import ConfigurationError
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -73,7 +73,7 @@ def validate_config_consistency(conf: Dict[str, Any], *, preliminary: bool = Fal
|
||||
Should be ran after loading both configuration and strategy,
|
||||
since strategies can set certain configuration settings too.
|
||||
:param conf: Config in JSON format
|
||||
:return: Returns None if everything is ok, otherwise throw an OperationalException
|
||||
:return: Returns None if everything is ok, otherwise throw an ConfigurationError
|
||||
"""
|
||||
|
||||
# validating trailing stoploss
|
||||
@@ -98,12 +98,12 @@ def validate_config_consistency(conf: Dict[str, Any], *, preliminary: bool = Fal
|
||||
def _validate_unlimited_amount(conf: Dict[str, Any]) -> None:
|
||||
"""
|
||||
If edge is disabled, either max_open_trades or stake_amount need to be set.
|
||||
:raise: OperationalException if config validation failed
|
||||
:raise: ConfigurationError if config validation failed
|
||||
"""
|
||||
if (not conf.get('edge', {}).get('enabled')
|
||||
and conf.get('max_open_trades') == float('inf')
|
||||
and conf.get('stake_amount') == constants.UNLIMITED_STAKE_AMOUNT):
|
||||
raise OperationalException("`max_open_trades` and `stake_amount` cannot both be unlimited.")
|
||||
raise ConfigurationError("`max_open_trades` and `stake_amount` cannot both be unlimited.")
|
||||
|
||||
|
||||
def _validate_price_config(conf: Dict[str, Any]) -> None:
|
||||
@@ -113,18 +113,18 @@ def _validate_price_config(conf: Dict[str, Any]) -> None:
|
||||
# TODO: The below could be an enforced setting when using market orders
|
||||
if (conf.get('order_types', {}).get('entry') == 'market'
|
||||
and conf.get('entry_pricing', {}).get('price_side') not in ('ask', 'other')):
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
'Market entry orders require entry_pricing.price_side = "other".')
|
||||
|
||||
if (conf.get('order_types', {}).get('exit') == 'market'
|
||||
and conf.get('exit_pricing', {}).get('price_side') not in ('bid', 'other')):
|
||||
raise OperationalException('Market exit orders require exit_pricing.price_side = "other".')
|
||||
raise ConfigurationError('Market exit orders require exit_pricing.price_side = "other".')
|
||||
|
||||
|
||||
def _validate_trailing_stoploss(conf: Dict[str, Any]) -> None:
|
||||
|
||||
if conf.get('stoploss') == 0.0:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
'The config stoploss needs to be different from 0 to avoid problems with sell orders.'
|
||||
)
|
||||
# Skip if trailing stoploss is not activated
|
||||
@@ -137,17 +137,17 @@ def _validate_trailing_stoploss(conf: Dict[str, Any]) -> None:
|
||||
|
||||
if tsl_only_offset:
|
||||
if tsl_positive == 0.0:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
'The config trailing_only_offset_is_reached needs '
|
||||
'trailing_stop_positive_offset to be more than 0 in your config.')
|
||||
if tsl_positive > 0 and 0 < tsl_offset <= tsl_positive:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
'The config trailing_stop_positive_offset needs '
|
||||
'to be greater than trailing_stop_positive in your config.')
|
||||
|
||||
# Fetch again without default
|
||||
if 'trailing_stop_positive' in conf and float(conf['trailing_stop_positive']) == 0.0:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
'The config trailing_stop_positive needs to be different from 0 '
|
||||
'to avoid problems with sell orders.'
|
||||
)
|
||||
@@ -162,7 +162,7 @@ def _validate_edge(conf: Dict[str, Any]) -> None:
|
||||
return
|
||||
|
||||
if not conf.get('use_exit_signal', True):
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
"Edge requires `use_exit_signal` to be True, otherwise no sells will happen."
|
||||
)
|
||||
|
||||
@@ -178,7 +178,7 @@ def _validate_whitelist(conf: Dict[str, Any]) -> None:
|
||||
for pl in conf.get('pairlists', [{'method': 'StaticPairList'}]):
|
||||
if (isinstance(pl, dict) and pl.get('method') == 'StaticPairList'
|
||||
and not conf.get('exchange', {}).get('pair_whitelist')):
|
||||
raise OperationalException("StaticPairList requires pair_whitelist to be set.")
|
||||
raise ConfigurationError("StaticPairList requires pair_whitelist to be set.")
|
||||
|
||||
|
||||
def _validate_protections(conf: Dict[str, Any]) -> None:
|
||||
@@ -188,13 +188,13 @@ def _validate_protections(conf: Dict[str, Any]) -> None:
|
||||
|
||||
for prot in conf.get('protections', []):
|
||||
if ('stop_duration' in prot and 'stop_duration_candles' in prot):
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
"Protections must specify either `stop_duration` or `stop_duration_candles`.\n"
|
||||
f"Please fix the protection {prot.get('method')}"
|
||||
)
|
||||
|
||||
if ('lookback_period' in prot and 'lookback_period_candles' in prot):
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
"Protections must specify either `lookback_period` or `lookback_period_candles`.\n"
|
||||
f"Please fix the protection {prot.get('method')}"
|
||||
)
|
||||
@@ -206,7 +206,7 @@ def _validate_ask_orderbook(conf: Dict[str, Any]) -> None:
|
||||
ob_max = ask_strategy.get('order_book_max')
|
||||
if ob_min is not None and ob_max is not None and ask_strategy.get('use_order_book'):
|
||||
if ob_min != ob_max:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
"Using order_book_max != order_book_min in exit_pricing is no longer supported."
|
||||
"Please pick one value and use `order_book_top` in the future."
|
||||
)
|
||||
@@ -234,7 +234,7 @@ def _validate_time_in_force(conf: Dict[str, Any]) -> None:
|
||||
time_in_force = conf.get('order_time_in_force', {})
|
||||
if 'buy' in time_in_force or 'sell' in time_in_force:
|
||||
if conf.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
"Please migrate your time_in_force settings to use 'entry' and 'exit'.")
|
||||
else:
|
||||
logger.warning(
|
||||
@@ -255,7 +255,7 @@ def _validate_order_types(conf: Dict[str, Any]) -> None:
|
||||
'forcesell', 'emergencyexit', 'forceexit', 'forceentry']
|
||||
if any(x in order_types for x in old_order_types):
|
||||
if conf.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
"Please migrate your order_types settings to use the new wording.")
|
||||
else:
|
||||
logger.warning(
|
||||
@@ -280,7 +280,7 @@ def _validate_unfilledtimeout(conf: Dict[str, Any]) -> None:
|
||||
unfilledtimeout = conf.get('unfilledtimeout', {})
|
||||
if any(x in unfilledtimeout for x in ['buy', 'sell']):
|
||||
if conf.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
"Please migrate your unfilledtimeout settings to use the new wording.")
|
||||
else:
|
||||
|
||||
@@ -300,7 +300,7 @@ def _validate_pricing_rules(conf: Dict[str, Any]) -> None:
|
||||
|
||||
if conf.get('ask_strategy') or conf.get('bid_strategy'):
|
||||
if conf.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
"Please migrate your pricing settings to use the new wording.")
|
||||
else:
|
||||
|
||||
@@ -331,7 +331,7 @@ def _validate_freqai_hyperopt(conf: Dict[str, Any]) -> None:
|
||||
freqai_enabled = conf.get('freqai', {}).get('enabled', False)
|
||||
analyze_per_epoch = conf.get('analyze_per_epoch', False)
|
||||
if analyze_per_epoch and freqai_enabled:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
'Using analyze-per-epoch parameter is not supported with a FreqAI strategy.')
|
||||
|
||||
|
||||
@@ -350,7 +350,7 @@ def _validate_freqai_include_timeframes(conf: Dict[str, Any], preliminary: bool)
|
||||
if tf_s < main_tf_s:
|
||||
offending_lines.append(tf)
|
||||
if offending_lines:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f"Main timeframe of {main_tf} must be smaller or equal to FreqAI "
|
||||
f"`include_timeframes`.Offending include-timeframes: {', '.join(offending_lines)}")
|
||||
|
||||
@@ -368,17 +368,17 @@ def _validate_freqai_backtest(conf: Dict[str, Any]) -> None:
|
||||
timerange = conf.get('timerange')
|
||||
freqai_backtest_live_models = conf.get('freqai_backtest_live_models', False)
|
||||
if freqai_backtest_live_models and freqai_enabled and timerange:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
'Using timerange parameter is not supported with '
|
||||
'--freqai-backtest-live-models parameter.')
|
||||
|
||||
if freqai_backtest_live_models and not freqai_enabled:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
'Using --freqai-backtest-live-models parameter is only '
|
||||
'supported with a FreqAI strategy.')
|
||||
|
||||
if freqai_enabled and not freqai_backtest_live_models and not timerange:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
'Please pass --timerange if you intend to use FreqAI for backtesting.')
|
||||
|
||||
|
||||
@@ -386,12 +386,12 @@ def _validate_consumers(conf: Dict[str, Any]) -> None:
|
||||
emc_conf = conf.get('external_message_consumer', {})
|
||||
if emc_conf.get('enabled', False):
|
||||
if len(emc_conf.get('producers', [])) < 1:
|
||||
raise OperationalException("You must specify at least 1 Producer to connect to.")
|
||||
raise ConfigurationError("You must specify at least 1 Producer to connect to.")
|
||||
|
||||
producer_names = [p['name'] for p in emc_conf.get('producers', [])]
|
||||
duplicates = [item for item, count in Counter(producer_names).items() if count > 1]
|
||||
if duplicates:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f"Producer names must be unique. Duplicate: {', '.join(duplicates)}")
|
||||
if conf.get('process_only_new_candles', True):
|
||||
# Warning here or require it?
|
||||
|
||||
@@ -200,6 +200,12 @@ class Configuration:
|
||||
config['exportfilename'] = (config['user_data_dir']
|
||||
/ 'backtest_results')
|
||||
|
||||
if self.args.get('show_sensitive'):
|
||||
logger.warning(
|
||||
"Sensitive information will be shown in the upcomming output. "
|
||||
"Please make sure to never share this output without redacting "
|
||||
"the information yourself.")
|
||||
|
||||
def _process_optimize_options(self, config: Config) -> None:
|
||||
|
||||
# This will override the strategy configuration
|
||||
|
||||
@@ -6,7 +6,7 @@ import logging
|
||||
from typing import Optional
|
||||
|
||||
from freqtrade.constants import Config
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -41,7 +41,7 @@ def process_removed_setting(config: Config,
|
||||
section1_config = config.get(section1, {})
|
||||
if name1 in section1_config:
|
||||
section_2 = f"{section2}.{name2}" if section2 else f"{name2}"
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f"Setting `{section1}.{name1}` has been moved to `{section_2}. "
|
||||
f"Please delete it from your configuration and use the `{section_2}` "
|
||||
"setting instead."
|
||||
@@ -122,7 +122,7 @@ def process_temporary_deprecated_settings(config: Config) -> None:
|
||||
None, 'ignore_roi_if_entry_signal')
|
||||
if (config.get('edge', {}).get('enabled', False)
|
||||
and 'capital_available_percentage' in config.get('edge', {})):
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
"DEPRECATED: "
|
||||
"Using 'edge.capital_available_percentage' has been deprecated in favor of "
|
||||
"'tradable_balance_ratio'. Please migrate your configuration to "
|
||||
@@ -131,7 +131,7 @@ def process_temporary_deprecated_settings(config: Config) -> None:
|
||||
)
|
||||
if 'ticker_interval' in config:
|
||||
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
"DEPRECATED: 'ticker_interval' detected. "
|
||||
"Please use 'timeframe' instead of 'ticker_interval."
|
||||
)
|
||||
|
||||
@@ -11,7 +11,7 @@ from typing import Any, Dict, List, Optional
|
||||
import rapidjson
|
||||
|
||||
from freqtrade.constants import MINIMAL_CONFIG, Config
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
from freqtrade.misc import deep_merge_dicts
|
||||
|
||||
|
||||
@@ -46,7 +46,7 @@ def load_file(path: Path) -> Dict[str, Any]:
|
||||
with path.open('r') as file:
|
||||
config = rapidjson.load(file, parse_mode=CONFIG_PARSE_MODE)
|
||||
except FileNotFoundError:
|
||||
raise OperationalException(f'File "{path}" not found!')
|
||||
raise OperationalException(f'File "{path}" not found!') from None
|
||||
return config
|
||||
|
||||
|
||||
@@ -63,10 +63,10 @@ def load_config_file(path: str) -> Dict[str, Any]:
|
||||
except FileNotFoundError:
|
||||
raise OperationalException(
|
||||
f'Config file "{path}" not found!'
|
||||
' Please create a config file or check whether it exists.')
|
||||
' Please create a config file or check whether it exists.') from None
|
||||
except rapidjson.JSONDecodeError as e:
|
||||
err_range = log_config_error_range(path, str(e))
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f'{e}\n'
|
||||
f'Please verify the following segment of your configuration:\n{err_range}'
|
||||
if err_range else 'Please verify your configuration file for syntax errors.'
|
||||
@@ -83,7 +83,7 @@ def load_from_files(
|
||||
"""
|
||||
config: Config = {}
|
||||
if level > 5:
|
||||
raise OperationalException("Config loop detected.")
|
||||
raise ConfigurationError("Config loop detected.")
|
||||
|
||||
if not files:
|
||||
return deepcopy(MINIMAL_CONFIG)
|
||||
|
||||
@@ -9,7 +9,7 @@ from typing import Optional
|
||||
from typing_extensions import Self
|
||||
|
||||
from freqtrade.constants import DATETIME_PRINT_FORMAT
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exceptions import ConfigurationError
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -156,7 +156,7 @@ class TimeRange:
|
||||
else:
|
||||
stop = int(stops)
|
||||
if start > stop > 0:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f'Start date is after stop date for timerange "{text}"')
|
||||
return cls(stype[0], stype[1], start, stop)
|
||||
raise OperationalException(f'Incorrect syntax for timerange "{text}"')
|
||||
raise ConfigurationError(f'Incorrect syntax for timerange "{text}"')
|
||||
|
||||
@@ -11,7 +11,7 @@ import numpy as np
|
||||
import pandas as pd
|
||||
|
||||
from freqtrade.constants import LAST_BT_RESULT_FN, IntOrInf
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
from freqtrade.misc import file_dump_json, json_load
|
||||
from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename
|
||||
from freqtrade.persistence import LocalTrade, Trade, init_db
|
||||
@@ -106,7 +106,7 @@ def get_latest_hyperopt_file(
|
||||
directory = Path(directory)
|
||||
if predef_filename:
|
||||
if Path(predef_filename).is_absolute():
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
"--hyperopt-filename expects only the filename, not an absolute path.")
|
||||
return directory / predef_filename
|
||||
return directory / get_latest_hyperopt_filename(directory)
|
||||
|
||||
@@ -200,7 +200,7 @@ def convert_ohlcv_format(
|
||||
:param convert_to: Target format
|
||||
:param erase: Erase source data (does not apply if source and target format are identical)
|
||||
"""
|
||||
from freqtrade.data.history.idatahandler import get_datahandler
|
||||
from freqtrade.data.history import get_datahandler
|
||||
src = get_datahandler(config['datadir'], convert_from)
|
||||
trg = get_datahandler(config['datadir'], convert_to)
|
||||
timeframes = config.get('timeframes', [config.get('timeframe')])
|
||||
|
||||
@@ -11,7 +11,7 @@ from pandas import DataFrame, to_datetime
|
||||
from freqtrade.configuration import TimeRange
|
||||
from freqtrade.constants import (DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TRADES_DTYPES,
|
||||
Config, TradeList)
|
||||
from freqtrade.enums import CandleType
|
||||
from freqtrade.enums import CandleType, TradingMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
|
||||
|
||||
@@ -88,25 +88,23 @@ def convert_trades_to_ohlcv(
|
||||
timeframes: List[str],
|
||||
datadir: Path,
|
||||
timerange: TimeRange,
|
||||
erase: bool = False,
|
||||
data_format_ohlcv: str = 'feather',
|
||||
data_format_trades: str = 'feather',
|
||||
candle_type: CandleType = CandleType.SPOT
|
||||
erase: bool,
|
||||
data_format_ohlcv: str,
|
||||
data_format_trades: str,
|
||||
candle_type: CandleType,
|
||||
) -> None:
|
||||
"""
|
||||
Convert stored trades data to ohlcv data
|
||||
"""
|
||||
from freqtrade.data.history.idatahandler import get_datahandler
|
||||
from freqtrade.data.history import get_datahandler
|
||||
data_handler_trades = get_datahandler(datadir, data_format=data_format_trades)
|
||||
data_handler_ohlcv = get_datahandler(datadir, data_format=data_format_ohlcv)
|
||||
if not pairs:
|
||||
pairs = data_handler_trades.trades_get_pairs(datadir)
|
||||
|
||||
logger.info(f"About to convert pairs: '{', '.join(pairs)}', "
|
||||
f"intervals: '{', '.join(timeframes)}' to {datadir}")
|
||||
|
||||
trading_mode = TradingMode.FUTURES if candle_type != CandleType.SPOT else TradingMode.SPOT
|
||||
for pair in pairs:
|
||||
trades = data_handler_trades.trades_load(pair)
|
||||
trades = data_handler_trades.trades_load(pair, trading_mode)
|
||||
for timeframe in timeframes:
|
||||
if erase:
|
||||
if data_handler_ohlcv.ohlcv_purge(pair, timeframe, candle_type=candle_type):
|
||||
@@ -116,7 +114,7 @@ def convert_trades_to_ohlcv(
|
||||
# Store ohlcv
|
||||
data_handler_ohlcv.ohlcv_store(pair, timeframe, data=ohlcv, candle_type=candle_type)
|
||||
except ValueError:
|
||||
logger.exception(f'Could not convert {pair} to OHLCV.')
|
||||
logger.warning(f'Could not convert {pair} to OHLCV.')
|
||||
|
||||
|
||||
def convert_trades_format(config: Config, convert_from: str, convert_to: str, erase: bool):
|
||||
@@ -137,18 +135,19 @@ def convert_trades_format(config: Config, convert_from: str, convert_to: str, er
|
||||
import_kraken_trades_from_csv(config, convert_to)
|
||||
return
|
||||
|
||||
from freqtrade.data.history.idatahandler import get_datahandler
|
||||
from freqtrade.data.history import get_datahandler
|
||||
src = get_datahandler(config['datadir'], convert_from)
|
||||
trg = get_datahandler(config['datadir'], convert_to)
|
||||
|
||||
if 'pairs' not in config:
|
||||
config['pairs'] = src.trades_get_pairs(config['datadir'])
|
||||
logger.info(f"Converting trades for {config['pairs']}")
|
||||
|
||||
trading_mode: TradingMode = config.get('trading_mode', TradingMode.SPOT)
|
||||
for pair in config['pairs']:
|
||||
data = src.trades_load(pair=pair)
|
||||
data = src.trades_load(pair, trading_mode)
|
||||
logger.info(f"Converting {len(data)} trades for {pair}")
|
||||
trg.trades_store(pair, data)
|
||||
trg.trades_store(pair, data, trading_mode)
|
||||
|
||||
if erase and convert_from != convert_to:
|
||||
logger.info(f"Deleting source Trade data for {pair}.")
|
||||
src.trades_purge(pair=pair)
|
||||
src.trades_purge(pair, trading_mode)
|
||||
|
||||
@@ -6,7 +6,8 @@ import pandas as pd
|
||||
from freqtrade.constants import DATETIME_PRINT_FORMAT, DEFAULT_TRADES_COLUMNS, Config
|
||||
from freqtrade.data.converter.trade_converter import (trades_convert_types,
|
||||
trades_df_remove_duplicates)
|
||||
from freqtrade.data.history.idatahandler import get_datahandler
|
||||
from freqtrade.data.history import get_datahandler
|
||||
from freqtrade.enums import TradingMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
|
||||
from freqtrade.resolvers import ExchangeResolver
|
||||
@@ -79,4 +80,4 @@ def import_kraken_trades_from_csv(config: Config, convert_to: str):
|
||||
f"{trades_df['date'].min():{DATETIME_PRINT_FORMAT}} to "
|
||||
f"{trades_df['date'].max():{DATETIME_PRINT_FORMAT}}")
|
||||
|
||||
data_handler.trades_store(pair, trades_df)
|
||||
data_handler.trades_store(pair, trades_df, TradingMode.SPOT)
|
||||
|
||||
@@ -6,7 +6,7 @@ Includes:
* download data from exchange and store to disk
"""
# flake8: noqa: F401
from .datahandlers import get_datahandler
from .history_utils import (convert_trades_to_ohlcv, download_data_main, get_timerange, load_data,
load_pair_history, refresh_backtest_ohlcv_data,
refresh_backtest_trades_data, refresh_data, validate_backtest_data)
from .idatahandler import get_datahandler
freqtrade/data/history/datahandlers/__init__.py (new file, 2 lines)
@@ -0,0 +1,2 @@
# flake8: noqa: F401
from .idatahandler import IDataHandler, get_datahandler
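As an aside (illustrative sketch, not part of the diff above): get_datahandler remains re-exported from freqtrade.data.history, so callers do not need the new datahandlers sub-package directly. Assuming a local data directory:

from pathlib import Path

from freqtrade.data.history import get_datahandler

datadir = Path("user_data/data/binance")  # hypothetical data directory
dh = get_datahandler(datadir, data_format="feather")  # returns a concrete IDataHandler
pairs = dh.trades_get_pairs(datadir)  # pairs that have locally stored trades data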
@@ -5,7 +5,7 @@ from pandas import DataFrame, read_feather, to_datetime

from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS
from freqtrade.enums import CandleType
from freqtrade.enums import CandleType, TradingMode

from .idatahandler import IDataHandler

@@ -82,14 +82,15 @@ class FeatherDataHandler(IDataHandler):
"""
raise NotImplementedError()

def _trades_store(self, pair: str, data: DataFrame) -> None:
def _trades_store(self, pair: str, data: DataFrame, trading_mode: TradingMode) -> None:
"""
Store trades data (list of Dicts) to file
:param pair: Pair - used for filename
:param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS
:param trading_mode: Trading mode to use (used to determine the filename)
"""
filename = self._pair_trades_filename(self._datadir, pair)
filename = self._pair_trades_filename(self._datadir, pair, trading_mode)
self.create_dir_if_needed(filename)
data.reset_index(drop=True).to_feather(filename, compression_level=9, compression='lz4')

@@ -102,15 +103,18 @@ class FeatherDataHandler(IDataHandler):
"""
raise NotImplementedError()

def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> DataFrame:
def _trades_load(
self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
) -> DataFrame:
"""
Load a pair from file, either .json.gz or .json
# TODO: respect timerange ...
:param pair: Load trades for this pair
:param trading_mode: Trading mode to use (used to determine the filename)
:param timerange: Timerange to load trades for - currently not implemented
:return: Dataframe containing trades
"""
filename = self._pair_trades_filename(self._datadir, pair)
filename = self._pair_trades_filename(self._datadir, pair, trading_mode)
if not filename.exists():
return DataFrame(columns=DEFAULT_TRADES_COLUMNS)
@@ -6,7 +6,7 @@ import pandas as pd
|
||||
|
||||
from freqtrade.configuration import TimeRange
|
||||
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS
|
||||
from freqtrade.enums import CandleType
|
||||
from freqtrade.enums import CandleType, TradingMode
|
||||
|
||||
from .idatahandler import IDataHandler
|
||||
|
||||
@@ -35,7 +35,7 @@ class HDF5DataHandler(IDataHandler):
|
||||
self.create_dir_if_needed(filename)
|
||||
|
||||
_data.loc[:, self._columns].to_hdf(
|
||||
filename, key, mode='a', complevel=9, complib='blosc',
|
||||
filename, key=key, mode='a', complevel=9, complib='blosc',
|
||||
format='table', data_columns=['date']
|
||||
)
|
||||
|
||||
@@ -100,17 +100,18 @@ class HDF5DataHandler(IDataHandler):
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def _trades_store(self, pair: str, data: pd.DataFrame) -> None:
|
||||
def _trades_store(self, pair: str, data: pd.DataFrame, trading_mode: TradingMode) -> None:
|
||||
"""
|
||||
Store trades data (list of Dicts) to file
|
||||
:param pair: Pair - used for filename
|
||||
:param data: Dataframe containing trades
|
||||
column sequence as in DEFAULT_TRADES_COLUMNS
|
||||
:param trading_mode: Trading mode to use (used to determine the filename)
|
||||
"""
|
||||
key = self._pair_trades_key(pair)
|
||||
|
||||
data.to_hdf(
|
||||
self._pair_trades_filename(self._datadir, pair), key,
|
||||
self._pair_trades_filename(self._datadir, pair, trading_mode), key=key,
|
||||
mode='a', complevel=9, complib='blosc',
|
||||
format='table', data_columns=['timestamp']
|
||||
)
|
||||
@@ -124,15 +125,18 @@ class HDF5DataHandler(IDataHandler):
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> pd.DataFrame:
|
||||
def _trades_load(
|
||||
self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
|
||||
) -> pd.DataFrame:
|
||||
"""
|
||||
Load a pair from h5 file.
|
||||
:param pair: Load trades for this pair
|
||||
:param trading_mode: Trading mode to use (used to determine the filename)
|
||||
:param timerange: Timerange to load trades for - currently not implemented
|
||||
:return: Dataframe containing trades
|
||||
"""
|
||||
key = self._pair_trades_key(pair)
|
||||
filename = self._pair_trades_filename(self._datadir, pair)
|
||||
filename = self._pair_trades_filename(self._datadir, pair, trading_mode)
|
||||
|
||||
if not filename.exists():
|
||||
return pd.DataFrame(columns=DEFAULT_TRADES_COLUMNS)
|
||||
@@ -172,12 +172,13 @@ class IDataHandler(ABC):
return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match]

@abstractmethod
def _trades_store(self, pair: str, data: DataFrame) -> None:
def _trades_store(self, pair: str, data: DataFrame, trading_mode: TradingMode) -> None:
"""
Store trades data (list of Dicts) to file
:param pair: Pair - used for filename
:param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS
:param trading_mode: Trading mode to use (used to determine the filename)
"""

@abstractmethod
@@ -190,45 +191,55 @@ class IDataHandler(ABC):
"""

@abstractmethod
def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> DataFrame:
def _trades_load(
self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
) -> DataFrame:
"""
Load a pair from file, either .json.gz or .json
:param pair: Load trades for this pair
:param trading_mode: Trading mode to use (used to determine the filename)
:param timerange: Timerange to load trades for - currently not implemented
:return: Dataframe containing trades
"""

def trades_store(self, pair: str, data: DataFrame) -> None:
def trades_store(self, pair: str, data: DataFrame, trading_mode: TradingMode) -> None:
"""
Store trades data (list of Dicts) to file
:param pair: Pair - used for filename
:param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS
:param trading_mode: Trading mode to use (used to determine the filename)
"""
# Filter on expected columns (will remove the actual date column).
self._trades_store(pair, data[DEFAULT_TRADES_COLUMNS])
self._trades_store(pair, data[DEFAULT_TRADES_COLUMNS], trading_mode)

def trades_purge(self, pair: str) -> bool:
def trades_purge(self, pair: str, trading_mode: TradingMode) -> bool:
"""
Remove data for this pair
:param pair: Delete data for this pair.
:param trading_mode: Trading mode to use (used to determine the filename)
:return: True when deleted, false if file did not exist.
"""
filename = self._pair_trades_filename(self._datadir, pair)
filename = self._pair_trades_filename(self._datadir, pair, trading_mode)
if filename.exists():
filename.unlink()
return True
return False

def trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> DataFrame:
def trades_load(
self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
) -> DataFrame:
"""
Load a pair from file, either .json.gz or .json
Removes duplicates in the process.
:param pair: Load trades for this pair
:param trading_mode: Trading mode to use (used to determine the filename)
:param timerange: Timerange to load trades for - currently not implemented
:return: List of trades
"""
trades = trades_df_remove_duplicates(self._trades_load(pair, timerange=timerange))
trades = trades_df_remove_duplicates(
self._trades_load(pair, trading_mode, timerange=timerange)
)

trades = trades_convert_types(trades)
return trades
@@ -264,8 +275,12 @@ class IDataHandler(ABC):
return filename

@classmethod
def _pair_trades_filename(cls, datadir: Path, pair: str) -> Path:
def _pair_trades_filename(cls, datadir: Path, pair: str, trading_mode: TradingMode) -> Path:
pair_s = misc.pair_to_filename(pair)
if trading_mode == TradingMode.FUTURES:
# Futures pair ...
datadir = datadir.joinpath('futures')

filename = datadir.joinpath(f'{pair_s}-trades.{cls._get_file_extension()}')
return filename
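Illustrative usage sketch (not part of the diff): the trading mode is now threaded through the public trades API, and with TradingMode.FUTURES the handler resolves the filename under a 'futures' subdirectory of the data directory. Pair names and the feather format below are examples only.

from pathlib import Path

from freqtrade.data.history import get_datahandler
from freqtrade.enums import TradingMode

dh = get_datahandler(Path("user_data/data/binance"), data_format="feather")

# Spot trades, roughly <datadir>/BTC_USDT-trades.feather (see _pair_trades_filename above).
spot_trades = dh.trades_load("BTC/USDT", TradingMode.SPOT)

# Futures trades are looked up under <datadir>/futures/ instead.
futures_trades = dh.trades_load("BTC/USDT:USDT", TradingMode.FUTURES)

# Storing and purging take the same additional argument.
dh.trades_store("BTC/USDT", spot_trades, TradingMode.SPOT)
dh.trades_purge("BTC/USDT", TradingMode.SPOT)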
@@ -8,7 +8,7 @@ from freqtrade import misc
|
||||
from freqtrade.configuration import TimeRange
|
||||
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS
|
||||
from freqtrade.data.converter import trades_dict_to_list, trades_list_to_df
|
||||
from freqtrade.enums import CandleType
|
||||
from freqtrade.enums import CandleType, TradingMode
|
||||
|
||||
from .idatahandler import IDataHandler
|
||||
|
||||
@@ -37,7 +37,7 @@ class JsonDataHandler(IDataHandler):
|
||||
self.create_dir_if_needed(filename)
|
||||
_data = data.copy()
|
||||
# Convert date to int
|
||||
_data['date'] = _data['date'].view(np.int64) // 1000 // 1000
|
||||
_data['date'] = _data['date'].astype(np.int64) // 1000 // 1000
|
||||
|
||||
# Reset index, select only appropriate columns and save as json
|
||||
_data.reset_index(drop=True).loc[:, self._columns].to_json(
|
||||
@@ -94,14 +94,15 @@ class JsonDataHandler(IDataHandler):
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def _trades_store(self, pair: str, data: DataFrame) -> None:
|
||||
def _trades_store(self, pair: str, data: DataFrame, trading_mode: TradingMode) -> None:
|
||||
"""
|
||||
Store trades data (list of Dicts) to file
|
||||
:param pair: Pair - used for filename
|
||||
:param data: Dataframe containing trades
|
||||
column sequence as in DEFAULT_TRADES_COLUMNS
|
||||
:param trading_mode: Trading mode to use (used to determine the filename)
|
||||
"""
|
||||
filename = self._pair_trades_filename(self._datadir, pair)
|
||||
filename = self._pair_trades_filename(self._datadir, pair, trading_mode)
|
||||
trades = data.values.tolist()
|
||||
misc.file_dump_json(filename, trades, is_zip=self._use_zip)
|
||||
|
||||
@@ -114,15 +115,18 @@ class JsonDataHandler(IDataHandler):
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> DataFrame:
|
||||
def _trades_load(
|
||||
self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
|
||||
) -> DataFrame:
|
||||
"""
|
||||
Load a pair from file, either .json.gz or .json
|
||||
# TODO: respect timerange ...
|
||||
:param pair: Load trades for this pair
|
||||
:param trading_mode: Trading mode to use (used to determine the filename)
|
||||
:param timerange: Timerange to load trades for - currently not implemented
|
||||
:return: Dataframe containing trades
|
||||
"""
|
||||
filename = self._pair_trades_filename(self._datadir, pair)
|
||||
filename = self._pair_trades_filename(self._datadir, pair, trading_mode)
|
||||
tradesdata = misc.file_load_json(filename)
|
||||
|
||||
if not tradesdata:
|
||||
@@ -4,8 +4,8 @@ from typing import Optional
|
||||
from pandas import DataFrame, read_parquet, to_datetime
|
||||
|
||||
from freqtrade.configuration import TimeRange
|
||||
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TradeList
|
||||
from freqtrade.enums import CandleType
|
||||
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS
|
||||
from freqtrade.enums import CandleType, TradingMode
|
||||
|
||||
from .idatahandler import IDataHandler
|
||||
|
||||
@@ -81,14 +81,15 @@ class ParquetDataHandler(IDataHandler):
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def _trades_store(self, pair: str, data: DataFrame) -> None:
|
||||
def _trades_store(self, pair: str, data: DataFrame, trading_mode: TradingMode) -> None:
|
||||
"""
|
||||
Store trades data (list of Dicts) to file
|
||||
:param pair: Pair - used for filename
|
||||
:param data: Dataframe containing trades
|
||||
column sequence as in DEFAULT_TRADES_COLUMNS
|
||||
:param trading_mode: Trading mode to use (used to determine the filename)
|
||||
"""
|
||||
filename = self._pair_trades_filename(self._datadir, pair)
|
||||
filename = self._pair_trades_filename(self._datadir, pair, trading_mode)
|
||||
self.create_dir_if_needed(filename)
|
||||
data.reset_index(drop=True).to_parquet(filename)
|
||||
|
||||
@@ -101,15 +102,18 @@ class ParquetDataHandler(IDataHandler):
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList:
|
||||
def _trades_load(
|
||||
self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
|
||||
) -> DataFrame:
|
||||
"""
|
||||
Load a pair from file, either .json.gz or .json
|
||||
# TODO: respect timerange ...
|
||||
:param pair: Load trades for this pair
|
||||
:param trading_mode: Trading mode to use (used to determine the filename)
|
||||
:param timerange: Timerange to load trades for - currently not implemented
|
||||
:return: List of trades
|
||||
"""
|
||||
filename = self._pair_trades_filename(self._datadir, pair)
|
||||
filename = self._pair_trades_filename(self._datadir, pair, trading_mode)
|
||||
if not filename.exists():
|
||||
return DataFrame(columns=DEFAULT_TRADES_COLUMNS)
|
||||
|
||||
@@ -12,8 +12,8 @@ from freqtrade.constants import (DATETIME_PRINT_FORMAT, DEFAULT_DATAFRAME_COLUMN
from freqtrade.data.converter import (clean_ohlcv_dataframe, convert_trades_to_ohlcv,
ohlcv_to_dataframe, trades_df_remove_duplicates,
trades_list_to_df)
from freqtrade.data.history.idatahandler import IDataHandler, get_datahandler
from freqtrade.enums import CandleType
from freqtrade.data.history.datahandlers import IDataHandler, get_datahandler
from freqtrade.enums import CandleType, TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import Exchange
from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist
@@ -333,7 +333,8 @@ def _download_trades_history(exchange: Exchange,
pair: str, *,
new_pairs_days: int = 30,
timerange: Optional[TimeRange] = None,
data_handler: IDataHandler
data_handler: IDataHandler,
trading_mode: TradingMode,
) -> bool:
"""
Download trade history from the exchange.
@@ -349,7 +350,7 @@ def _download_trades_history(exchange: Exchange,
if timerange.stoptype == 'date':
until = timerange.stopts * 1000

trades = data_handler.trades_load(pair)
trades = data_handler.trades_load(pair, trading_mode)

# TradesList columns are defined in constants.DEFAULT_TRADES_COLUMNS
# DEFAULT_TRADES_COLUMNS: 0 -> timestamp
@@ -388,7 +389,7 @@ def _download_trades_history(exchange: Exchange,
trades = concat([trades, new_trades_df], axis=0)
# Remove duplicates to make sure we're not storing data we don't need
trades = trades_df_remove_duplicates(trades)
data_handler.trades_store(pair, data=trades)
data_handler.trades_store(pair, trades, trading_mode)

logger.debug("New Start: %s", 'None' if trades.empty else
f"{trades.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}")
@@ -405,8 +406,10 @@ def _download_trades_history(exchange: Exchange,


def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir: Path,
timerange: TimeRange, new_pairs_days: int = 30,
erase: bool = False, data_format: str = 'feather') -> List[str]:
timerange: TimeRange, trading_mode: TradingMode,
new_pairs_days: int = 30,
erase: bool = False, data_format: str = 'feather',
) -> List[str]:
"""
Refresh stored trades data for backtesting and hyperopt operations.
Used by freqtrade download-data subcommand.
@@ -421,7 +424,7 @@ def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir:
continue

if erase:
if data_handler.trades_purge(pair):
if data_handler.trades_purge(pair, trading_mode):
logger.info(f'Deleting existing data for pair {pair}.')

logger.info(f'Downloading trades for pair {pair}.')
@@ -429,7 +432,8 @@ def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir:
pair=pair,
new_pairs_days=new_pairs_days,
timerange=timerange,
data_handler=data_handler)
data_handler=data_handler,
trading_mode=trading_mode)
return pairs_not_available


@@ -516,12 +520,12 @@ def download_data_main(config: Config) -> None:
# Start downloading
try:
if config.get('download_trades'):
if config.get('trading_mode') == 'futures':
raise OperationalException("Trade download not supported for futures.")
pairs_not_available = refresh_backtest_trades_data(
exchange, pairs=expanded_pairs, datadir=config['datadir'],
timerange=timerange, new_pairs_days=config['new_pairs_days'],
erase=bool(config.get('erase')), data_format=config['dataformat_trades'])
erase=bool(config.get('erase')), data_format=config['dataformat_trades'],
trading_mode=config.get('trading_mode', TradingMode.SPOT),
)

# Convert downloaded trade data to different timeframes
convert_trades_to_ohlcv(
@@ -529,6 +533,7 @@ def download_data_main(config: Config) -> None:
datadir=config['datadir'], timerange=timerange, erase=bool(config.get('erase')),
data_format_ohlcv=config['dataformat_ohlcv'],
data_format_trades=config['dataformat_trades'],
candle_type=config.get('candle_type_def', CandleType.SPOT),
)
else:
if not exchange.get_option('ohlcv_has_history', True):
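A hedged sketch (not part of the diff) of how the extended refresh_backtest_trades_data signature would be called; the exchange object, config dict and pair list here are assumed placeholders:

from freqtrade.configuration import TimeRange
from freqtrade.data.history import refresh_backtest_trades_data
from freqtrade.enums import TradingMode

# `exchange` is an initialised freqtrade Exchange instance; `config` a loaded config dict.
unavailable = refresh_backtest_trades_data(
    exchange,
    pairs=["BTC/USDT", "ETH/USDT"],
    datadir=config['datadir'],
    timerange=TimeRange.parse_timerange("20230101-20230201"),
    trading_mode=config.get('trading_mode', TradingMode.SPOT),
    erase=False,
    data_format=config['dataformat_trades'],
)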
@@ -12,6 +12,12 @@ class OperationalException(FreqtradeException):
"""


class ConfigurationError(OperationalException):
"""
Configuration error. Usually caused by invalid configuration.
"""


class DependencyException(FreqtradeException):
"""
Indicates that an assumed dependency is not met.
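For context, an illustrative sketch (not part of the diff): since ConfigurationError subclasses OperationalException, existing "except OperationalException" handlers keep working, while callers can single out configuration problems. The helper below is hypothetical:

from freqtrade.exceptions import ConfigurationError, OperationalException

def check_stake_currency(stake_currency: str, quote_currencies: list) -> None:
    # Hypothetical helper mirroring the exchange-side validation further below.
    if stake_currency not in quote_currencies:
        raise ConfigurationError(f"{stake_currency} is not available as stake.")

try:
    check_stake_currency("USDT", ["BTC", "ETH"])
except ConfigurationError as e:
    print(f"Configuration error: {e}")  # handled before the generic case
except OperationalException as e:
    print(f"Operational error: {e}")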
@@ -15,10 +15,12 @@ from freqtrade.exchange.exchange_utils import (ROUND_DOWN, ROUND_UP, amount_to_c
|
||||
contracts_to_amount, date_minus_candles,
|
||||
is_exchange_known_ccxt, list_available_exchanges,
|
||||
market_is_active, price_to_precision,
|
||||
timeframe_to_minutes, timeframe_to_msecs,
|
||||
timeframe_to_next_date, timeframe_to_prev_date,
|
||||
timeframe_to_resample_freq, timeframe_to_seconds,
|
||||
validate_exchange)
|
||||
from freqtrade.exchange.exchange_utils_timeframe import (timeframe_to_minutes, timeframe_to_msecs,
|
||||
timeframe_to_next_date,
|
||||
timeframe_to_prev_date,
|
||||
timeframe_to_resample_freq,
|
||||
timeframe_to_seconds)
|
||||
from freqtrade.exchange.gate import Gate
|
||||
from freqtrade.exchange.hitbtc import Hitbtc
|
||||
from freqtrade.exchange.htx import Htx
|
||||
|
||||
(File diff suppressed because it is too large.)
@@ -8,7 +8,7 @@ import logging
|
||||
import signal
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from math import floor
|
||||
from math import floor, isnan
|
||||
from threading import Lock
|
||||
from typing import Any, Coroutine, Dict, List, Literal, Optional, Tuple, Union
|
||||
|
||||
@@ -24,19 +24,21 @@ from freqtrade.constants import (DEFAULT_AMOUNT_RESERVE_PERCENT, NON_OPEN_EXCHAN
|
||||
ListPairsWithTimeframes, MakerTaker, OBLiteral, PairWithTimeframe)
|
||||
from freqtrade.data.converter import clean_ohlcv_dataframe, ohlcv_to_dataframe, trades_dict_to_list
|
||||
from freqtrade.enums import OPTIMIZE_MODES, CandleType, MarginMode, PriceType, RunMode, TradingMode
|
||||
from freqtrade.exceptions import (DDosProtection, ExchangeError, InsufficientFundsError,
|
||||
InvalidOrderException, OperationalException, PricingError,
|
||||
RetryableOrderError, TemporaryError)
|
||||
from freqtrade.exceptions import (ConfigurationError, DDosProtection, ExchangeError,
|
||||
InsufficientFundsError, InvalidOrderException,
|
||||
OperationalException, PricingError, RetryableOrderError,
|
||||
TemporaryError)
|
||||
from freqtrade.exchange.common import (API_FETCH_ORDER_RETRY_COUNT, remove_exchange_credentials,
|
||||
retrier, retrier_async)
|
||||
from freqtrade.exchange.exchange_utils import (ROUND, ROUND_DOWN, ROUND_UP, CcxtModuleType,
|
||||
amount_to_contract_precision, amount_to_contracts,
|
||||
amount_to_precision, contracts_to_amount,
|
||||
date_minus_candles, is_exchange_known_ccxt,
|
||||
market_is_active, price_to_precision,
|
||||
timeframe_to_minutes, timeframe_to_msecs,
|
||||
timeframe_to_next_date, timeframe_to_prev_date,
|
||||
timeframe_to_seconds)
|
||||
market_is_active, price_to_precision)
|
||||
from freqtrade.exchange.exchange_utils_timeframe import (timeframe_to_minutes, timeframe_to_msecs,
|
||||
timeframe_to_next_date,
|
||||
timeframe_to_prev_date,
|
||||
timeframe_to_seconds)
|
||||
from freqtrade.exchange.types import OHLCVResponse, OrderBook, Ticker, Tickers
|
||||
from freqtrade.misc import (chunks, deep_merge_dicts, file_dump_json, file_load_json,
|
||||
safe_value_fallback2)
|
||||
@@ -87,6 +89,8 @@ class Exchange:
|
||||
"order_props_in_contracts": ['amount', 'filled', 'remaining'],
|
||||
# Override createMarketBuyOrderRequiresPrice where ccxt has it wrong
|
||||
"marketOrderRequiresPrice": False,
|
||||
"exchange_has_overrides": {}, # Dictionary overriding ccxt's "has".
|
||||
# Expected to be in the format {"fetchOHLCV": True} or {"fetchOHLCV": False}
|
||||
}
|
||||
_ft_has: Dict = {}
|
||||
_ft_has_futures: Dict = {}
|
||||
@@ -526,7 +530,7 @@ class Exchange:
|
||||
)
|
||||
quote_currencies = self.get_quote_currencies()
|
||||
if stake_currency not in quote_currencies:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f"{stake_currency} is not available as stake on {self.name}. "
|
||||
f"Available currencies are: {', '.join(quote_currencies)}")
|
||||
|
||||
@@ -594,7 +598,7 @@ class Exchange:
|
||||
f"is therefore not supported. ccxt fetchOHLCV: {self.exchange_has('fetchOHLCV')}")
|
||||
|
||||
if timeframe and (timeframe not in self.timeframes):
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f"Invalid timeframe '{timeframe}'. This exchange supports: {self.timeframes}")
|
||||
|
||||
if (
|
||||
@@ -602,7 +606,7 @@ class Exchange:
|
||||
and self._config['runmode'] != RunMode.UTIL_EXCHANGE
|
||||
and timeframe_to_minutes(timeframe) < 1
|
||||
):
|
||||
raise OperationalException("Timeframes < 1m are currently not supported by Freqtrade.")
|
||||
raise ConfigurationError("Timeframes < 1m are currently not supported by Freqtrade.")
|
||||
|
||||
def validate_ordertypes(self, order_types: Dict) -> None:
|
||||
"""
|
||||
@@ -610,7 +614,7 @@ class Exchange:
|
||||
"""
|
||||
if any(v == 'market' for k, v in order_types.items()):
|
||||
if not self.exchange_has('createMarketOrder'):
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f'Exchange {self.name} does not support market orders.')
|
||||
self.validate_stop_ordertypes(order_types)
|
||||
|
||||
@@ -620,7 +624,7 @@ class Exchange:
|
||||
"""
|
||||
if (order_types.get("stoploss_on_exchange")
|
||||
and not self._ft_has.get("stoploss_on_exchange", False)):
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f'On exchange stoploss is not supported for {self.name}.'
|
||||
)
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
@@ -630,17 +634,17 @@ class Exchange:
|
||||
and 'stoploss_price_type' in order_types
|
||||
and order_types['stoploss_price_type'] not in price_mapping
|
||||
):
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f'On exchange stoploss price type is not supported for {self.name}.'
|
||||
)
|
||||
|
||||
def validate_pricing(self, pricing: Dict) -> None:
|
||||
if pricing.get('use_order_book', False) and not self.exchange_has('fetchL2OrderBook'):
|
||||
raise OperationalException(f'Orderbook not available for {self.name}.')
|
||||
raise ConfigurationError(f'Orderbook not available for {self.name}.')
|
||||
if (not pricing.get('use_order_book', False) and (
|
||||
not self.exchange_has('fetchTicker')
|
||||
or not self._ft_has['tickers_have_price'])):
|
||||
raise OperationalException(f'Ticker pricing not available for {self.name}.')
|
||||
raise ConfigurationError(f'Ticker pricing not available for {self.name}.')
|
||||
|
||||
def validate_order_time_in_force(self, order_time_in_force: Dict) -> None:
|
||||
"""
|
||||
@@ -648,7 +652,7 @@ class Exchange:
|
||||
"""
|
||||
if any(v.upper() not in self._ft_has["order_time_in_force"]
|
||||
for k, v in order_time_in_force.items()):
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f'Time in force policies are not supported for {self.name} yet.')
|
||||
|
||||
def validate_required_startup_candles(self, startup_candles: int, timeframe: str) -> int:
|
||||
@@ -659,7 +663,7 @@ class Exchange:
|
||||
|
||||
candle_limit = self.ohlcv_candle_limit(
|
||||
timeframe, self._config['candle_type_def'],
|
||||
int(date_minus_candles(timeframe, startup_candles).timestamp() * 1000)
|
||||
dt_ts(date_minus_candles(timeframe, startup_candles))
|
||||
if timeframe else None)
|
||||
# Require one more candle - to account for the still open candle.
|
||||
candle_count = startup_candles + 1
|
||||
@@ -670,12 +674,12 @@ class Exchange:
|
||||
|
||||
if required_candle_call_count > 5:
|
||||
# Only allow 5 calls per pair to somewhat limit the impact
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f"This strategy requires {startup_candles} candles to start, "
|
||||
"which is more than 5x "
|
||||
f"the amount of candles {self.name} provides for {timeframe}.")
|
||||
elif required_candle_call_count > 1:
|
||||
raise OperationalException(
|
||||
raise ConfigurationError(
|
||||
f"This strategy requires {startup_candles} candles to start, which is more than "
|
||||
f"the amount of candles {self.name} provides for {timeframe}.")
|
||||
if required_candle_call_count > 1:
|
||||
@@ -716,6 +720,8 @@ class Exchange:
|
||||
:param endpoint: Name of endpoint (e.g. 'fetchOHLCV', 'fetchTickers')
|
||||
:return: bool
|
||||
"""
|
||||
if endpoint in self._ft_has.get('exchange_has_overrides', {}):
|
||||
return self._ft_has['exchange_has_overrides'][endpoint]
|
||||
return endpoint in self._api.has and self._api.has[endpoint]
|
||||
|
||||
def get_precision_amount(self, pair: str) -> Optional[float]:
|
||||
@@ -2043,7 +2049,7 @@ class Exchange:
|
||||
timeframe, candle_type, since_ms)
|
||||
move_to = one_call * self.required_candle_call_count
|
||||
now = timeframe_to_next_date(timeframe)
|
||||
since_ms = int((now - timedelta(seconds=move_to // 1000)).timestamp() * 1000)
|
||||
since_ms = dt_ts(now - timedelta(seconds=move_to // 1000))
|
||||
|
||||
if since_ms:
|
||||
return self._async_get_historic_ohlcv(
|
||||
@@ -2503,7 +2509,7 @@ class Exchange:
|
||||
)
|
||||
|
||||
if type(since) is datetime:
|
||||
since = int(since.timestamp()) * 1000 # * 1000 for ms
|
||||
since = dt_ts(since)
|
||||
|
||||
try:
|
||||
funding_history = self._api.fetch_funding_history(
|
||||
@@ -2833,7 +2839,7 @@ class Exchange:
|
||||
|
||||
if not close_date:
|
||||
close_date = datetime.now(timezone.utc)
|
||||
since_ms = int(timeframe_to_prev_date(timeframe, open_date).timestamp()) * 1000
|
||||
since_ms = dt_ts(timeframe_to_prev_date(timeframe, open_date))
|
||||
|
||||
mark_comb: PairWithTimeframe = (pair, timeframe, mark_price_type)
|
||||
funding_comb: PairWithTimeframe = (pair, timeframe_ff, CandleType.FUNDING_RATE)
|
||||
@@ -2887,7 +2893,7 @@ class Exchange:
|
||||
else:
|
||||
# Fill up missing funding_rate candles with fallback value
|
||||
combined = mark_rates.merge(
|
||||
funding_rates, on='date', how="outer", suffixes=["_mark", "_fund"]
|
||||
funding_rates, on='date', how="left", suffixes=["_mark", "_fund"]
|
||||
)
|
||||
combined['open_fund'] = combined['open_fund'].fillna(futures_funding_rate)
|
||||
return combined
|
||||
@@ -2916,7 +2922,8 @@ class Exchange:
|
||||
if not df.empty:
|
||||
df1 = df[(df['date'] >= open_date) & (df['date'] <= close_date)]
|
||||
fees = sum(df1['open_fund'] * df1['open_mark'] * amount)
|
||||
|
||||
if isnan(fees):
|
||||
fees = 0.0
|
||||
# Negate fees for longs as funding_fees expects it this way based on live endpoints.
|
||||
return fees if is_short else -fees
|
||||
|
||||
@@ -3092,3 +3099,4 @@ class Exchange:
|
||||
# describes the min amt for a tier, and the lowest tier will always go down to 0
|
||||
else:
|
||||
raise ExchangeError(f"Cannot get maintenance ratio using {self.name}")
|
||||
raise ExchangeError(f"Cannot get maintenance ratio using {self.name}")
|
||||
|
||||
@@ -11,9 +11,9 @@ from ccxt import (DECIMAL_PLACES, ROUND, ROUND_DOWN, ROUND_UP, SIGNIFICANT_DIGIT
|
||||
|
||||
from freqtrade.exchange.common import (BAD_EXCHANGES, EXCHANGE_HAS_OPTIONAL, EXCHANGE_HAS_REQUIRED,
|
||||
SUPPORTED_EXCHANGES)
|
||||
from freqtrade.exchange.exchange_utils_timeframe import timeframe_to_minutes, timeframe_to_prev_date
|
||||
from freqtrade.types import ValidExchangesType
|
||||
from freqtrade.util import FtPrecise
|
||||
from freqtrade.util.datetime_helpers import dt_from_ts, dt_ts
|
||||
|
||||
|
||||
CcxtModuleType = Any
|
||||
@@ -108,78 +108,6 @@ def list_available_exchanges(all_exchanges: bool) -> List[ValidExchangesType]:
|
||||
return exchanges_valid
|
||||
|
||||
|
||||
def timeframe_to_seconds(timeframe: str) -> int:
|
||||
"""
|
||||
Translates the timeframe interval value written in the human readable
|
||||
form ('1m', '5m', '1h', '1d', '1w', etc.) to the number
|
||||
of seconds for one timeframe interval.
|
||||
"""
|
||||
return ccxt.Exchange.parse_timeframe(timeframe)
|
||||
|
||||
|
||||
def timeframe_to_minutes(timeframe: str) -> int:
|
||||
"""
|
||||
Same as timeframe_to_seconds, but returns minutes.
|
||||
"""
|
||||
return ccxt.Exchange.parse_timeframe(timeframe) // 60
|
||||
|
||||
|
||||
def timeframe_to_msecs(timeframe: str) -> int:
|
||||
"""
|
||||
Same as timeframe_to_seconds, but returns milliseconds.
|
||||
"""
|
||||
return ccxt.Exchange.parse_timeframe(timeframe) * 1000
|
||||
|
||||
|
||||
def timeframe_to_resample_freq(timeframe: str) -> str:
|
||||
"""
|
||||
Translates the timeframe interval value written in the human readable
|
||||
form ('1m', '5m', '1h', '1d', '1w', etc.) to the resample frequency
|
||||
used by pandas ('1T', '5T', '1H', '1D', '1W', etc.)
|
||||
"""
|
||||
if timeframe == '1y':
|
||||
return '1YS'
|
||||
timeframe_seconds = timeframe_to_seconds(timeframe)
|
||||
timeframe_minutes = timeframe_seconds // 60
|
||||
resample_interval = f'{timeframe_seconds}s'
|
||||
if 10000 < timeframe_minutes < 43200:
|
||||
resample_interval = '1W-MON'
|
||||
elif timeframe_minutes >= 43200 and timeframe_minutes < 525600:
|
||||
# Monthly candles need special treatment to stick to the 1st of the month
|
||||
resample_interval = f'{timeframe}S'
|
||||
elif timeframe_minutes > 43200:
|
||||
resample_interval = timeframe
|
||||
return resample_interval
|
||||
|
||||
|
||||
def timeframe_to_prev_date(timeframe: str, date: Optional[datetime] = None) -> datetime:
|
||||
"""
|
||||
Use Timeframe and determine the candle start date for this date.
|
||||
Does not round when given a candle start date.
|
||||
:param timeframe: timeframe in string format (e.g. "5m")
|
||||
:param date: date to use. Defaults to now(utc)
|
||||
:returns: date of previous candle (with utc timezone)
|
||||
"""
|
||||
if not date:
|
||||
date = datetime.now(timezone.utc)
|
||||
|
||||
new_timestamp = ccxt.Exchange.round_timeframe(timeframe, dt_ts(date), ROUND_DOWN) // 1000
|
||||
return dt_from_ts(new_timestamp)
|
||||
|
||||
|
||||
def timeframe_to_next_date(timeframe: str, date: Optional[datetime] = None) -> datetime:
|
||||
"""
|
||||
Use Timeframe and determine next candle.
|
||||
:param timeframe: timeframe in string format (e.g. "5m")
|
||||
:param date: date to use. Defaults to now(utc)
|
||||
:returns: date of next candle (with utc timezone)
|
||||
"""
|
||||
if not date:
|
||||
date = datetime.now(timezone.utc)
|
||||
new_timestamp = ccxt.Exchange.round_timeframe(timeframe, dt_ts(date), ROUND_UP) // 1000
|
||||
return dt_from_ts(new_timestamp)
|
||||
|
||||
|
||||
def date_minus_candles(
|
||||
timeframe: str, candle_count: int, date: Optional[datetime] = None) -> datetime:
|
||||
"""
|
||||
|
||||
freqtrade/exchange/exchange_utils_timeframe.py (new file, 81 lines)
@@ -0,0 +1,81 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
|
||||
import ccxt
|
||||
from ccxt import ROUND_DOWN, ROUND_UP
|
||||
|
||||
from freqtrade.util.datetime_helpers import dt_from_ts, dt_ts
|
||||
|
||||
|
||||
def timeframe_to_seconds(timeframe: str) -> int:
|
||||
"""
|
||||
Translates the timeframe interval value written in the human readable
|
||||
form ('1m', '5m', '1h', '1d', '1w', etc.) to the number
|
||||
of seconds for one timeframe interval.
|
||||
"""
|
||||
return ccxt.Exchange.parse_timeframe(timeframe)
|
||||
|
||||
|
||||
def timeframe_to_minutes(timeframe: str) -> int:
|
||||
"""
|
||||
Same as timeframe_to_seconds, but returns minutes.
|
||||
"""
|
||||
return ccxt.Exchange.parse_timeframe(timeframe) // 60
|
||||
|
||||
|
||||
def timeframe_to_msecs(timeframe: str) -> int:
|
||||
"""
|
||||
Same as timeframe_to_seconds, but returns milliseconds.
|
||||
"""
|
||||
return ccxt.Exchange.parse_timeframe(timeframe) * 1000
|
||||
|
||||
|
||||
def timeframe_to_resample_freq(timeframe: str) -> str:
|
||||
"""
|
||||
Translates the timeframe interval value written in the human readable
|
||||
form ('1m', '5m', '1h', '1d', '1w', etc.) to the resample frequency
|
||||
used by pandas ('1T', '5T', '1H', '1D', '1W', etc.)
|
||||
"""
|
||||
if timeframe == '1y':
|
||||
return '1YS'
|
||||
timeframe_seconds = timeframe_to_seconds(timeframe)
|
||||
timeframe_minutes = timeframe_seconds // 60
|
||||
resample_interval = f'{timeframe_seconds}s'
|
||||
if 10000 < timeframe_minutes < 43200:
|
||||
resample_interval = '1W-MON'
|
||||
elif timeframe_minutes >= 43200 and timeframe_minutes < 525600:
|
||||
# Monthly candles need special treatment to stick to the 1st of the month
|
||||
resample_interval = f'{timeframe}S'
|
||||
elif timeframe_minutes > 43200:
|
||||
resample_interval = timeframe
|
||||
return resample_interval
|
||||
|
||||
|
||||
def timeframe_to_prev_date(timeframe: str, date: Optional[datetime] = None) -> datetime:
|
||||
"""
|
||||
Use Timeframe and determine the candle start date for this date.
|
||||
Does not round when given a candle start date.
|
||||
:param timeframe: timeframe in string format (e.g. "5m")
|
||||
:param date: date to use. Defaults to now(utc)
|
||||
:returns: date of previous candle (with utc timezone)
|
||||
"""
|
||||
if not date:
|
||||
date = datetime.now(timezone.utc)
|
||||
|
||||
new_timestamp = ccxt.Exchange.round_timeframe(
|
||||
timeframe, dt_ts(date), ROUND_DOWN) // 1000
|
||||
return dt_from_ts(new_timestamp)
|
||||
|
||||
|
||||
def timeframe_to_next_date(timeframe: str, date: Optional[datetime] = None) -> datetime:
|
||||
"""
|
||||
Use Timeframe and determine next candle.
|
||||
:param timeframe: timeframe in string format (e.g. "5m")
|
||||
:param date: date to use. Defaults to now(utc)
|
||||
:returns: date of next candle (with utc timezone)
|
||||
"""
|
||||
if not date:
|
||||
date = datetime.now(timezone.utc)
|
||||
new_timestamp = ccxt.Exchange.round_timeframe(
|
||||
timeframe, dt_ts(date), ROUND_UP) // 1000
|
||||
return dt_from_ts(new_timestamp)
|
||||
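A small worked example (not part of the diff) of the relocated timeframe helpers, following the docstrings above:

from datetime import datetime, timezone

from freqtrade.exchange.exchange_utils_timeframe import (
    timeframe_to_minutes, timeframe_to_next_date, timeframe_to_prev_date)

assert timeframe_to_minutes("5m") == 5
assert timeframe_to_minutes("1h") == 60

d = datetime(2024, 1, 1, 12, 3, tzinfo=timezone.utc)
# Boundaries of the 5m candle containing 12:03 UTC.
assert timeframe_to_prev_date("5m", d) == datetime(2024, 1, 1, 12, 0, tzinfo=timezone.utc)
assert timeframe_to_next_date("5m", d) == datetime(2024, 1, 1, 12, 5, tzinfo=timezone.utc)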
@@ -96,9 +96,7 @@ class Gate(Exchange):
|
||||
return trades
|
||||
|
||||
def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
return safe_value_fallback2(order, order, 'id_stop', 'id')
|
||||
return order['id']
|
||||
return safe_value_fallback2(order, order, 'id_stop', 'id')
|
||||
|
||||
def fetch_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
|
||||
order = self.fetch_order(
|
||||
@@ -106,17 +104,19 @@ class Gate(Exchange):
|
||||
pair=pair,
|
||||
params={'stop': True}
|
||||
)
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
if order['status'] == 'closed':
|
||||
# Places a real order - which we need to fetch explicitly.
|
||||
new_orderid = order.get('info', {}).get('trade_id')
|
||||
if new_orderid:
|
||||
order1 = self.fetch_order(order_id=new_orderid, pair=pair, params=params)
|
||||
order1['id_stop'] = order1['id']
|
||||
order1['id'] = order_id
|
||||
order1['stopPrice'] = order.get('stopPrice')
|
||||
if order.get('status', 'open') == 'closed':
|
||||
# Places a real order - which we need to fetch explicitly.
|
||||
val = 'trade_id' if self.trading_mode == TradingMode.FUTURES else 'fired_order_id'
|
||||
|
||||
return order1
|
||||
if new_orderid := order.get('info', {}).get(val):
|
||||
order1 = self.fetch_order(order_id=new_orderid, pair=pair, params=params)
|
||||
order1['id_stop'] = order1['id']
|
||||
order1['id'] = order_id
|
||||
order1['type'] = 'stoploss'
|
||||
order1['stopPrice'] = order.get('stopPrice')
|
||||
order1['status_stop'] = 'triggered'
|
||||
|
||||
return order1
|
||||
return order
|
||||
|
||||
def cancel_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
|
||||
|
||||
@@ -825,7 +825,7 @@ class IFreqaiModel(ABC):
|
||||
"""
|
||||
if self.config.get("freqai_backtest_live_models", False) and len_dataframe_backtest == 0:
|
||||
logger.info(f"No data found for pair {pair} from "
|
||||
f"from { tr_backtest.start_fmt} to {tr_backtest.stop_fmt}. "
|
||||
f"from {tr_backtest.start_fmt} to {tr_backtest.stop_fmt}. "
|
||||
"Probably more than one training within the same candle period.")
|
||||
return False
|
||||
return True
|
||||
|
||||
@@ -36,8 +36,15 @@ class XGBoostRegressor(BaseRegressionModel):
|
||||
eval_set = None
|
||||
eval_weights = None
|
||||
else:
|
||||
eval_set = [(data_dictionary["test_features"], data_dictionary["test_labels"])]
|
||||
eval_weights = [data_dictionary['test_weights']]
|
||||
eval_set = [
|
||||
(data_dictionary["test_features"],
|
||||
data_dictionary["test_labels"]),
|
||||
(X, y)
|
||||
]
|
||||
eval_weights = [
|
||||
data_dictionary['test_weights'],
|
||||
data_dictionary['train_weights']
|
||||
]
|
||||
|
||||
sample_weight = data_dictionary["train_weights"]
|
||||
|
||||
|
||||
@@ -43,13 +43,11 @@ class TensorBoardCallback(BaseTensorBoardCallback):
|
||||
if not evals_log:
|
||||
return False
|
||||
|
||||
for data, metric in evals_log.items():
|
||||
for metric_name, log in metric.items():
|
||||
evals = ["validation", "train"]
|
||||
for metric, eval in zip(evals_log.items(), evals):
|
||||
for metric_name, log in metric[1].items():
|
||||
score = log[-1][0] if isinstance(log[-1], tuple) else log[-1]
|
||||
if data == "train":
|
||||
self.writer.add_scalar("train_loss", score, epoch)
|
||||
else:
|
||||
self.writer.add_scalar("valid_loss", score, epoch)
|
||||
self.writer.add_scalar(f"{eval}-{metric_name}", score, epoch)
|
||||
|
||||
return False
|
||||
|
||||
|
||||
@@ -152,7 +152,7 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
"""
assert isinstance(self.n_steps, int), "Either `n_steps` or `n_epochs` should be set."
n_batches = n_obs // self.batch_size
n_epochs = min(self.n_steps // n_batches, 1)
n_epochs = max(self.n_steps // n_batches, 1)
if n_epochs <= 10:
logger.warning(
f"Setting low n_epochs: {n_epochs}. "
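For illustration (not part of the diff): the corrected max() keeps at least one epoch but no longer caps the count at one. A quick worked example with assumed numbers:

# Assumed values, for illustration only.
n_obs = 6400        # rows in the training set
batch_size = 64
n_steps = 5000      # requested optimizer steps

n_batches = n_obs // batch_size           # 100 batches per epoch
n_epochs = max(n_steps // n_batches, 1)   # max(50, 1) -> 50 epochs
# The previous min(..., 1) would have clamped this to a single epoch.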
@@ -37,7 +37,6 @@ from freqtrade.rpc.rpc_types import (ProfitLossStr, RPCCancelMsg, RPCEntryMsg, R
|
||||
RPCExitMsg, RPCProtectionMsg)
|
||||
from freqtrade.strategy.interface import IStrategy
|
||||
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
|
||||
from freqtrade.util import FtPrecise
|
||||
from freqtrade.util.migrations import migrate_binance_futures_names
|
||||
from freqtrade.wallets import Wallets
|
||||
|
||||
@@ -667,7 +666,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
# We should decrease our position
|
||||
amount = self.exchange.amount_to_contract_precision(
|
||||
trade.pair,
|
||||
abs(float(FtPrecise(stake_amount * trade.leverage) / FtPrecise(current_exit_rate))))
|
||||
abs(float(stake_amount * trade.amount / trade.stake_amount)))
|
||||
|
||||
if amount == 0.0:
|
||||
logger.info("Amount to exit is 0.0 due to exchange limits - not exiting.")
|
||||
@@ -962,7 +961,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
# edge-case for now.
|
||||
min_stake_amount = self.exchange.get_min_pair_stake_amount(
|
||||
pair, enter_limit_requested,
|
||||
self.strategy.stoploss if not mode != 'pos_adjust' else 0.0,
|
||||
self.strategy.stoploss if not mode == 'pos_adjust' else 0.0,
|
||||
leverage)
|
||||
max_stake_amount = self.exchange.get_max_pair_stake_amount(
|
||||
pair, enter_limit_requested, leverage)
|
||||
@@ -1945,6 +1944,9 @@ class FreqtradeBot(LoggingMixin):
|
||||
|
||||
def _update_trade_after_fill(self, trade: Trade, order: Order) -> Trade:
|
||||
if order.status in constants.NON_OPEN_EXCHANGE_STATES:
|
||||
strategy_safe_wrapper(
|
||||
self.strategy.order_filled, default_retval=None)(
|
||||
pair=trade.pair, trade=trade, order=order, current_time=datetime.now(timezone.utc))
|
||||
# If a entry order was closed, force update on stoploss on exchange
|
||||
if order.ft_order_side == trade.entry_side:
|
||||
trade = self.cancel_stoploss_on_exchange(trade)
|
||||
|
||||
@@ -7,8 +7,6 @@ import logging
|
||||
import sys
|
||||
from typing import Any, List, Optional
|
||||
|
||||
from freqtrade.util.gc_setup import gc_set_threshold
|
||||
|
||||
|
||||
# check min. python version
|
||||
if sys.version_info < (3, 9): # pragma: no cover
|
||||
@@ -16,8 +14,10 @@ if sys.version_info < (3, 9): # pragma: no cover
|
||||
|
||||
from freqtrade import __version__
|
||||
from freqtrade.commands import Arguments
|
||||
from freqtrade.exceptions import FreqtradeException, OperationalException
|
||||
from freqtrade.constants import DOCS_LINK
|
||||
from freqtrade.exceptions import ConfigurationError, FreqtradeException, OperationalException
|
||||
from freqtrade.loggers import setup_logging_pre
|
||||
from freqtrade.util.gc_setup import gc_set_threshold
|
||||
|
||||
|
||||
logger = logging.getLogger('freqtrade')
|
||||
@@ -56,6 +56,9 @@ def main(sysargv: Optional[List[str]] = None) -> None:
|
||||
except KeyboardInterrupt:
|
||||
logger.info('SIGINT received, aborting ...')
|
||||
return_code = 0
|
||||
except ConfigurationError as e:
|
||||
logger.error(f"Configuration error: {e}\n"
|
||||
f"Please make sure to review the documentation at {DOCS_LINK}.")
|
||||
except FreqtradeException as e:
|
||||
logger.error(str(e))
|
||||
return_code = 2
|
||||
|
||||
@@ -107,9 +107,9 @@ class LookaheadAnalysisSubFunctions:
|
||||
csv_df = add_or_update_row(csv_df, new_row_data)
|
||||
|
||||
# Fill NaN values with a default value (e.g., 0)
|
||||
csv_df['total_signals'] = csv_df['total_signals'].fillna(0)
|
||||
csv_df['biased_entry_signals'] = csv_df['biased_entry_signals'].fillna(0)
|
||||
csv_df['biased_exit_signals'] = csv_df['biased_exit_signals'].fillna(0)
|
||||
csv_df['total_signals'] = csv_df['total_signals'].astype(int).fillna(0)
|
||||
csv_df['biased_entry_signals'] = csv_df['biased_entry_signals'].astype(int).fillna(0)
|
||||
csv_df['biased_exit_signals'] = csv_df['biased_exit_signals'].astype(int).fillna(0)
|
||||
|
||||
# Convert columns to integers
|
||||
csv_df['total_signals'] = csv_df['total_signals'].astype(int)
|
||||
@@ -121,14 +121,22 @@ class LookaheadAnalysisSubFunctions:
|
||||
|
||||
@staticmethod
|
||||
def calculate_config_overrides(config: Config):
|
||||
if config.get('enable_protections', False):
|
||||
# if protections are used globally, they can produce false positives.
|
||||
config['enable_protections'] = False
|
||||
logger.info('Protections were enabled. '
|
||||
'Disabling protections now '
|
||||
'since they could otherwise produce false positives.')
|
||||
if config['targeted_trade_amount'] < config['minimum_trade_amount']:
|
||||
# this combo doesn't make any sense.
|
||||
raise OperationalException(
|
||||
"Targeted trade amount can't be smaller than minimum trade amount."
|
||||
)
|
||||
if len(config['pairs']) > config['max_open_trades']:
|
||||
logger.info('Max_open_trades were less than amount of pairs. '
|
||||
'Set max_open_trades to amount of pairs just to avoid false positives.')
|
||||
if len(config['pairs']) > config.get('max_open_trades', 0):
|
||||
logger.info('Max_open_trades were less than amount of pairs '
|
||||
'or defined in the strategy. '
|
||||
'Set max_open_trades to amount of pairs '
|
||||
'just to avoid false positives.')
|
||||
config['max_open_trades'] = len(config['pairs'])
|
||||
|
||||
min_dry_run_wallet = 1000000000
|
||||
|
||||
@@ -33,8 +33,8 @@ from freqtrade.optimize.optimize_reports import (generate_backtest_stats, genera
|
||||
show_backtest_results,
|
||||
store_backtest_analysis_results,
|
||||
store_backtest_stats)
|
||||
from freqtrade.persistence import (LocalTrade, Order, PairLocks, Trade, disable_database_use,
|
||||
enable_database_use)
|
||||
from freqtrade.persistence import (CustomDataWrapper, LocalTrade, Order, PairLocks, Trade,
|
||||
disable_database_use, enable_database_use)
|
||||
from freqtrade.plugins.pairlistmanager import PairListManager
|
||||
from freqtrade.plugins.protectionmanager import ProtectionManager
|
||||
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
|
||||
@@ -337,6 +337,7 @@ class Backtesting:
|
||||
self.disable_database_use()
|
||||
PairLocks.reset_locks()
|
||||
Trade.reset_trades()
|
||||
CustomDataWrapper.reset_custom_data()
|
||||
self.rejected_trades = 0
|
||||
self.timedout_entry_orders = 0
|
||||
self.timedout_exit_orders = 0
|
||||
@@ -602,6 +603,11 @@ class Backtesting:
|
||||
if order and self._get_order_filled(order.ft_price, row):
|
||||
order.close_bt_order(current_date, trade)
|
||||
self._run_funding_fees(trade, current_date, force=True)
|
||||
strategy_safe_wrapper(
|
||||
self.strategy.order_filled,
|
||||
default_retval=None)(
|
||||
pair=trade.pair, trade=trade, # type: ignore[arg-type]
|
||||
order=order, current_time=current_date)
|
||||
|
||||
if not (order.ft_order_side == trade.exit_side and order.safe_amount == trade.amount):
|
||||
# trade is still open
|
||||
@@ -881,6 +887,9 @@ class Backtesting:
|
||||
precision_amount = self.exchange.get_precision_amount(pair)
|
||||
amount = amount_to_contract_precision(amount_p, precision_amount, self.precision_mode,
|
||||
contract_size)
|
||||
if not amount:
|
||||
# No amount left after truncating to precision.
|
||||
return trade
|
||||
# Backcalculate actual stake amount.
|
||||
stake_amount = amount * propose_rate / leverage
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ def _get_line_floatfmt(stake_currency: str) -> List[str]:
|
||||
"""
|
||||
Generate floatformat (goes in line with _generate_result_line())
|
||||
"""
|
||||
return ['s', 'd', '.2f', '.2f', f'.{decimals_per_coin(stake_currency)}f',
|
||||
return ['s', 'd', '.2f', f'.{decimals_per_coin(stake_currency)}f',
|
||||
'.2f', 'd', 's', 's']
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ def _get_line_header(first_column: str, stake_currency: str,
|
||||
"""
|
||||
Generate header lines (goes in line with _generate_result_line())
|
||||
"""
|
||||
return [first_column, direction, 'Avg Profit %', 'Cum Profit %',
|
||||
return [first_column, direction, 'Avg Profit %',
|
||||
f'Tot Profit {stake_currency}', 'Tot Profit %', 'Avg Duration',
|
||||
'Win Draw Loss Win%']
|
||||
|
||||
@@ -51,7 +51,7 @@ def text_table_bt_results(pair_results: List[Dict[str, Any]], stake_currency: st
|
||||
headers = _get_line_header('Pair', stake_currency)
|
||||
floatfmt = _get_line_floatfmt(stake_currency)
|
||||
output = [[
|
||||
t['key'], t['trades'], t['profit_mean_pct'], t['profit_sum_pct'], t['profit_total_abs'],
|
||||
t['key'], t['trades'], t['profit_mean_pct'], t['profit_total_abs'],
|
||||
t['profit_total_pct'], t['duration_avg'],
|
||||
generate_wins_draws_losses(t['wins'], t['draws'], t['losses'])
|
||||
] for t in pair_results]
|
||||
@@ -72,7 +72,6 @@ def text_table_exit_reason(exit_reason_stats: List[Dict[str, Any]], stake_curren
|
||||
'Exits',
|
||||
'Win Draws Loss Win%',
|
||||
'Avg Profit %',
|
||||
'Cum Profit %',
|
||||
f'Tot Profit {stake_currency}',
|
||||
'Tot Profit %',
|
||||
]
|
||||
@@ -80,7 +79,7 @@ def text_table_exit_reason(exit_reason_stats: List[Dict[str, Any]], stake_curren
|
||||
output = [[
|
||||
t.get('exit_reason', t.get('sell_reason')), t['trades'],
|
||||
generate_wins_draws_losses(t['wins'], t['draws'], t['losses']),
|
||||
t['profit_mean_pct'], t['profit_sum_pct'],
|
||||
t['profit_mean_pct'],
|
||||
fmt_coin(t['profit_total_abs'], stake_currency, False),
|
||||
t['profit_total_pct'],
|
||||
] for t in exit_reason_stats]
|
||||
@@ -105,7 +104,6 @@ def text_table_tags(tag_type: str, tag_results: List[Dict[str, Any]], stake_curr
|
||||
t['key']) > 0 else "OTHER",
|
||||
t['trades'],
|
||||
t['profit_mean_pct'],
|
||||
t['profit_sum_pct'],
|
||||
t['profit_total_abs'],
|
||||
t['profit_total_pct'],
|
||||
t['duration_avg'],
|
||||
@@ -166,7 +164,7 @@ def text_table_strategy(strategy_results, stake_currency: str) -> str:
|
||||
for t, dd in zip(strategy_results, drawdown)]
|
||||
|
||||
output = [[
|
||||
t['key'], t['trades'], t['profit_mean_pct'], t['profit_sum_pct'], t['profit_total_abs'],
|
||||
t['key'], t['trades'], t['profit_mean_pct'], t['profit_total_abs'],
|
||||
t['profit_total_pct'], t['duration_avg'],
|
||||
generate_wins_draws_losses(t['wins'], t['draws'], t['losses']), drawdown]
|
||||
for t, drawdown in zip(strategy_results, drawdown)]
|
||||
@@ -256,9 +254,9 @@ def text_table_add_metrics(strat_results: Dict) -> str:
|
||||
*short_metrics,
|
||||
('', ''), # Empty line to improve readability
|
||||
('Best Pair', f"{strat_results['best_pair']['key']} "
|
||||
f"{strat_results['best_pair']['profit_sum']:.2%}"),
|
||||
f"{strat_results['best_pair']['profit_total']:.2%}"),
|
||||
('Worst Pair', f"{strat_results['worst_pair']['key']} "
|
||||
f"{strat_results['worst_pair']['profit_sum']:.2%}"),
|
||||
f"{strat_results['worst_pair']['profit_total']:.2%}"),
|
||||
('Best trade', f"{best_trade['pair']} {best_trade['profit_ratio']:.2%}"),
|
||||
('Worst trade', f"{worst_trade['pair']} "
|
||||
f"{worst_trade['profit_ratio']:.2%}"),
|
||||
|
||||
@@ -215,7 +215,7 @@ def _get_resample_from_period(period: str) -> str:
# Weekly defaulting to Monday.
return '1W-MON'
if period == 'month':
return '1M'
return '1ME'
raise ValueError(f"Period {period} is not supported.")
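As background (illustrative, not part of the diff): recent pandas releases use 'ME' (month end) in place of the deprecated 'M' offset alias, which is why the monthly resample rule changes here. A minimal sketch, assuming a new enough pandas:

import pandas as pd

idx = pd.date_range("2024-01-01", periods=90, freq="D")
df = pd.DataFrame({"profit_abs": 1.0}, index=idx)
monthly = df.resample("1ME").sum()  # '1ME' == calendar month end; older pandas spelled this '1M'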
@@ -20,8 +20,10 @@ class SKDecimal(Integer):
|
||||
super().__init__(_low, _high, prior, base, transform, name, dtype)
|
||||
|
||||
def __repr__(self):
|
||||
return "Decimal(low={}, high={}, decimals={}, prior='{}', transform='{}')".format(
|
||||
self.low_orig, self.high_orig, self.decimals, self.prior, self.transform_)
|
||||
return (
|
||||
f"Decimal(low={self.low_orig}, high={self.high_orig}, decimals={self.decimals}, "
|
||||
f"prior='{self.prior}', transform='{self.transform_}')"
|
||||
)
|
||||
|
||||
def __contains__(self, point):
|
||||
if isinstance(point, list):
|
||||
|
||||
@@ -1,5 +1,6 @@
# flake8: noqa: F401

from freqtrade.persistence.custom_data import CustomDataWrapper
from freqtrade.persistence.key_value_store import KeyStoreKeys, KeyValueStore
from freqtrade.persistence.models import init_db
from freqtrade.persistence.pairlock_middleware import PairLocks
freqtrade/persistence/custom_data.py (new file, 174 lines)
@@ -0,0 +1,174 @@
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Any, ClassVar, List, Optional, Sequence
|
||||
|
||||
from sqlalchemy import DateTime, ForeignKey, Integer, String, Text, UniqueConstraint, select
from sqlalchemy.orm import Mapped, mapped_column, relationship

from freqtrade.constants import DATETIME_PRINT_FORMAT
from freqtrade.persistence.base import ModelBase, SessionType
from freqtrade.util import dt_now


logger = logging.getLogger(__name__)


class _CustomData(ModelBase):
    """
    CustomData database model
    Keeps records of metadata as key/value store
    for trades or global persistent values
    One to many relationship with Trades:
    - One trade can have many metadata entries
    - One metadata entry can only be associated with one Trade
    """
    __tablename__ = 'trade_custom_data'
    __allow_unmapped__ = True
    session: ClassVar[SessionType]

    # Uniqueness is ensured over ft_trade_id, cd_key -
    # each trade can hold at most one entry per key.
    __table_args__ = (UniqueConstraint('ft_trade_id', 'cd_key', name="_trade_id_cd_key"),)

    id = mapped_column(Integer, primary_key=True)
    ft_trade_id = mapped_column(Integer, ForeignKey('trades.id'), index=True)

    trade = relationship("Trade", back_populates="custom_data")

    cd_key: Mapped[str] = mapped_column(String(255), nullable=False)
    cd_type: Mapped[str] = mapped_column(String(25), nullable=False)
    cd_value: Mapped[str] = mapped_column(Text, nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, default=dt_now)
    updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)

    # Empty container value - not persisted, but filled with cd_value on query
    value: Any = None

    def __repr__(self):
        create_time = (self.created_at.strftime(DATETIME_PRINT_FORMAT)
                       if self.created_at is not None else None)
        update_time = (self.updated_at.strftime(DATETIME_PRINT_FORMAT)
                       if self.updated_at is not None else None)
        return (f'CustomData(id={self.id}, key={self.cd_key}, type={self.cd_type}, ' +
                f'value={self.cd_value}, trade_id={self.ft_trade_id}, created={create_time}, ' +
                f'updated={update_time})')

    @classmethod
    def query_cd(cls, key: Optional[str] = None,
                 trade_id: Optional[int] = None) -> Sequence['_CustomData']:
        """
        Get all CustomData, if trade_id is not specified
        return will be for generic values not tied to a trade
        :param trade_id: id of the Trade
        """
        filters = []
        if trade_id is not None:
            filters.append(_CustomData.ft_trade_id == trade_id)
        if key is not None:
            filters.append(_CustomData.cd_key.ilike(key))

        return _CustomData.session.scalars(select(_CustomData).filter(*filters)).all()


class CustomDataWrapper:
    """
    CustomData middleware class
    Abstracts the database layer away so it becomes optional - which will be necessary to support
    backtesting and hyperopt in the future.
    """

    use_db = True
    custom_data: List[_CustomData] = []
    unserialized_types = ['bool', 'float', 'int', 'str']

    @staticmethod
    def _convert_custom_data(data: _CustomData) -> _CustomData:
        if data.cd_type in CustomDataWrapper.unserialized_types:
            data.value = data.cd_value
            if data.cd_type == 'bool':
                data.value = data.cd_value.lower() == 'true'
            elif data.cd_type == 'int':
                data.value = int(data.cd_value)
            elif data.cd_type == 'float':
                data.value = float(data.cd_value)
        else:
            data.value = json.loads(data.cd_value)
        return data

    @staticmethod
    def reset_custom_data() -> None:
        """
        Resets all key-value pairs. Only active for backtesting mode.
        """
        if not CustomDataWrapper.use_db:
            CustomDataWrapper.custom_data = []

    @staticmethod
    def delete_custom_data(trade_id: int) -> None:
        _CustomData.session.query(_CustomData).filter(_CustomData.ft_trade_id == trade_id).delete()
        _CustomData.session.commit()

    @staticmethod
    def get_custom_data(*, trade_id: int, key: Optional[str] = None) -> List[_CustomData]:
        if CustomDataWrapper.use_db:
            filters = [
                _CustomData.ft_trade_id == trade_id,
            ]
            if key is not None:
                filters.append(_CustomData.cd_key.ilike(key))
            filtered_custom_data = _CustomData.session.scalars(select(_CustomData).filter(
                *filters)).all()

        else:
            filtered_custom_data = [
                data_entry for data_entry in CustomDataWrapper.custom_data
                if (data_entry.ft_trade_id == trade_id)
            ]
            if key is not None:
                filtered_custom_data = [
                    data_entry for data_entry in filtered_custom_data
                    if (data_entry.cd_key.casefold() == key.casefold())
                ]
        return [CustomDataWrapper._convert_custom_data(d) for d in filtered_custom_data]

    @staticmethod
    def set_custom_data(trade_id: int, key: str, value: Any) -> None:
        value_type = type(value).__name__

        if value_type not in CustomDataWrapper.unserialized_types:
            try:
                value_db = json.dumps(value)
            except TypeError as e:
                logger.warning(f"could not serialize {key} value due to {e}")
                return
        else:
            value_db = str(value)

        if trade_id is None:
            trade_id = 0

        custom_data = CustomDataWrapper.get_custom_data(trade_id=trade_id, key=key)
        if custom_data:
            data_entry = custom_data[0]
            data_entry.cd_value = value_db
            data_entry.updated_at = dt_now()
        else:
            data_entry = _CustomData(
                ft_trade_id=trade_id,
                cd_key=key,
                cd_type=value_type,
                cd_value=value_db,
                created_at=dt_now(),
            )
        data_entry.value = value

        if CustomDataWrapper.use_db and value_db is not None:
            _CustomData.session.add(data_entry)
            _CustomData.session.commit()
        else:
            if not custom_data:
                CustomDataWrapper.custom_data.append(data_entry)
            # Existing in-memory entries were already updated in place above.
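A minimal usage sketch (not part of the diff; the trade id and key name are illustrative) showing how the wrapper round-trips a typed value through the string-based store:

# Assumes init_db() has been called so _CustomData.session is bound.
CustomDataWrapper.set_custom_data(trade_id=1, key='entry_context', value={'rsi': 28.5})
entries = CustomDataWrapper.get_custom_data(trade_id=1, key='entry_context')
if entries:
    # cd_value holds the serialized JSON string, value the deserialized dict.
    print(entries[0].cd_type, entries[0].value)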
@@ -13,6 +13,7 @@ from sqlalchemy.pool import StaticPool

from freqtrade.exceptions import OperationalException
from freqtrade.persistence.base import ModelBase
from freqtrade.persistence.custom_data import _CustomData
from freqtrade.persistence.key_value_store import _KeyValueStoreModel
from freqtrade.persistence.migrations import check_migrate
from freqtrade.persistence.pairlock import PairLock

@@ -78,6 +79,8 @@ def init_db(db_url: str) -> None:
    Order.session = Trade.session
    PairLock.session = Trade.session
    _KeyValueStoreModel.session = Trade.session
    _CustomData.session = scoped_session(sessionmaker(bind=engine, autoflush=True),
                                         scopefunc=get_request_or_thread_id)

    previous_tables = inspect(engine).get_table_names()
    ModelBase.metadata.create_all(engine)
@@ -23,6 +23,7 @@ from freqtrade.exchange import (ROUND_DOWN, ROUND_UP, amount_to_contract_precisi
from freqtrade.leverage import interest
from freqtrade.misc import safe_value_fallback
from freqtrade.persistence.base import ModelBase, SessionType
from freqtrade.persistence.custom_data import CustomDataWrapper, _CustomData
from freqtrade.util import FtPrecise, dt_from_ts, dt_now, dt_ts, dt_ts_none


@@ -1214,6 +1215,40 @@ class LocalTrade:
                or (o.ft_is_open is True and o.status is not None)
        ]

    def set_custom_data(self, key: str, value: Any) -> None:
        """
        Set custom data for this trade
        :param key: key of the custom data
        :param value: value of the custom data (must be JSON serializable)
        """
        CustomDataWrapper.set_custom_data(trade_id=self.id, key=key, value=value)

    def get_custom_data(self, key: str, default: Any = None) -> Any:
        """
        Get custom data for this trade
        :param key: key of the custom data
        :param default: value to return if no custom data is found for this key
        """
        data = CustomDataWrapper.get_custom_data(trade_id=self.id, key=key)
        if data:
            return data[0].value
        return default

    def get_custom_data_entry(self, key: str) -> Optional[_CustomData]:
        """
        Get custom data for this trade
        :param key: key of the custom data
        """
        data = CustomDataWrapper.get_custom_data(trade_id=self.id, key=key)
        if data:
            return data[0]
        return None

    def get_all_custom_data(self) -> List[_CustomData]:
        """
        Get all custom data for this trade
        """
        return CustomDataWrapper.get_custom_data(trade_id=self.id)

    @property
    def nr_of_successful_entries(self) -> int:
        """
@@ -1469,6 +1504,9 @@ class Trade(ModelBase, LocalTrade):
    orders: Mapped[List[Order]] = relationship(
        "Order", order_by="Order.id", cascade="all, delete-orphan", lazy="selectin",
        innerjoin=True)  # type: ignore
    custom_data: Mapped[List[_CustomData]] = relationship(
        "_CustomData", cascade="all, delete-orphan",
        lazy="raise")

    exchange: Mapped[str] = mapped_column(String(25), nullable=False)  # type: ignore
    pair: Mapped[str] = mapped_column(String(25), nullable=False, index=True)  # type: ignore
@@ -1572,6 +1610,8 @@ class Trade(ModelBase, LocalTrade):
        for order in self.orders:
            Order.session.delete(order)

        CustomDataWrapper.delete_custom_data(trade_id=self.id)

        Trade.session.delete(self)
        Trade.commit()
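A hedged sketch of how these LocalTrade helpers might be used from a strategy callback; the 'peak_profit' key is purely illustrative:

def custom_exit(self, pair, trade, current_time, current_rate, current_profit, **kwargs):
    # Persist the highest profit seen for this trade, surviving bot restarts.
    peak = trade.get_custom_data('peak_profit', default=0.0)
    if current_profit > peak:
        trade.set_custom_data('peak_profit', current_profit)
    return None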
@@ -1,4 +1,5 @@

from freqtrade.persistence.custom_data import CustomDataWrapper
from freqtrade.persistence.pairlock_middleware import PairLocks
from freqtrade.persistence.trade_model import Trade

@@ -11,6 +12,7 @@ def disable_database_use(timeframe: str) -> None:
    PairLocks.use_db = False
    PairLocks.timeframe = timeframe
    Trade.use_db = False
    CustomDataWrapper.use_db = False


def enable_database_use() -> None:
@@ -20,6 +22,7 @@ def enable_database_use() -> None:
    PairLocks.use_db = True
    PairLocks.timeframe = ''
    Trade.use_db = True
    CustomDataWrapper.use_db = True


class FtNoDBContext:
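A small sketch of the intended backtest/hyperopt flow (values illustrative): with the database disabled, custom data lives only in the in-memory list on CustomDataWrapper:

disable_database_use('5m')               # backtest/hyperopt mode
CustomDataWrapper.set_custom_data(trade_id=1, key='note', value='kept in memory')
CustomDataWrapper.reset_custom_data()    # clears the in-memory store between runs
enable_database_use()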
@@ -3,7 +3,6 @@ Volatility pairlist filter
"""
import logging
import sys
from copy import deepcopy
from datetime import timedelta
from typing import Any, Dict, List, Optional

@@ -37,6 +36,7 @@ class VolatilityFilter(IPairList):
        self._max_volatility = pairlistconfig.get('max_volatility', sys.maxsize)
        self._refresh_period = pairlistconfig.get('refresh_period', 1440)
        self._def_candletype = self._config['candle_type_def']
        self._sort_direction: Optional[str] = pairlistconfig.get('sort_direction', None)

        self._pair_cache: TTLCache = TTLCache(maxsize=1000, ttl=self._refresh_period)

@@ -46,6 +46,9 @@ class VolatilityFilter(IPairList):
        if self._days > candle_limit:
            raise OperationalException("VolatilityFilter requires lookback_days to not "
                                       f"exceed exchange max request size ({candle_limit})")
        if self._sort_direction not in [None, 'asc', 'desc']:
            raise OperationalException("VolatilityFilter requires sort_direction to be "
                                       "either None (undefined), 'asc' or 'desc'")

    @property
    def needstickers(self) -> bool:
@@ -89,6 +92,13 @@ class VolatilityFilter(IPairList):
                "description": "Maximum Volatility",
                "help": "Maximum volatility a pair must have to be considered.",
            },
            "sort_direction": {
                "type": "option",
                "default": None,
                "options": ["", "asc", "desc"],
                "description": "Sort pairlist",
                "help": "Sort Pairlist ascending or descending by volatility.",
            },
            **IPairList.refresh_period_parameter()
        }

@@ -105,43 +115,61 @@ class VolatilityFilter(IPairList):
        since_ms = dt_ts(dt_floor_day(dt_now()) - timedelta(days=self._days))
        candles = self._exchange.refresh_ohlcv_with_cache(needed_pairs, since_ms=since_ms)

        if self._enabled:
            for p in deepcopy(pairlist):
                daily_candles = candles[(p, '1d', self._def_candletype)] if (
                    p, '1d', self._def_candletype) in candles else None
                if not self._validate_pair_loc(p, daily_candles):
                    pairlist.remove(p)
        return pairlist
        resulting_pairlist: List[str] = []
        volatilitys: Dict[str, float] = {}
        for p in pairlist:
            daily_candles = candles.get((p, '1d', self._def_candletype), None)

    def _validate_pair_loc(self, pair: str, daily_candles: Optional[DataFrame]) -> bool:
        """
        Validate trading range
        :param pair: Pair that's currently validated
        :param daily_candles: Downloaded daily candles
        :return: True if the pair can stay, false if it should be removed
        """
            volatility_avg = self._calculate_volatility(p, daily_candles)

            if volatility_avg is not None:
                if self._validate_pair_loc(p, volatility_avg):
                    resulting_pairlist.append(p)
                    volatilitys[p] = (
                        volatility_avg if volatility_avg and not np.isnan(volatility_avg) else 0
                    )
            else:
                self.log_once(f"Removed {p} from whitelist, no candles found.", logger.info)

        if self._sort_direction:
            resulting_pairlist = sorted(resulting_pairlist,
                                        key=lambda p: volatilitys[p],
                                        reverse=self._sort_direction == 'desc')
        return resulting_pairlist

    def _calculate_volatility(self, pair: str, daily_candles: DataFrame) -> Optional[float]:
        # Check symbol in cache
        if (cached_res := self._pair_cache.get(pair, None)) is not None:
            return cached_res
        if (volatility_avg := self._pair_cache.get(pair, None)) is not None:
            return volatility_avg

        result = False
        if daily_candles is not None and not daily_candles.empty:
            returns = (np.log(daily_candles["close"].shift(1) / daily_candles["close"]))
            returns.fillna(0, inplace=True)

            volatility_series = returns.rolling(window=self._days).std() * np.sqrt(self._days)
            volatility_avg = volatility_series.mean()
            self._pair_cache[pair] = volatility_avg

            if self._min_volatility <= volatility_avg <= self._max_volatility:
                result = True
            else:
                self.log_once(f"Removed {pair} from whitelist, because volatility "
                              f"over {self._days} {plural(self._days, 'day')} "
                              f"is: {volatility_avg:.3f} "
                              f"which is not in the configured range of "
                              f"{self._min_volatility}-{self._max_volatility}.",
                              logger.info)
                result = False
            self._pair_cache[pair] = result
            return volatility_avg
        else:
            return None

    def _validate_pair_loc(self, pair: str, volatility_avg: float) -> bool:
        """
        Validate trading range
        :param pair: Pair that's currently validated
        :param volatility_avg: Average volatility
        :return: True if the pair can stay, false if it should be removed
        """

        if self._min_volatility <= volatility_avg <= self._max_volatility:
            result = True
        else:
            self.log_once(f"Removed {pair} from whitelist, because volatility "
                          f"over {self._days} {plural(self._days, 'day')} "
                          f"is: {volatility_avg:.3f} "
                          f"which is not in the configured range of "
                          f"{self._min_volatility}-{self._max_volatility}.",
                          logger.info)
            result = False
        return result
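A hedged configuration sketch for the new sort_direction option, written as the Python equivalent of a pairlists entry (all values are examples only):

volatility_filter_entry = {
    "method": "VolatilityFilter",
    "lookback_days": 10,
    "min_volatility": 0.05,
    "max_volatility": 0.50,
    "sort_direction": "desc",   # new option: None (default), 'asc' or 'desc'
    "refresh_period": 1440,
}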
@@ -41,6 +41,7 @@ class VolumePairList(IPairList):
        self._number_pairs = self._pairlistconfig['number_assets']
        self._sort_key: Literal['quoteVolume'] = self._pairlistconfig.get('sort_key', 'quoteVolume')
        self._min_value = self._pairlistconfig.get('min_value', 0)
        self._max_value = self._pairlistconfig.get("max_value", None)
        self._refresh_period = self._pairlistconfig.get('refresh_period', 1800)
        self._pair_cache: TTLCache = TTLCache(maxsize=1, ttl=self._refresh_period)
        self._lookback_days = self._pairlistconfig.get('lookback_days', 0)
@@ -139,6 +140,12 @@ class VolumePairList(IPairList):
                "description": "Minimum value",
                "help": "Minimum value to use for filtering the pairlist.",
            },
            "max_value": {
                "type": "number",
                "default": None,
                "description": "Maximum value",
                "help": "Maximum value to use for filtering the pairlist.",
            },
            **IPairList.refresh_period_parameter(),
            "lookback_days": {
                "type": "number",
@@ -270,6 +277,9 @@ class VolumePairList(IPairList):
            if self._min_value > 0:
                filtered_tickers = [
                    v for v in filtered_tickers if v[self._sort_key] > self._min_value]
            if self._max_value is not None:
                filtered_tickers = [
                    v for v in filtered_tickers if v[self._sort_key] < self._max_value]

            sorted_tickers = sorted(filtered_tickers, reverse=True, key=lambda t: t[self._sort_key])
@@ -2,7 +2,6 @@
Rate of change pairlist filter
"""
import logging
from copy import deepcopy
from datetime import timedelta
from typing import Any, Dict, List, Optional

@@ -32,6 +31,7 @@ class RangeStabilityFilter(IPairList):
        self._max_rate_of_change = pairlistconfig.get('max_rate_of_change')
        self._refresh_period = pairlistconfig.get('refresh_period', 86400)
        self._def_candletype = self._config['candle_type_def']
        self._sort_direction: Optional[str] = pairlistconfig.get('sort_direction', None)

        self._pair_cache: TTLCache = TTLCache(maxsize=1000, ttl=self._refresh_period)

@@ -41,6 +41,9 @@ class RangeStabilityFilter(IPairList):
        if self._days > candle_limit:
            raise OperationalException("RangeStabilityFilter requires lookback_days to not "
                                       f"exceed exchange max request size ({candle_limit})")
        if self._sort_direction not in [None, 'asc', 'desc']:
            raise OperationalException("RangeStabilityFilter requires sort_direction to be "
                                       "either None (undefined), 'asc' or 'desc'")

    @property
    def needstickers(self) -> bool:
@@ -87,6 +90,13 @@ class RangeStabilityFilter(IPairList):
                "description": "Maximum Rate of Change",
                "help": "Maximum rate of change to filter pairs.",
            },
            "sort_direction": {
                "type": "option",
                "default": None,
                "options": ["", "asc", "desc"],
                "description": "Sort pairlist",
                "help": "Sort Pairlist ascending or descending by rate of change.",
            },
            **IPairList.refresh_period_parameter()
        }

@@ -103,45 +113,62 @@ class RangeStabilityFilter(IPairList):
        since_ms = dt_ts(dt_floor_day(dt_now()) - timedelta(days=self._days + 1))
        candles = self._exchange.refresh_ohlcv_with_cache(needed_pairs, since_ms=since_ms)

        if self._enabled:
            for p in deepcopy(pairlist):
                daily_candles = candles[(p, '1d', self._def_candletype)] if (
                    p, '1d', self._def_candletype) in candles else None
                if not self._validate_pair_loc(p, daily_candles):
                    pairlist.remove(p)
        return pairlist
        resulting_pairlist: List[str] = []
        pct_changes: Dict[str, float] = {}

    def _validate_pair_loc(self, pair: str, daily_candles: Optional[DataFrame]) -> bool:
        """
        Validate trading range
        :param pair: Pair that's currently validated
        :param daily_candles: Downloaded daily candles
        :return: True if the pair can stay, false if it should be removed
        """
        for p in pairlist:
            daily_candles = candles.get((p, '1d', self._def_candletype), None)

            pct_change = self._calculate_rate_of_change(p, daily_candles)

            if pct_change is not None:
                if self._validate_pair_loc(p, pct_change):
                    resulting_pairlist.append(p)
                    pct_changes[p] = pct_change
            else:
                self.log_once(f"Removed {p} from whitelist, no candles found.", logger.info)

        if self._sort_direction:
            resulting_pairlist = sorted(resulting_pairlist,
                                        key=lambda p: pct_changes[p],
                                        reverse=self._sort_direction == 'desc')
        return resulting_pairlist

    def _calculate_rate_of_change(self, pair: str, daily_candles: DataFrame) -> Optional[float]:
        # Check symbol in cache
        if (cached_res := self._pair_cache.get(pair, None)) is not None:
            return cached_res

        result = True
        if (pct_change := self._pair_cache.get(pair, None)) is not None:
            return pct_change
        if daily_candles is not None and not daily_candles.empty:

            highest_high = daily_candles['high'].max()
            lowest_low = daily_candles['low'].min()
            pct_change = ((highest_high - lowest_low) / lowest_low) if lowest_low > 0 else 0
            if pct_change < self._min_rate_of_change:
                self.log_once(f"Removed {pair} from whitelist, because rate of change "
                              f"over {self._days} {plural(self._days, 'day')} is {pct_change:.3f}, "
                              f"which is below the threshold of {self._min_rate_of_change}.",
                              logger.info)
                result = False
            if self._max_rate_of_change:
                if pct_change > self._max_rate_of_change:
                    self.log_once(
                        f"Removed {pair} from whitelist, because rate of change "
                        f"over {self._days} {plural(self._days, 'day')} is {pct_change:.3f}, "
                        f"which is above the threshold of {self._max_rate_of_change}.",
                        logger.info)
                    result = False
            self._pair_cache[pair] = result
            self._pair_cache[pair] = pct_change
            return pct_change
        else:
            self.log_once(f"Removed {pair} from whitelist, no candles found.", logger.info)
            return None

    def _validate_pair_loc(self, pair: str, pct_change: float) -> bool:
        """
        Validate trading range
        :param pair: Pair that's currently validated
        :param pct_change: Rate of change
        :return: True if the pair can stay, false if it should be removed
        """

        result = True
        if pct_change < self._min_rate_of_change:
            self.log_once(f"Removed {pair} from whitelist, because rate of change "
                          f"over {self._days} {plural(self._days, 'day')} is {pct_change:.3f}, "
                          f"which is below the threshold of {self._min_rate_of_change}.",
                          logger.info)
            result = False
        if self._max_rate_of_change:
            if pct_change > self._max_rate_of_change:
                self.log_once(
                    f"Removed {pair} from whitelist, because rate of change "
                    f"over {self._days} {plural(self._days, 'day')} is {pct_change:.3f}, "
                    f"which is above the threshold of {self._max_rate_of_change}.",
                    logger.info)
                result = False
        return result
@@ -14,7 +14,7 @@ from freqtrade.data.btanalysis import (delete_backtest_result, get_backtest_resu
                                        get_backtest_resultlist, load_and_merge_backtest_result,
                                        update_backtest_metadata)
from freqtrade.enums import BacktestState
from freqtrade.exceptions import DependencyException, OperationalException
from freqtrade.exceptions import ConfigurationError, DependencyException, OperationalException
from freqtrade.exchange.common import remove_exchange_credentials
from freqtrade.misc import deep_merge_dicts, is_file_in_dir
from freqtrade.rpc.api_server.api_schemas import (BacktestHistoryEntry, BacktestMetadataUpdate,
@@ -98,10 +98,12 @@ def __run_backtest_bg(btconfig: Config):

        logger.info("Backtest finished.")

    except ConfigurationError as e:
        logger.error(f"Backtesting encountered a configuration Error: {e}")

    except (Exception, OperationalException, DependencyException) as e:
        logger.exception(f"Backtesting caused an error: {e}")
        ApiBG.bt['bt_error'] = str(e)
        pass
    finally:
        ApiBG.bgtask_running = False
@@ -559,3 +559,7 @@ class SysInfo(BaseModel):

class Health(BaseModel):
    last_process: Optional[datetime] = None
    last_process_ts: Optional[int] = None
    bot_start: Optional[datetime] = None
    bot_start_ts: Optional[int] = None
    bot_startup: Optional[datetime] = None
    bot_startup_ts: Optional[int] = None
@@ -77,9 +77,8 @@ class CryptoToFiatConverter(LoggingMixin):
                return
            # If the request is not a 429 error we want to raise the normal error
            logger.error(
                "Could not load FIAT Cryptocurrency map for the following problem: {}".format(
                    request_exception
                )
                "Could not load FIAT Cryptocurrency map for the following problem: "
                f"{request_exception}"
            )
        except (Exception) as exception:
            logger.error(
@@ -291,6 +291,10 @@ class RPC:
                    profit_str += f" ({fiat_profit:.2f})"
                    fiat_profit_sum = fiat_profit if isnan(fiat_profit_sum) \
                        else fiat_profit_sum + fiat_profit
                else:
                    profit_str += f" ({trade_profit:.2f})"
                    fiat_profit_sum = trade_profit if isnan(fiat_profit_sum) \
                        else fiat_profit_sum + trade_profit

                active_attempt_side_symbols = [
                    '*' if (oo and oo.ft_order_side == trade.entry_side) else '**'
@@ -317,6 +321,8 @@ class RPC:
            profitcol = "Profit"
            if self._fiat_converter:
                profitcol += " (" + fiat_display_currency + ")"
            else:
                profitcol += " (" + stake_currency + ")"

            columns = [
                'ID L/S' if nonspot else 'ID',
@@ -927,6 +933,7 @@ class RPC:
                is_short=is_short,
                enter_tag=enter_tag,
                leverage_=leverage,
                mode='pos_adjust' if trade else 'initial'
        ):
            Trade.commit()
            trade = Trade.get_trades([Trade.is_open.is_(True), Trade.pair == pair]).first()
@@ -999,6 +1006,32 @@ class RPC:
            'cancel_order_count': c_count,
        }

    def _rpc_list_custom_data(self, trade_id: int, key: Optional[str]) -> List[Dict[str, Any]]:
        # Query for trade
        trade = Trade.get_trades(trade_filter=[Trade.id == trade_id]).first()
        if trade is None:
            return []
        # Query custom_data
        custom_data = []
        if key:
            data = trade.get_custom_data(key=key)
            if data:
                custom_data = [data]
        else:
            custom_data = trade.get_all_custom_data()
        return [
            {
                'id': data_entry.id,
                'ft_trade_id': data_entry.ft_trade_id,
                'cd_key': data_entry.cd_key,
                'cd_type': data_entry.cd_type,
                'cd_value': data_entry.cd_value,
                'created_at': data_entry.created_at,
                'updated_at': data_entry.updated_at
            }
            for data_entry in custom_data
        ]

    def _rpc_performance(self) -> List[Dict[str, Any]]:
        """
        Handler for performance.
@@ -1155,7 +1188,7 @@ class RPC:
        }
        if has_content:

            dataframe.loc[:, '__date_ts'] = dataframe.loc[:, 'date'].view(int64) // 1000 // 1000
            dataframe.loc[:, '__date_ts'] = dataframe.loc[:, 'date'].astype(int64) // 1000 // 1000
            # Move signal close to separate column when signal for easy plotting
            for sig_type in signals.keys():
                if sig_type in dataframe.columns:
@@ -1333,19 +1366,40 @@ class RPC:

    def health(self) -> Dict[str, Optional[Union[str, int]]]:
        last_p = self._freqtrade.last_process
        if last_p is None:
            return {
                "last_process": None,
                "last_process_loc": None,
                "last_process_ts": None,
            }

        return {
            "last_process": str(last_p),
            "last_process_loc": format_date(last_p.astimezone(tzlocal())),
            "last_process_ts": int(last_p.timestamp()),
        res: Dict[str, Union[None, str, int]] = {
            "last_process": None,
            "last_process_loc": None,
            "last_process_ts": None,
            "bot_start": None,
            "bot_start_loc": None,
            "bot_start_ts": None,
            "bot_startup": None,
            "bot_startup_loc": None,
            "bot_startup_ts": None,
        }

        if last_p is not None:
            res.update({
                "last_process": str(last_p),
                "last_process_loc": format_date(last_p.astimezone(tzlocal())),
                "last_process_ts": int(last_p.timestamp()),
            })

        if (bot_start := KeyValueStore.get_datetime_value(KeyStoreKeys.BOT_START_TIME)):
            res.update({
                "bot_start": str(bot_start),
                "bot_start_loc": format_date(bot_start.astimezone(tzlocal())),
                "bot_start_ts": int(bot_start.timestamp()),
            })
        if (bot_startup := KeyValueStore.get_datetime_value(KeyStoreKeys.STARTUP_TIME)):
            res.update({
                "bot_startup": str(bot_startup),
                "bot_startup_loc": format_date(bot_startup.astimezone(tzlocal())),
                "bot_startup_ts": int(bot_startup.timestamp()),
            })

        return res

    def _update_market_direction(self, direction: MarketDirection) -> None:
        self._freqtrade.strategy.market_direction = direction
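A brief sketch of consuming the extended health() payload; rpc stands for any RPC instance, and the *_loc keys follow from the dictionary built above:

health = rpc.health()
for key in ('last_process_loc', 'bot_start_loc', 'bot_startup_loc'):
    # Each *_loc entry is a formatted local time, or None before first use.
    print(key, health[key])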
@@ -33,7 +33,7 @@ from freqtrade.misc import chunks, plural
from freqtrade.persistence import Trade
from freqtrade.rpc import RPC, RPCException, RPCHandler
from freqtrade.rpc.rpc_types import RPCEntryMsg, RPCExitMsg, RPCOrderMsg, RPCSendMsg
from freqtrade.util import dt_humanize, fmt_coin, round_value
from freqtrade.util import dt_humanize, fmt_coin, format_date, round_value


MAX_MESSAGE_LENGTH = MessageLimit.MAX_TEXT_LENGTH
@@ -243,6 +243,7 @@ class Telegram(RPCHandler):
            CommandHandler('version', self._version),
            CommandHandler('marketdir', self._changemarketdir),
            CommandHandler('order', self._order),
            CommandHandler('list_custom_data', self._list_custom_data),
        ]
        callbacks = [
            CallbackQueryHandler(self._status_table, pattern='update_status_table'),
@@ -355,9 +356,11 @@ class Telegram(RPCHandler):
        if msg.get('leverage') and msg.get('leverage', 1.0) != 1.0:
            message += f" ({msg['leverage']:.3g}x)"
        message += "`\n"
        message += f"*Open Rate:* `{fmt_coin(msg['open_rate'], msg['quote_currency'])}`\n"
        message += f"*Open Rate:* `{round_value(msg['open_rate'], 8)} {msg['quote_currency']}`\n"
        if msg['type'] == RPCMessageType.ENTRY and msg['current_rate']:
            message += f"*Current Rate:* `{fmt_coin(msg['current_rate'], msg['quote_currency'])}`\n"
            message += (
                f"*Current Rate:* `{round_value(msg['current_rate'], 8)} {msg['quote_currency']}`\n"
            )

        profit_fiat_extra = self.__format_profit_fiat(msg, 'stake_amount')  # type: ignore
        total = fmt_coin(msg['stake_amount'], msg['quote_currency'])
@@ -562,19 +565,19 @@ class Telegram(RPCHandler):
            lines.append(f"*{wording} #{order_nr}:*")
            if order_nr == 1:
                lines.append(
                    f"*Amount:* {cur_entry_amount:.8g} "
                    f"*Amount:* {round_value(cur_entry_amount, 8)} "
                    f"({fmt_coin(order['cost'], quote_currency)})"
                )
                lines.append(f"*Average Price:* {cur_entry_average:.8g}")
                lines.append(f"*Average Price:* {round_value(cur_entry_average, 8)}")
            else:
                # TODO: This calculation ignores fees.
                price_to_1st_entry = ((cur_entry_average - first_avg) / first_avg)
                if is_open:
                    lines.append("({})".format(dt_humanize(order["order_filled_date"],
                                               granularity=["day", "hour", "minute"])))
                lines.append(f"*Amount:* {cur_entry_amount:.8g} "
                lines.append(f"*Amount:* {round_value(cur_entry_amount, 8)} "
                             f"({fmt_coin(order['cost'], quote_currency)})")
                lines.append(f"*Average {wording} Price:* {cur_entry_average:.8g} "
                lines.append(f"*Average {wording} Price:* {round_value(cur_entry_average, 8)} "
                             f"({price_to_1st_entry:.2%} from 1st entry rate)")
                lines.append(f"*Order Filled:* {order['order_filled_date']}")

@@ -686,11 +689,11 @@ class Telegram(RPCHandler):
        ])

        lines.extend([
            "*Open Rate:* `{open_rate:.8g}`",
            "*Close Rate:* `{close_rate:.8g}`" if r['close_rate'] else "",
            f"*Open Rate:* `{round_value(r['open_rate'], 8)}`",
            f"*Close Rate:* `{round_value(r['close_rate'], 8)}`" if r['close_rate'] else "",
            "*Open Date:* `{open_date}`",
            "*Close Date:* `{close_date}`" if r['close_date'] else "",
            " \n*Current Rate:* `{current_rate:.8g}`" if r['is_open'] else "",
            f" \n*Current Rate:* `{round_value(r['current_rate'], 8)}`" if r['is_open'] else "",
            ("*Unrealized Profit:* " if r['is_open'] else "*Close Profit: *")
            + "`{profit_ratio:.2%}` `({profit_abs_r})`",
        ])
@@ -711,9 +714,9 @@ class Telegram(RPCHandler):
                         "`({initial_stop_loss_ratio:.2%})`")

        # Adding stoploss and stoploss percentage only if it is not None
        lines.append("*Stoploss:* `{stop_loss_abs:.8g}` " +
        lines.append(f"*Stoploss:* `{round_value(r['stop_loss_abs'], 8)}` " +
                     ("`({stop_loss_ratio:.2%})`" if r['stop_loss_ratio'] else ""))
        lines.append("*Stoploss distance:* `{stoploss_current_dist:.8g}` "
        lines.append(f"*Stoploss distance:* `{round_value(r['stoploss_current_dist'], 8)}` "
                     "`({stoploss_current_dist_ratio:.2%})`")
        if r.get('open_orders'):
            lines.append(
@@ -1667,6 +1670,8 @@ class Telegram(RPCHandler):
            "*/marketdir [long | short | even | none]:* `Updates the user managed variable "
            "that represents the current market direction. If no direction is provided `"
            "`the currently set market direction will be output.` \n"
            "*/list_custom_data <trade_id> <key>:* `List custom_data for Trade ID & Key combo.`\n"
            "`If no Key is supplied it will list all key-value pairs found for that Trade ID.`"

            "_Statistics_\n"
            "------------\n"
@@ -1689,7 +1694,7 @@ class Telegram(RPCHandler):
            "*/stats:* `Shows Wins / losses by Sell reason as well as "
            "Avg. holding durations for buys and sells.`\n"
            "*/help:* `This help message`\n"
            "*/version:* `Show version`"
            "*/version:* `Show version`\n"
        )

        await self._send_msg(message, parse_mode=ParseMode.MARKDOWN)
@@ -1701,7 +1706,9 @@ class Telegram(RPCHandler):
        Shows the last process timestamp
        """
        health = self._rpc.health()
        message = f"Last process: `{health['last_process_loc']}`"
        message = f"Last process: `{health['last_process_loc']}`\n"
        message += f"Initial bot start: `{health['bot_start_loc']}`\n"
        message += f"Last bot restart: `{health['bot_startup_loc']}`"
        await self._send_msg(message)

    @authorized_only
@@ -1766,6 +1773,53 @@ class Telegram(RPCHandler):
            f"*Current state:* `{val['state']}`"
        )

    @authorized_only
    async def _list_custom_data(self, update: Update, context: CallbackContext) -> None:
        """
        Handler for /list_custom_data <id> <key>.
        List custom_data for specified trade (and key if supplied).
        :param bot: telegram bot
        :param update: message update
        :return: None
        """
        try:
            if not context.args or len(context.args) == 0:
                raise RPCException("Trade-id not set.")
            trade_id = int(context.args[0])
            key = None if len(context.args) < 2 else str(context.args[1])

            results = self._rpc._rpc_list_custom_data(trade_id, key)
            messages = []
            if len(results) > 0:
                messages.append(
                    'Found custom-data entr' + ('ies: ' if len(results) > 1 else 'y: ')
                )
                for result in results:
                    lines = [
                        f"*Key:* `{result['cd_key']}`",
                        f"*ID:* `{result['id']}`",
                        f"*Trade ID:* `{result['ft_trade_id']}`",
                        f"*Type:* `{result['cd_type']}`",
                        f"*Value:* `{result['cd_value']}`",
                        f"*Create Date:* `{format_date(result['created_at'])}`",
                        f"*Update Date:* `{format_date(result['updated_at'])}`"
                    ]
                    # Filter empty lines using list-comprehension
                    messages.append("\n".join([line for line in lines if line]))
                for msg in messages:
                    if len(msg) > MAX_MESSAGE_LENGTH:
                        msg = "Message dropped because length exceeds "
                        msg += f"maximum allowed characters: {MAX_MESSAGE_LENGTH}"
                        logger.warning(msg)
                    await self._send_msg(msg)
            else:
                message = f"Didn't find any custom-data entries for Trade ID: `{trade_id}`"
                message += f" and Key: `{key}`." if key is not None else ""
                await self._send_msg(message)

        except RPCException as e:
            await self._send_msg(str(e))

    async def _update_msg(self, query: CallbackQuery, msg: str, callback_path: str = "",
                          reload_able: bool = False, parse_mode: str = ParseMode.MARKDOWN) -> None:
        if reload_able:
@@ -372,6 +372,19 @@ class IStrategy(ABC, HyperStrategyMixin):
        """
        return True

    def order_filled(self, pair: str, trade: Trade, order: Order,
                     current_time: datetime, **kwargs) -> None:
        """
        Called right after an order fills.
        Will be called for all order types (entry, exit, stoploss, position adjustment).
        :param pair: Pair for trade
        :param trade: trade object.
        :param order: Order object.
        :param current_time: datetime object, containing the current datetime
        :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
        """
        pass

    def custom_stoploss(self, pair: str, trade: Trade, current_time: datetime, current_rate: float,
                        current_profit: float, after_fill: bool, **kwargs) -> Optional[float]:
        """
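A hedged example of overriding the new callback in a strategy; the custom-data key is illustrative and simply ties this hook to the custom-data API added above:

def order_filled(self, pair: str, trade: Trade, order: Order,
                 current_time: datetime, **kwargs) -> None:
    # Remember the fill rate of the first successful entry for later decisions.
    if order.ft_order_side == trade.entry_side and trade.nr_of_successful_entries == 1:
        trade.set_custom_data('first_fill_rate', order.safe_price)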
@@ -35,7 +35,7 @@
    "project_root = \"somedir/freqtrade\"\n",
    "i=0\n",
    "try:\n",
    " os.chdirdir(project_root)\n",
    " os.chdir(project_root)\n",
    " assert Path('LICENSE').is_file()\n",
    "except:\n",
    " while i<4 and (not Path('LICENSE').is_file()):\n",
@@ -181,7 +181,7 @@
    "\n",
    "# if backtest_dir points to a directory, it'll automatically load the last backtest file.\n",
    "backtest_dir = config[\"user_data_dir\"] / \"backtest_results\"\n",
    "# backtest_dir can also point to a specific file \n",
    "# backtest_dir can also point to a specific file\n",
    "# backtest_dir = config[\"user_data_dir\"] / \"backtest_results/backtest-result-2020-07-01_20-04-22.json\""
   ]
  },
@@ -300,3 +300,17 @@ def leverage(self, pair: str, current_time: datetime, current_rate: float,
        :return: A leverage amount, which is between 1.0 and max_leverage.
        """
        return 1.0

    def order_filled(self, pair: str, trade: 'Trade', order: 'Order',
                     current_time: datetime, **kwargs) -> None:
        """
        Called right after an order fills.
        Will be called for all order types (entry, exit, stoploss, position adjustment).
        :param pair: Pair for trade
        :param trade: trade object.
        :param order: Order object.
        :param current_time: datetime object, containing the current datetime
        :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
        """
        pass
@@ -10,6 +10,15 @@ def decimals_per_coin(coin: str):
    return DECIMALS_PER_COIN.get(coin, DECIMAL_PER_COIN_FALLBACK)


def strip_trailing_zeros(value: str) -> str:
    """
    Strip trailing zeros from a string
    :param value: Value to be stripped
    :return: Stripped value
    """
    return value.rstrip('0').rstrip('.')


def round_value(value: float, decimals: int, keep_trailing_zeros=False) -> str:
    """
    Round value to given decimals
@@ -20,7 +29,7 @@ def round_value(value: float, decimals: int, keep_trailing_zeros=False) -> str:
    """
    val = f"{value:.{decimals}f}"
    if not keep_trailing_zeros:
        val = val.rstrip('0').rstrip('.')
        val = strip_trailing_zeros(val)
    return val


@@ -34,7 +43,6 @@ def fmt_coin(
    :param keep_trailing_zeros: Keep trailing zeros "222.200" vs. "222.2"
    :return: Formatted / rounded value (with or without coin name)
    """
    val = f"{value:.{decimals_per_coin(coin)}f}"
    val = round_value(value, decimals_per_coin(coin), keep_trailing_zeros)
    if show_coin_name:
        val = f"{val} {coin}"
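Illustrative calls showing the behaviour the refactor preserves (expected results follow directly from the code above):

round_value(222.200, 3)                             # '222.2'  - trailing zeros stripped
round_value(222.200, 3, keep_trailing_zeros=True)   # '222.200'
strip_trailing_zeros('0.010')                       # '0.01'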
@@ -63,7 +63,7 @@ def migrate_binance_futures_data(config: Config):
        # only act on new futures
        return

    from freqtrade.data.history.idatahandler import get_datahandler
    from freqtrade.data.history import get_datahandler
    dhc = get_datahandler(config['datadir'], config['dataformat_ohlcv'])

    paircombs = dhc.ohlcv_get_available_data(