Mirror of https://github.com/freqtrade/freqtrade.git (synced 2025-12-02 01:53:05 +00:00)

Compare commits: 907 commits
.github/workflows/ci.yml (vendored): 44 changes

@@ -25,10 +25,10 @@ jobs:
   strategy:
     matrix:
       os: [ ubuntu-20.04, ubuntu-22.04 ]
-      python-version: ["3.8", "3.9", "3.10", "3.11"]
+      python-version: ["3.9", "3.10", "3.11"]

   steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4

   - name: Set up Python
     uses: actions/setup-python@v4
@@ -90,7 +90,7 @@ jobs:

   - name: Backtesting (multi)
     run: |
-      cp config_examples/config_bittrex.example.json config.json
+      cp tests/testdata/config.tests.json config.json
       freqtrade create-userdir --userdir user_data
       freqtrade new-strategy -s AwesomeStrategy
       freqtrade new-strategy -s AwesomeStrategyMin --template minimal
@@ -98,7 +98,7 @@ jobs:

   - name: Hyperopt
     run: |
-      cp config_examples/config_bittrex.example.json config.json
+      cp tests/testdata/config.tests.json config.json
       freqtrade create-userdir --userdir user_data
       freqtrade hyperopt --datadir tests/testdata -e 6 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all

@@ -108,7 +108,7 @@ jobs:

   - name: Run Ruff
     run: |
-      ruff check --format=github .
+      ruff check --output-format=github .

   - name: Mypy
     run: |
@@ -127,10 +127,10 @@ jobs:
   strategy:
     matrix:
       os: [ macos-latest ]
-      python-version: ["3.8", "3.9", "3.10", "3.11"]
+      python-version: ["3.9", "3.10", "3.11"]

   steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4

   - name: Set up Python
     uses: actions/setup-python@v4
@@ -200,14 +200,14 @@ jobs:

   - name: Backtesting
     run: |
-      cp config_examples/config_bittrex.example.json config.json
+      cp tests/testdata/config.tests.json config.json
       freqtrade create-userdir --userdir user_data
       freqtrade new-strategy -s AwesomeStrategyAdv --template advanced
       freqtrade backtesting --datadir tests/testdata --strategy AwesomeStrategyAdv

   - name: Hyperopt
     run: |
-      cp config_examples/config_bittrex.example.json config.json
+      cp tests/testdata/config.tests.json config.json
       freqtrade create-userdir --userdir user_data
       freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all

@@ -217,7 +217,7 @@ jobs:

   - name: Run Ruff
     run: |
-      ruff check --format=github .
+      ruff check --output-format=github .

   - name: Mypy
     run: |
@@ -237,10 +237,10 @@ jobs:
   strategy:
     matrix:
       os: [ windows-latest ]
-      python-version: ["3.8", "3.9", "3.10", "3.11"]
+      python-version: ["3.9", "3.10", "3.11"]

   steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4

   - name: Set up Python
     uses: actions/setup-python@v4
@@ -275,19 +275,19 @@ jobs:

   - name: Backtesting
     run: |
-      cp config_examples/config_bittrex.example.json config.json
+      cp tests/testdata/config.tests.json config.json
       freqtrade create-userdir --userdir user_data
       freqtrade backtesting --datadir tests/testdata --strategy SampleStrategy

   - name: Hyperopt
     run: |
-      cp config_examples/config_bittrex.example.json config.json
+      cp tests/testdata/config.tests.json config.json
       freqtrade create-userdir --userdir user_data
       freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all

   - name: Run Ruff
     run: |
-      ruff check --format=github .
+      ruff check --output-format=github .

   - name: Mypy
     run: |
@@ -304,7 +304,7 @@ jobs:
 mypy_version_check:
   runs-on: ubuntu-22.04
   steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4

   - name: Set up Python
     uses: actions/setup-python@v4
@@ -319,7 +319,7 @@ jobs:
 pre-commit:
   runs-on: ubuntu-22.04
   steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4

   - uses: actions/setup-python@v4
     with:
@@ -329,7 +329,7 @@ jobs:
 docs_check:
   runs-on: ubuntu-22.04
   steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4

   - name: Documentation syntax
     run: |
@@ -359,7 +359,7 @@ jobs:
   # Run pytest with "live" checks
   runs-on: ubuntu-22.04
   steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4

   - name: Set up Python
     uses: actions/setup-python@v4
@@ -443,12 +443,12 @@ jobs:
   if: (github.event_name == 'push' || github.event_name == 'schedule' || github.event_name == 'release') && github.repository == 'freqtrade/freqtrade'

   steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4

   - name: Set up Python
     uses: actions/setup-python@v4
     with:
-      python-version: "3.9"
+      python-version: "3.11"

   - name: Extract branch name
     shell: bash
@@ -515,7 +515,7 @@ jobs:
   if: (github.event_name == 'push' || github.event_name == 'schedule' || github.event_name == 'release') && github.repository == 'freqtrade/freqtrade'

   steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4

   - name: Extract branch name
     shell: bash
.github/workflows/docker_update_readme.yml (vendored): 2 changes

@@ -8,7 +8,7 @@ jobs:
 dockerHubDescription:
   runs-on: ubuntu-latest
   steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4
   - name: Docker Hub Description
     uses: peter-evans/dockerhub-description@v3
     env:
.gitignore (vendored): 3 changes

@@ -83,6 +83,9 @@ instance/
 # Scrapy stuff:
 .scrapy

+# memray
+memray-*
+
 # Sphinx documentation
 docs/_build/
 # Mkdocs documentation
@@ -8,17 +8,17 @@ repos:
 # stages: [push]

 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: "v1.5.0"
+  rev: "v1.7.0"
   hooks:
   - id: mypy
     exclude: build_helpers
     additional_dependencies:
-    - types-cachetools==5.3.0.6
+    - types-cachetools==5.3.0.7
     - types-filelock==3.2.7
-    - types-requests==2.31.0.2
+    - types-requests==2.31.0.10
     - types-tabulate==0.9.0.3
     - types-python-dateutil==2.8.19.14
-    - SQLAlchemy==2.0.20
+    - SQLAlchemy==2.0.23
 # stages: [push]

 - repo: https://github.com/pycqa/isort
@@ -30,7 +30,7 @@ repos:

 - repo: https://github.com/charliermarsh/ruff-pre-commit
   # Ruff version.
-  rev: 'v0.0.270'
+  rev: 'v0.1.1'
   hooks:
   - id: ruff
@@ -125,7 +125,7 @@ Exceptions:

 Contributors may be given commit privileges. Preference will be given to those with:

-1. Past contributions to Freqtrade and other related open-source projects. Contributions to Freqtrade include both code (both accepted and pending) and friendly participation in the issue tracker and Pull request reviews. Quantity and quality are considered.
+1. Past contributions to Freqtrade and other related open-source projects. Contributions to Freqtrade include both code (both accepted and pending) and friendly participation in the issue tracker and Pull request reviews. Both quantity and quality are considered.
 1. A coding style that the other core committers find simple, minimal, and clean.
 1. Access to resources for cross-platform development and testing.
 1. Time to devote to the project regularly.
@@ -1,4 +1,4 @@
-FROM python:3.11.4-slim-bullseye as base
+FROM python:3.11.6-slim-bookworm as base

 # Setup env
 ENV LANG C.UTF-8
@@ -28,7 +28,7 @@ hesitate to read the source code and understand the mechanism of this bot.
 Please read the [exchange specific notes](docs/exchanges.md) to learn about eventual, special configurations needed for each exchange.

 - [X] [Binance](https://www.binance.com/)
-- [X] [Bittrex](https://bittrex.com/)
+- [X] [Bitmart](https://bitmart.com/)
 - [X] [Gate.io](https://www.gate.io/ref/6266643)
 - [X] [Huobi](http://huobi.com/)
 - [X] [Kraken](https://kraken.com/)
@@ -59,7 +59,7 @@ Please find the complete documentation on the [freqtrade website](https://www.fr

 ## Features

-- [x] **Based on Python 3.8+**: For botting on any operating system - Windows, macOS and Linux.
+- [x] **Based on Python 3.9+**: For botting on any operating system - Windows, macOS and Linux.
 - [x] **Persistence**: Persistence is achieved through sqlite.
 - [x] **Dry-run**: Run the bot without paying money.
 - [x] **Backtesting**: Run a simulation of your buy/sell strategy.
@@ -207,7 +207,7 @@ To run this bot we recommend you a cloud instance with a minimum of:

 ### Software requirements

-- [Python >= 3.8](http://docs.python-guide.org/en/latest/starting/installation/)
+- [Python >= 3.9](http://docs.python-guide.org/en/latest/starting/installation/)
 - [pip](https://pip.pypa.io/en/stable/installing/)
 - [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
 - [TA-Lib](https://ta-lib.github.io/ta-lib-python/)
build_helpers/TA_Lib-0.4.28-cp311-cp311-linux_armv7l.whl (new binary file, not shown)
build_helpers/TA_Lib-0.4.28-cp39-cp39-linux_armv7l.whl (new binary file, not shown)
@@ -54,7 +54,7 @@ docker tag freqtrade:$TAG_FREQAI_ARM ${CACHE_IMAGE}:$TAG_FREQAI_ARM
 docker tag freqtrade:$TAG_FREQAI_RL_ARM ${CACHE_IMAGE}:$TAG_FREQAI_RL_ARM

 # Run backtest
-docker run --rm -v $(pwd)/config_examples/config_bittrex.example.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG_ARM} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
+docker run --rm -v $(pwd)/tests/testdata/config.tests.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG_ARM} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3

 if [ $? -ne 0 ]; then
     echo "failed running backtest"
@@ -67,7 +67,7 @@ docker tag freqtrade:$TAG_FREQAI ${CACHE_IMAGE}:$TAG_FREQAI
 docker tag freqtrade:$TAG_FREQAI_RL ${CACHE_IMAGE}:$TAG_FREQAI_RL

 # Run backtest
-docker run --rm -v $(pwd)/config_examples/config_bittrex.example.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
+docker run --rm -v $(pwd)/tests/testdata/config.tests.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3

 if [ $? -ne 0 ]; then
     echo "failed running backtest"
build_helpers/pyarrow-14.0.1-cp39-cp39-linux_armv7l.whl (new binary file, not shown)
@@ -1,6 +1,6 @@
 {
     "max_open_trades": 3,
-    "stake_currency": "BTC",
+    "stake_currency": "USDT",
     "stake_amount": 0.05,
     "tradable_balance_ratio": 0.99,
     "fiat_display_currency": "USD",
@@ -36,21 +36,21 @@
     "ccxt_async_config": {
     },
     "pair_whitelist": [
-        "ALGO/BTC",
-        "ATOM/BTC",
-        "BAT/BTC",
-        "BCH/BTC",
-        "BRD/BTC",
-        "EOS/BTC",
-        "ETH/BTC",
-        "IOTA/BTC",
-        "LINK/BTC",
-        "LTC/BTC",
-        "NEO/BTC",
-        "NXS/BTC",
-        "XMR/BTC",
-        "XRP/BTC",
-        "XTZ/BTC"
+        "ALGO/USDT",
+        "ATOM/USDT",
+        "BAT/USDT",
+        "BCH/USDT",
+        "BRD/USDT",
+        "EOS/USDT",
+        "ETH/USDT",
+        "IOTA/USDT",
+        "LINK/USDT",
+        "LTC/USDT",
+        "NEO/USDT",
+        "NXS/USDT",
+        "XMR/USDT",
+        "XRP/USDT",
+        "XTZ/USDT"
     ],
     "pair_blacklist": [
         "BNB/.*"
@@ -32,11 +32,8 @@
         "name": "bittrex",
         "key": "your_exchange_key",
         "secret": "your_exchange_secret",
-        "ccxt_config": {"enableRateLimit": true},
-        "ccxt_async_config": {
-            "enableRateLimit": true,
-            "rateLimit": 500
-        },
+        "ccxt_config": {},
+        "ccxt_async_config": {},
         "pair_whitelist": [
             "ETH/BTC",
             "LTC/BTC",
@@ -70,6 +70,7 @@
     },
     "pairlists": [
         {"method": "StaticPairList"},
+        {"method": "FullTradesFilter"},
         {
             "method": "VolumePairList",
             "number_assets": 20,
@@ -1,4 +1,4 @@
-FROM python:3.9.16-slim-bullseye as base
+FROM python:3.11.6-slim-bookworm as base

 # Setup env
 ENV LANG C.UTF-8
@@ -11,12 +11,13 @@ ENV FT_APP_ENV="docker"
 # Prepare environment
 RUN mkdir /freqtrade \
   && apt-get update \
-  && apt-get -y install sudo libatlas3-base curl sqlite3 libhdf5-dev libutf8proc-dev libsnappy-dev \
+  && apt-get -y install sudo libatlas3-base libopenblas-dev curl sqlite3 libhdf5-dev libutf8proc-dev libsnappy-dev \
   && apt-get clean \
   && useradd -u 1000 -G sudo -U -m ftuser \
   && chown ftuser:ftuser /freqtrade \
   # Allow sudoers
-  && echo "ftuser ALL=(ALL) NOPASSWD: /bin/chown" >> /etc/sudoers
+  && echo "ftuser ALL=(ALL) NOPASSWD: /bin/chown" >> /etc/sudoers \
+  && pip install --upgrade pip

 WORKDIR /freqtrade

@@ -25,19 +26,16 @@ FROM base as python-deps
 RUN apt-get update \
   && apt-get -y install build-essential libssl-dev libffi-dev libgfortran5 pkg-config cmake gcc \
   && apt-get clean \
-  && pip install --upgrade pip \
   && echo "[global]\nextra-index-url=https://www.piwheels.org/simple" > /etc/pip.conf

 # Install TA-lib
 COPY build_helpers/* /tmp/
 RUN cd /tmp && /tmp/install_ta-lib.sh && rm -r /tmp/*ta-lib*
 ENV LD_LIBRARY_PATH /usr/local/lib

 # Install dependencies
 COPY --chown=ftuser:ftuser requirements.txt /freqtrade/
 USER ftuser
 RUN pip install --user --no-cache-dir numpy \
-  && pip install --user /tmp/pyarrow-*.whl \
+  && pip install --user --no-index --find-links /tmp/ pyarrow TA-Lib==0.4.28 \
   && pip install --user --no-cache-dir -r requirements.txt

 # Copy dependencies to runtime-image
@@ -31,9 +31,9 @@ optional arguments:
                         Specify timeframe (`1m`, `5m`, `30m`, `1h`, `1d`).
   --timerange TIMERANGE
                         Specify what timerange of data to use.
-  --data-format-ohlcv {json,jsongz,hdf5}
+  --data-format-ohlcv {json,jsongz,hdf5,feather,parquet}
                         Storage format for downloaded candle (OHLCV) data.
-                        (default: `json`).
+                        (default: `feather`).
   --max-open-trades INT
                         Override the value of the `max_open_trades`
                         configuration setting.
@@ -170,11 +170,11 @@ freqtrade backtesting --strategy AwesomeStrategy --dry-run-wallet 1000

 Using a different on-disk historical candle (OHLCV) data source

-Assume you downloaded the history data from the Bittrex exchange and kept it in the `user_data/data/bittrex-20180101` directory.
+Assume you downloaded the history data from the Binance exchange and kept it in the `user_data/data/binance-20180101` directory.
 You can then use this data for backtesting as follows:

 ```bash
-freqtrade backtesting --strategy AwesomeStrategy --datadir user_data/data/bittrex-20180101
+freqtrade backtesting --strategy AwesomeStrategy --datadir user_data/data/binance-20180101
 ```

 ---
@@ -177,7 +177,7 @@ Mandatory parameters are marked as **Required**, which means that they are requi
 | `exit_pricing.order_book_top` | Bot will use the top N rate in Order Book "price_side" to exit. I.e. a value of 2 will allow the bot to pick the 2nd ask rate in [Order Book Exit](#exit-price-with-orderbook-enabled)<br>*Defaults to `1`.* <br> **Datatype:** Positive Integer
 | `custom_price_max_distance_ratio` | Configure maximum distance ratio between current and custom entry or exit price. <br>*Defaults to `0.02` 2%).*<br> **Datatype:** Positive float
 | | **TODO**
-| `use_exit_signal` | Use exit signals produced by the strategy in addition to the `minimal_roi`. [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `true`.* <br> **Datatype:** Boolean
+| `use_exit_signal` | Use exit signals produced by the strategy in addition to the `minimal_roi`. <br>Setting this to false disables the usage of `"exit_long"` and `"exit_short"` columns. Has no influence on other exit methods (Stoploss, ROI, callbacks). [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `true`.* <br> **Datatype:** Boolean
 | `exit_profit_only` | Wait until the bot reaches `exit_profit_offset` before taking an exit decision. [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `false`.* <br> **Datatype:** Boolean
 | `exit_profit_offset` | Exit-signal is only active above this value. Only active in combination with `exit_profit_only=True`. [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `0.0`.* <br> **Datatype:** Float (as ratio)
 | `ignore_roi_if_entry_signal` | Do not exit if the entry signal is still active. This setting takes preference over `minimal_roi` and `use_exit_signal`. [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `false`.* <br> **Datatype:** Boolean
@@ -594,7 +594,7 @@ creating trades on the exchange.

 ```json
 "exchange": {
-    "name": "bittrex",
+    "name": "binance",
     "key": "key",
     "secret": "secret",
     ...
@@ -613,6 +613,7 @@ Once you will be happy with your bot performance running in the Dry-run mode, yo
 * Orders are simulated, and will not be posted to the exchange.
 * Market orders fill based on orderbook volume the moment the order is placed.
 * Limit orders fill once the price reaches the defined level - or time out based on `unfilledtimeout` settings.
+* Limit orders will be converted to market orders if they cross the price by more than 1%.
 * In combination with `stoploss_on_exchange`, the stop_loss price is assumed to be filled.
 * Open orders (not trades, which are stored in the database) are kept open after bot restarts, with the assumption that they were not filled while being offline.
@@ -643,7 +644,7 @@ API Keys are usually only required for live trading (trading for real money, bot
 ```json
 {
     "exchange": {
-        "name": "bittrex",
+        "name": "binance",
         "key": "af8ddd35195e9dc500b9a6f799f6f5c93d89193b",
         "secret": "08a9dc6db3d7b53e1acebd9275677f4b0a04f1a5",
         //"password": "", // Optional, not needed by all exchanges)
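The exit-related options touched above can also be set directly on the strategy via the documented Strategy Override mechanism. The following is a minimal, illustrative sketch, not part of the diff; the attribute names follow the documented overrides, but verify them against your freqtrade version.

```python
# Illustrative sketch: overriding the exit-related config options in a strategy.
from pandas import DataFrame
from freqtrade.strategy import IStrategy


class AwesomeStrategy(IStrategy):
    timeframe = "5m"
    minimal_roi = {"0": 0.04}
    stoploss = -0.10

    # Strategy-level overrides of the settings discussed above
    use_exit_signal = True              # honour exit_long / exit_short columns
    exit_profit_only = False            # don't require exit_profit_offset before exiting
    exit_profit_offset = 0.0
    ignore_roi_if_entry_signal = False

    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        return dataframe

    def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        return dataframe

    def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        return dataframe
```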
@@ -10,7 +10,7 @@ You can run this server using the following command: `docker compose -f docker/d
 This will create a dockercontainer running jupyter lab, which will be accessible using `https://127.0.0.1:8888/lab`.
 Please use the link that's printed in the console after startup for simplified login.

-For more information, Please visit the [Data analysis with Docker](docker_quickstart.md#data-analayis-using-docker-compose) section.
+For more information, Please visit the [Data analysis with Docker](docker_quickstart.md#data-analysis-using-docker-compose) section.

 ### Pro tips

@@ -154,13 +154,13 @@ freqtrade download-data --exchange binance --pairs ETH/USDT XRP/USDT BTC/USDT --

 Freqtrade currently supports the following data-formats:

+* `feather` - a dataformat based on Apache Arrow
 * `json` - plain "text" json files
 * `jsongz` - a gzip-zipped version of json files
 * `hdf5` - a high performance datastore
-* `feather` - a dataformat based on Apache Arrow
 * `parquet` - columnar datastore (OHLCV only)

-By default, OHLCV data is stored as `json` data, while trades data is stored as `jsongz` data.
+By default, both OHLCV data and trades data are stored in the `feather` format.

 This can be changed via the `--data-format-ohlcv` and `--data-format-trades` command line arguments respectively.
 To persist this change, you should also add the following snippet to your configuration, so you don't have to insert the above arguments each time:
@@ -203,15 +203,15 @@ time freqtrade list-data --show-timerange --data-format-ohlcv <dataformat>

 | Format | Size | timing |
 |------------|-------------|-------------|
+| `feather` | 72Mb | 3.5s |
 | `json` | 149Mb | 25.6s |
 | `jsongz` | 39Mb | 27s |
 | `hdf5` | 145Mb | 3.9s |
-| `feather` | 72Mb | 3.5s |
 | `parquet` | 83Mb | 3.8s |

 Size has been taken from the BTC/USDT 1m spot combination for the timerange specified above.

-To have a best performance/size mix, we recommend the use of either feather or parquet.
+To have a best performance/size mix, we recommend using the default feather format, or parquet.

 ### Pairs file

@@ -318,6 +318,7 @@ Additional tests / steps to complete:
 * Check if balance shows correctly (*)
 * Create market order (*)
 * Create limit order (*)
+* Cancel order (*)
 * Complete trade (enter + exit) (*)
 * Compare result calculation between exchange and bot
 * Ensure fees are applied correctly (check the database against the exchange)
@@ -2,6 +2,10 @@

 The `Edge Positioning` module uses probability to calculate your win rate and risk reward ratio. It will use these statistics to control your strategy trade entry points, position size and, stoploss.

+!!! Danger "Deprecated functionality"
+    `Edge positioning` (or short Edge) is currently in maintenance mode only (we keep existing functionality alive) and should be considered as deprecated.
+    It will currently not receive new features until either someone stepped forward to take up ownership of that module - or we'll decide to remove edge from freqtrade.
+
 !!! Warning
     When using `Edge positioning` with a dynamic whitelist (VolumePairList), make sure to also use `AgeFilter` and set it to at least `calculate_since_number_of_days` to avoid problems with missing data.

@@ -55,7 +55,7 @@ This configuration enables kraken, as well as rate-limiting to avoid bans from t
 ## Binance

 !!! Warning "Server location and geo-ip restrictions"
-    Please be aware that binance restrict api access regarding the server country. The currents and non exhaustive countries blocked are United States, Malaysia (Singapour), Ontario (Canada). Please go to [binance terms > b. Eligibility](https://www.binance.com/en/terms) to find up to date list.
+    Please be aware that Binance restricts API access regarding the server country. The current and non-exhaustive countries blocked are Canada, Malaysia, Netherlands and United States. Please go to [binance terms > b. Eligibility](https://www.binance.com/en/terms) to find up to date list.

 Binance supports [time_in_force](configuration.md#understand-order_time_in_force).

@@ -136,13 +136,41 @@ Freqtrade will not attempt to change these settings.
 The Kraken API does only provide 720 historic candles, which is sufficient for Freqtrade dry-run and live trade modes, but is a problem for backtesting.
 To download data for the Kraken exchange, using `--dl-trades` is mandatory, otherwise the bot will download the same 720 candles over and over, and you'll not have enough backtest data.

-Due to the heavy rate-limiting applied by Kraken, the following configuration section should be used to download data:
+To speed up downloading, you can download the [trades zip files](https://support.kraken.com/hc/en-us/articles/360047543791-Downloadable-historical-market-data-time-and-sales-) kraken provides.
+These are usually updated once per quarter. Freqtrade expects these files to be placed in `user_data/data/kraken/trades_csv`.

-``` json
-    "ccxt_async_config": {
-        "enableRateLimit": true,
-        "rateLimit": 3100
-    },
+A structure as follows can make sense if using incremental files, with the "full" history in one directory, and incremental files in different directories.
+The assumption for this mode is that the data is downloaded and unzipped keeping filenames as they are.
+Duplicate content will be ignored (based on timestamp) - though the assumption is that there is no gap in the data.
+
+This means, if your "full" history ends in Q4 2022 - then both incremental updates Q1 2023 and Q2 2023 are available.
+Not having this will lead to incomplete data, and therefore invalid results while using the data.
+
 ```
+└── trades_csv
+    ├── Kraken_full_history
+    │   ├── BCHEUR.csv
+    │   └── XBTEUR.csv
+    ├── Kraken_Trading_History_Q1_2023
+    │   ├── BCHEUR.csv
+    │   └── XBTEUR.csv
+    └── Kraken_Trading_History_Q2_2023
+        ├── BCHEUR.csv
+        └── XBTEUR.csv
+```
+
+You can convert these files into freqtrade files:
+
+``` bash
+freqtrade convert-trade-data --exchange kraken --format-from kraken_csv --format-to feather
+# Convert trade data to different ohlcv timeframes
+freqtrade trades-to-ohlcv -p BTC/EUR BCH/EUR --exchange kraken -t 1m 5m 15m 1h
+```
+
+The converted data also makes downloading data possible, and will start the download after the latest loaded trade.
+
+``` bash
+freqtrade download-data --exchange kraken --dl-trades -p BTC/EUR BCH/EUR
+```

 !!! Warning "Downloading data from kraken"
@@ -274,6 +302,24 @@ We do strongly recommend to limit all API keys to the IP you're going to use it
 Bybit (futures only) supports `stoploss_on_exchange` and uses `stop-loss-limit` orders. It provides great advantages, so we recommend to benefit from it by enabling stoploss on exchange.
 On futures, Bybit supports both `stop-limit` as well as `stop-market` orders. You can use either `"limit"` or `"market"` in the `order_types.stoploss` configuration setting to decide which type to use.

+## Bitmart
+
+Bitmart requires the API key Memo (the name you give the API key) to go along with the exchange key and secret.
+It's therefore required to pass the UID as well.
+
+```json
+"exchange": {
+    "name": "bitmart",
+    "uid": "your_bitmart_api_key_memo",
+    "secret": "your_exchange_secret",
+    "password": "your_exchange_api_key_password",
+    // ...
+}
+```
+
+!!! Warning "Necessary Verification"
+    Bitmart requires Verification Lvl2 to successfully trade on the spot market through the API - even though trading via UI works just fine with just Lvl1 verification.
+
 ## All exchanges

 Should you experience constant errors with Nonce (like `InvalidNonce`), it is best to regenerate the API keys. Resetting Nonce is difficult and it's usually easier to regenerate the API keys.
@@ -7,7 +7,7 @@ Low level feature engineering is performed in the user strategy within a set of

 | Function | Description |
 |---------------|-------------|
 | `feature_engineering_expand_all()` | This optional function will automatically expand the defined features on the config defined `indicator_periods_candles`, `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`.
-| `feature_engineering_expand_basic()` | This optional function will automatically expand the defined features on the config defined `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`. Note: this function does *not* expand across `include_periods_candles`.
+| `feature_engineering_expand_basic()` | This optional function will automatically expand the defined features on the config defined `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`. Note: this function does *not* expand across `indicator_periods_candles`.
 | `feature_engineering_standard()` | This optional function will be called once with the dataframe of the base timeframe. This is the final function to be called, which means that the dataframe entering this function will contain all the features and columns from the base asset created by the other `feature_engineering_expand` functions. This function is a good place to do custom exotic feature extractions (e.g. tsfresh). This function is also a good place for any feature that should not be auto-expanded upon (e.g., day of the week).
 | `set_freqai_targets()` | Required function to set the targets for the model. All targets must be prepended with `&` to be recognized by the FreqAI internals.
@@ -178,7 +178,7 @@ You can ask for each of the defined features to be included also for informative

 `include_shifted_candles` indicates the number of previous candles to include in the feature set. For example, `include_shifted_candles: 2` tells FreqAI to include the past 2 candles for each of the features in the feature set.

-In total, the number of features the user of the presented example strat has created is: length of `include_timeframes` * no. features in `feature_engineering_expand_*()` * length of `include_corr_pairlist` * no. `include_shifted_candles` * length of `indicator_periods_candles`
+In total, the number of features the user of the presented example strategy has created is: length of `include_timeframes` * no. features in `feature_engineering_expand_*()` * length of `include_corr_pairlist` * no. `include_shifted_candles` * length of `indicator_periods_candles`
 $= 3 * 3 * 3 * 2 * 2 = 108$.

 !!! note "Learn more about creative feature engineering"
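For orientation, the corrected table row above corresponds to a strategy method like the following minimal sketch. It is illustrative and not part of the diff; the method name and the `%` feature prefix follow the FreqAI documentation, but the exact signature and the chosen features should be checked against your freqtrade version.

```python
# Illustrative sketch of feature_engineering_expand_basic() on a FreqAI strategy.
# Features prefixed with "%" are expanded across include_timeframes,
# include_shifted_candles and include_corr_pairs (but not indicator_periods_candles).
from pandas import DataFrame


class MyFreqaiFeatureMixin:
    def feature_engineering_expand_basic(self, dataframe: DataFrame, metadata: dict, **kwargs) -> DataFrame:
        dataframe["%-pct-change"] = dataframe["close"].pct_change()
        dataframe["%-raw_volume"] = dataframe["volume"]
        return dataframe
```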
@@ -74,7 +74,6 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
 | | **Reinforcement Learning Parameters within the `freqai.rl_config` sub dictionary**
 | `rl_config` | A dictionary containing the control parameters for a Reinforcement Learning model. <br> **Datatype:** Dictionary.
 | `train_cycles` | Training time steps will be set based on the `train_cycles * number of training data points. <br> **Datatype:** Integer.
 | `cpu_count` | Number of processors to dedicate to the Reinforcement Learning training process. <br> **Datatype:** int.
 | `max_trade_duration_candles`| Guides the agent training to keep trades below desired length. Example usage shown in `prediction_models/ReinforcementLearner.py` within the customizable `calculate_reward()` function. <br> **Datatype:** int.
 | `model_type` | Model string from stable_baselines3 or SBcontrib. Available strings include: `'TRPO', 'ARS', 'RecurrentPPO', 'MaskablePPO', 'PPO', 'A2C', 'DQN'`. User should ensure that `model_training_parameters` match those available to the corresponding stable_baselines3 model by visiting their documentaiton. [PPO doc](https://stable-baselines3.readthedocs.io/en/master/modules/ppo.html) (external website) <br> **Datatype:** string.
 | `policy_type` | One of the available policy types from stable_baselines3 <br> **Datatype:** string.
@@ -237,11 +237,10 @@ class MyCoolRLModel(ReinforcementLearner):
 Reinforcement Learning models benefit from tracking training metrics. FreqAI has integrated Tensorboard to allow users to track training and evaluation performance across all coins and across all retrainings. Tensorboard is activated via the following command:

 ```bash
-cd freqtrade
 tensorboard --logdir user_data/models/unique-id
 ```

-where `unique-id` is the `identifier` set in the `freqai` configuration file. This command must be run in a separate shell to view the output in their browser at 127.0.0.1:6006 (6006 is the default port used by Tensorboard).
+where `unique-id` is the `identifier` set in the `freqai` configuration file. This command must be run in a separate shell to view the output in the browser at 127.0.0.1:6006 (6006 is the default port used by Tensorboard).

 

@@ -337,11 +337,15 @@ There are four parameter types each suited for different purposes.
 * `CategoricalParameter` - defines a parameter with a predetermined number of choices.
 * `BooleanParameter` - Shorthand for `CategoricalParameter([True, False])` - great for "enable" parameters.

-!!! Tip "Disabling parameter optimization"
-    Each parameter takes two boolean parameters:
-    * `load` - when set to `False` it will not load values configured in `buy_params` and `sell_params`.
-    * `optimize` - when set to `False` parameter will not be included in optimization process.
-    Use these parameters to quickly prototype various ideas.
+### Parameter options
+
+There are two parameter options that can help you to quickly test various ideas:
+
+* `optimize` - when set to `False`, the parameter will not be included in optimization process. (Default: True)
+* `load` - when set to `False`, results of a previous hyperopt run (in `buy_params` and `sell_params` either in your strategy or the JSON output file) will not be used as the starting value for subsequent hyperopts. The default value specified in the parameter will be used instead. (Default: True)
+
+!!! Tip "Effects of `load=False` on backtesting"
+    Be aware that setting the `load` option to `False` will mean backtesting will also use the default value specified in the parameter and *not* the value found through hyperoptimisation.

 !!! Warning
     Hyperoptable parameters cannot be used in `populate_indicators` - as hyperopt does not recalculate indicators for each epoch, so the starting value would be used in this case.
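As a concrete illustration of the `optimize` and `load` options described in the reworked section above, here is a minimal sketch of a hyperoptable parameter. It is not part of the diff; the parameter name and value range are invented for the example, and the class is a fragment (the usual populate methods are omitted).

```python
# Illustrative sketch: a hyperoptable parameter that is excluded from
# optimization and ignores previously saved hyperopt results.
from freqtrade.strategy import IStrategy, IntParameter


class AwesomeStrategy(IStrategy):
    timeframe = "5m"
    minimal_roi = {"0": 0.04}
    stoploss = -0.10

    # optimize=False: keep this parameter fixed during hyperopt runs
    # load=False: ignore values stored in buy_params / the hyperopt result file
    buy_rsi = IntParameter(10, 40, default=30, space="buy", optimize=False, load=False)
```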
@@ -25,6 +25,7 @@ You may also use something like `.*DOWN/BTC` or `.*UP/BTC` to exclude leveraged
 * [`ProducerPairList`](#producerpairlist)
 * [`RemotePairList`](#remotepairlist)
 * [`AgeFilter`](#agefilter)
+* [`FullTradesFilter`](#fulltradesfilter)
 * [`OffsetFilter`](#offsetfilter)
 * [`PerformanceFilter`](#performancefilter)
 * [`PrecisionFilter`](#precisionfilter)
@@ -236,6 +237,17 @@ be caught out buying before the pair has finished dropping in price.

 This filter allows freqtrade to ignore pairs until they have been listed for at least `min_days_listed` days and listed before `max_days_listed`.

+#### FullTradesFilter
+
+Shrink whitelist to consist only in-trade pairs when the trade slots are full (when `max_open_trades` isn't being set to `-1` in the config).
+
+When the trade slots are full, there is no need to calculate indicators of the rest of the pairs (except informative pairs) since no new trade can be opened. By shrinking the whitelist to just the in-trade pairs, you can improve calculation speeds and reduce CPU usage. When a trade slot is free (either a trade is closed or `max_open_trades` value in config is increased), then the whitelist will return to normal state.
+
+When multiple pairlist filters are being used, it's recommended to put this filter at second position directly below the primary pairlist, so when the trade slots are full, the bot doesn't have to download data for the rest of the filters.
+
+!!! Warning "Backtesting"
+    `FullTradesFilter` does not support backtesting mode.
+
 #### OffsetFilter

 Offsets an incoming pairlist by a given `offset` value.
@@ -376,7 +388,7 @@ If the trading range over the last 10 days is <1% or >99%, remove the pair from
         "lookback_days": 10,
         "min_rate_of_change": 0.01,
         "max_rate_of_change": 0.99,
-        "refresh_period": 1440
+        "refresh_period": 86400
     }
 ]
 ```
@@ -431,7 +443,7 @@ The below example blacklists `BNB/BTC`, uses `VolumePairList` with `20` assets,
         "method": "RangeStabilityFilter",
         "lookback_days": 10,
         "min_rate_of_change": 0.01,
-        "refresh_period": 1440
+        "refresh_period": 86400
     },
     {
         "method": "VolatilityFilter",
@@ -40,7 +40,7 @@ Freqtrade is a free and open source crypto trading bot written in Python. It is
 Please read the [exchange specific notes](exchanges.md) to learn about eventual, special configurations needed for each exchange.

 - [X] [Binance](https://www.binance.com/)
-- [X] [Bittrex](https://bittrex.com/)
+- [X] [Bitmart](https://bitmart.com/)
 - [X] [Gate.io](https://www.gate.io/ref/6266643)
 - [X] [Huobi](http://huobi.com/)
 - [X] [Kraken](https://kraken.com/)
@@ -83,7 +83,7 @@ To run this bot we recommend you a linux cloud instance with a minimum of:

 Alternatively

-- Python 3.8+
+- Python 3.9+
 - pip (pip3)
 - git
 - TA-Lib
@@ -24,7 +24,7 @@ The easiest way to install and run Freqtrade is to clone the bot Github reposito
|
||||
The `stable` branch contains the code of the last release (usually done once per month on an approximately one-week-old snapshot of the `develop` branch to prevent packaging bugs, so it is potentially more stable).
|
||||
|
||||
!!! Note
|
||||
Python3.8 or higher and the corresponding `pip` are assumed to be available. The install-script will warn you and stop if that's not the case. `git` is also needed to clone the Freqtrade repository.
|
||||
Python3.9 or higher and the corresponding `pip` are assumed to be available. The install-script will warn you and stop if that's not the case. `git` is also needed to clone the Freqtrade repository.
|
||||
Also, python headers (`python<yourversion>-dev` / `python<yourversion>-devel`) must be available for the installation to complete successfully.
|
||||
|
||||
!!! Warning "Up-to-date clock"
|
||||
@@ -42,7 +42,7 @@ These requirements apply to both [Script Installation](#script-installation) and
|
||||
|
||||
### Install guide
|
||||
|
||||
* [Python >= 3.8.x](http://docs.python-guide.org/en/latest/starting/installation/)
|
||||
* [Python >= 3.9](http://docs.python-guide.org/en/latest/starting/installation/)
|
||||
* [pip](https://pip.pypa.io/en/stable/installing/)
|
||||
* [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
|
||||
* [virtualenv](https://virtualenv.pypa.io/en/stable/installation.html) (Recommended)
|
||||
@@ -54,7 +54,7 @@ We've included/collected install instructions for Ubuntu, MacOS, and Windows. Th
|
||||
OS Specific steps are listed first, the [Common](#common) section below is necessary for all systems.
|
||||
|
||||
!!! Note
|
||||
Python3.8 or higher and the corresponding pip are assumed to be available.
|
||||
Python3.9 or higher and the corresponding pip are assumed to be available.
|
||||
|
||||
=== "Debian/Ubuntu"
|
||||
#### Install necessary dependencies
|
||||
@@ -169,7 +169,7 @@ You can as well update, configure and reset the codebase of your bot with `./scr
|
||||
** --install **
|
||||
|
||||
With this option, the script will install the bot and most dependencies:
|
||||
You will need to have git and python3.8+ installed beforehand for this to work.
|
||||
You will need to have git and python3.9+ installed beforehand for this to work.
|
||||
|
||||
* Mandatory software as: `ta-lib`
|
||||
* Setup your virtualenv under `.venv/`
|
||||
|
||||
119
docs/recursive-analysis.md
Normal file
@@ -0,0 +1,119 @@
|
||||
# Recursive analysis
|
||||
|
||||
This page explains how to validate your strategy for inaccuracies due to recursive issues with certain indicators.
|
||||
|
||||
A recursive formula defines any term of a sequence relative to its preceding term(s). An example of a recursive formula is a<sub>n</sub> = a<sub>n-1</sub> + b.
|
||||
|
||||
Why does this matter for Freqtrade? In backtesting, the bot will get full data of the pairs according to the timerange specified. But in a dry/live run, the bot will be limited by the amount of data each exchange gives.
|
||||
|
||||
For example, to calculate a very basic indicator called `steps`, the first row's value is always 0, while the following rows' values are equal to the value of the previous row plus 1. If I were to calculate it using the latest 1000 candles, then the `steps` value of the first row is 0, and the `steps` value at the last closed candle is 999.
|
||||
|
||||
What happens if the calculation uses only the latest 500 candles? Then instead of 999, the `steps` value at the last closed candle is 499. This difference in values means that your backtest result can differ from your dry/live run result.
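As a minimal sketch (plain pandas, not freqtrade-specific), the hypothetical `steps` indicator above could be written as follows, showing how the length of the available data changes the final value:

``` python
import pandas as pd


def add_steps(dataframe: pd.DataFrame) -> pd.DataFrame:
    # Recursive definition: the first value is 0, every later value is the
    # previous value + 1 - so the result depends on where the data starts.
    steps = [0]
    for _ in range(1, len(dataframe)):
        steps.append(steps[-1] + 1)
    dataframe['steps'] = steps
    return dataframe


backtest_df = pd.DataFrame({'close': [1.0] * 1000})            # full backtest history
live_df = backtest_df.tail(500).reset_index(drop=True).copy()  # limited dry/live window

print(add_steps(backtest_df)['steps'].iloc[-1])  # 999
print(add_steps(live_df)['steps'].iloc[-1])      # 499
```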
The `recursive-analysis` command requires historic data to be available. To learn how to get data for the pairs and exchange you're interested in,
|
||||
head over to the [Data Downloading](data-download.md) section of the documentation.
|
||||
|
||||
This command works by preparing data of different lengths and calculating indicators based on each of them.
It does not backtest the strategy itself, but only calculates the indicators. Once the indicators have been calculated for the different startup candle values (`startup_candle_count`), the values in the last rows across all specified `startup_candle_count` values are compared to see how much variance they show relative to the base calculation.
|
||||
|
||||
Command settings:
|
||||
|
||||
- Use the `-p` option to set your desired pair to analyze. Since we are only looking at indicator values, using more than one pair is redundant. Preferably use a pair with a relatively high price and at least moderate volatility, such as BTC or ETH, to avoid rounding issues that can make the results inaccurate. If no pair is set on the command, the pair used for this analysis is the first pair in the whitelist.
|
||||
- It is recommended to set a long timerange (at least 5000 candles) so that the initial indicator calculation that is used as a benchmark has very small or no recursive issues itself. For example, for a 5m timeframe, a timerange of 5000 candles is roughly equal to 18 days.
|
||||
- `--cache` is forced to "none" to avoid loading previous indicator calculations automatically.
|
||||
|
||||
In addition to the recursive formula check, this command also carries out a simple lookahead bias check on the indicator values only. For a full lookahead check, use [Lookahead-analysis](lookahead-analysis.md).
|
||||
|
||||
## Recursive-analysis command reference
|
||||
|
||||
```
|
||||
usage: freqtrade recursive-analysis [-h] [-v] [--logfile FILE] [-V] [-c PATH]
|
||||
[-d PATH] [--userdir PATH] [-s NAME]
|
||||
[--strategy-path PATH]
|
||||
[--recursive-strategy-search]
|
||||
[--freqaimodel NAME]
|
||||
[--freqaimodel-path PATH] [-i TIMEFRAME]
|
||||
[--timerange TIMERANGE]
|
||||
[--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}]
|
||||
[-p PAIR]
|
||||
[--freqai-backtest-live-models]
|
||||
[--startup-candle STARTUP_CANDLES [STARTUP_CANDLES ...]]
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
-i TIMEFRAME, --timeframe TIMEFRAME
|
||||
Specify timeframe (`1m`, `5m`, `30m`, `1h`, `1d`).
|
||||
--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}
|
||||
Storage format for downloaded candle (OHLCV) data.
|
||||
(default: `feather`).
|
||||
-p PAIR, --pairs PAIR
|
||||
Limit command to this pair.
|
||||
--startup-candle STARTUP_CANDLE [STARTUP_CANDLE ...]
|
||||
Provide a space-separated list of startup_candle_count to
|
||||
be checked. Default : `199 399 499 999 1999`.
|
||||
|
||||
Common arguments:
|
||||
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
|
||||
--logfile FILE Log to the file specified. Special values are:
|
||||
'syslog', 'journald'. See the documentation for more
|
||||
details.
|
||||
-V, --version show program's version number and exit
|
||||
-c PATH, --config PATH
|
||||
Specify configuration file (default:
|
||||
`userdir/config.json` or `config.json` whichever
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
Strategy arguments:
|
||||
-s NAME, --strategy NAME
|
||||
Specify strategy class name which will be used by the
|
||||
bot.
|
||||
--strategy-path PATH Specify additional strategy lookup path.
|
||||
--timerange TIMERANGE
|
||||
Specify what timerange of data to use.
|
||||
```
|
||||
|
||||
### Why are odd-numbered default startup candles used?
|
||||
|
||||
The default values for startup candles are odd numbers. When the bot fetches candle data from the exchange's API, the last candle is the one being checked by the bot and the rest of the data are the "startup candles".
|
||||
|
||||
For example, Binance allows 1000 candles per API call. When the bot receives 1000 candles, the last candle is the "current candle", and the preceding 999 candles are the "startup candles". By setting the startup candle count as 1000 instead of 999, the bot will try to fetch 1001 candles instead. The exchange API will then send candle data in a paginated form, i.e. in the case of the Binance API, this will be two groups - one of length 1000 and another of length 1. This results in the bot thinking the strategy needs 1001 candles of data, and so it will download 2000 candles worth of data instead, which means there will be 1 "current candle" and 1999 "startup candles".
|
||||
|
||||
Furthermore, exchanges limit the number of consecutive bulk API calls, e.g. Binance allows 5 calls. In this case, only 5000 candles can be downloaded from Binance API without hitting the API rate limit, which means the max `startup_candle_count` you can have is 4999.
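The arithmetic above can be sketched as follows (a simplified model that assumes a fixed per-call limit of 1000 candles and that full pages are always downloaded; real pagination details vary per exchange):

``` python
import math


def candles_fetched(startup_candle_count: int, per_call_limit: int = 1000) -> int:
    needed = startup_candle_count + 1           # startup candles + the current candle
    calls = math.ceil(needed / per_call_limit)  # paginated API requests
    return calls * per_call_limit               # full pages are downloaded

print(candles_fetched(999))    # 1000 -> one API call, 999 startup candles
print(candles_fetched(1000))   # 2000 -> two API calls, 1999 startup candles

max_bulk_calls = 5             # e.g. Binance, as described above
print(max_bulk_calls * 1000 - 1)  # 4999 -> highest practical startup_candle_count
```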
Please note that this candle limit may be changed in the future by the exchanges without any prior notice.
|
||||
|
||||
### How does the command work?
|
||||
|
||||
- Firstly an initial indicator calculation is carried out using the supplied timerange to generate a benchmark for indicator values.
|
||||
- After setting the benchmark it will then carry out additional runs for each of the different startup candle count values.
|
||||
- The command will then compare the indicator values at the last candle rows and report the differences in a table.
|
||||
|
||||
## Understanding the recursive-analysis output
|
||||
|
||||
This is an example of an output results table where at least one indicator has a recursive formula issue:
|
||||
|
||||
```
|
||||
| indicators | 20 | 40 | 80 | 100 | 150 | 300 | 999 |
|
||||
|--------------+---------+---------+--------+--------+---------+---------+--------|
|
||||
| rsi_30 | nan% | -6.025% | 0.612% | 0.828% | -0.140% | 0.000% | 0.000% |
|
||||
| rsi_14 | 24.141% | -0.876% | 0.070% | 0.007% | -0.000% | -0.000% | - |
|
||||
```
|
||||
|
||||
The column headers indicate the different `startup_candle_count` values used in the analysis. The values in the table indicate the variance of the calculated indicators compared to the benchmark value.
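One plausible way to read a cell in this table (an assumption about the exact formula - the implementation may differ slightly) is as the percentage difference between the short-startup calculation and the benchmark:

``` python
def variance_pct(short_run_value: float, benchmark_value: float) -> float:
    # e.g. rsi_14 calculated with 40 startup candles vs. the long-timerange benchmark
    return (short_run_value - benchmark_value) / benchmark_value * 100
```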
`nan%` means the value of that indicator cannot be calculated due to lack of data. In this example, you cannot calculate RSI with length 30 with just 21 candles (1 current candle + 20 startup candles).
|
||||
|
||||
Users should assess the table per indicator to decide whether the specified `startup_candle_count` results in a sufficiently small variance, so that the remaining difference does not have any effect on entries and/or exits.
|
||||
|
||||
As such, aiming for absolute zero variance (shown by the `-` value) might not be the best option, because some indicators would require an impractically long `startup_candle_count` to reach zero variance.
|
||||
|
||||
## Caveats
|
||||
|
||||
- `recursive-analysis` will only calculate and compare the indicator values at the last row. The output table reports the percentage differences between the different startup candle count calculations and the original benchmark calculation. Whether this has any actual impact on your entries and exits is not part of the analysis.
|
||||
- The ideal scenario is that indicators will have no variance (or at least very close to 0%) despite the startup candle being varied. In reality, indicators such as EMA are using a recursive formula to calculate indicator values, so the goal is not necessarily to have zero percentage variance, but to have the variance low enough (and therefore `startup_candle_count` high enough) that the recursion inherent in the indicator will not have any real impact on trading decisions.
|
||||
- `recursive-analysis` will only run calculations on `populate_indicators` and `@informative` decorator(s). If you put any indicator calculation on `populate_entry_trend` or `populate_exit_trend`, it won't be calculated.
|
||||
@@ -1,6 +1,6 @@
|
||||
markdown==3.4.4
|
||||
mkdocs==1.5.2
|
||||
mkdocs-material==9.2.1
|
||||
markdown==3.5.1
|
||||
mkdocs==1.5.3
|
||||
mkdocs-material==9.4.14
|
||||
mdx_truly_sane_lists==1.3
|
||||
pymdown-extensions==10.1
|
||||
pymdown-extensions==10.5
|
||||
jinja2==3.1.2
|
||||
|
||||
@@ -134,13 +134,16 @@ python3 scripts/rest_client.py --config rest_config.json <command> [optional par
|
||||
| `reload_config` | Reloads the configuration file.
|
||||
| `trades` | List last trades. Limited to 500 trades per call.
|
||||
| `trade/<tradeid>` | Get specific trade.
|
||||
| `trade/<tradeid>` | DELETE - Remove trade from the database. Tries to close open orders. Requires manual handling of this trade on the exchange.
|
||||
| `trade/<tradeid>/open-order` | DELETE - Cancel open order for this trade.
|
||||
| `trade/<tradeid>/reload` | GET - Reload a trade from the Exchange. Only works in live, and can potentially help recover a trade that was manually sold on the exchange.
|
||||
| `trades/<tradeid>` | DELETE - Remove trade from the database. Tries to close open orders. Requires manual handling of this trade on the exchange.
|
||||
| `trades/<tradeid>/open-order` | DELETE - Cancel open order for this trade.
|
||||
| `trades/<tradeid>/reload` | GET - Reload a trade from the Exchange. Only works in live, and can potentially help recover a trade that was manually sold on the exchange.
|
||||
| `show_config` | Shows part of the current configuration with settings relevant to operation.
|
||||
| `logs` | Shows last log messages.
|
||||
| `status` | Lists all open trades.
|
||||
| `count` | Displays number of trades used and available.
|
||||
| `entries [pair]` | Shows profit statistics for each enter tag for the given pair (or all pairs if pair isn't given). Pair is optional.
| `exits [pair]` | Shows profit statistics for each exit reason for the given pair (or all pairs if pair isn't given). Pair is optional.
| `mix_tags [pair]` | Shows profit statistics for each combination of enter tag and exit reason for the given pair (or all pairs if pair isn't given). Pair is optional.
|
||||
| `locks` | Displays currently locked pairs.
|
||||
| `delete_lock <lock_id>` | Deletes (disables) the lock by id.
|
||||
| `profit` | Display a summary of your profit/loss from close trades and some stats about your performance.
|
||||
@@ -151,6 +154,8 @@ python3 scripts/rest_client.py --config rest_config.json <command> [optional par
|
||||
| `performance` | Show performance of each finished trade grouped by pair.
|
||||
| `balance` | Show account balance per currency.
|
||||
| `daily <n>` | Shows profit or loss per day, over the last n days (n defaults to 7).
|
||||
| `weekly <n>` | Shows profit or loss per week, over the last n weeks (n defaults to 4).
| `monthly <n>` | Shows profit or loss per month, over the last n months (n defaults to 3).
|
||||
| `stats` | Display a summary of profit / loss reasons as well as average holding times.
|
||||
| `whitelist` | Show the current whitelist.
|
||||
| `blacklist [pair]` | Show the current blacklist, or adds a pair to the blacklist.
|
||||
|
||||
@@ -164,6 +164,31 @@ E.g. If the `current_rate` is 200 USD, then returning `0.02` will set the stoplo
|
||||
During backtesting, `current_rate` (and `current_profit`) are provided against the candle's high (or low for short trades) - while the resulting stoploss is evaluated against the candle's low (or high for short trades).
|
||||
|
||||
The absolute value of the return value is used (the sign is ignored), so returning `0.05` or `-0.05` have the same result, a stoploss 5% below the current price.
|
||||
Returning None will be interpreted as "no desire to change", and is the only safe way to return when you'd like to not modify the stoploss.
|
||||
|
||||
Stoploss on exchange works similar to `trailing_stop`, and the stoploss on exchange is updated as configured in `stoploss_on_exchange_interval` ([More details about stoploss on exchange](stoploss.md#stop-loss-on-exchange-freqtrade)).
|
||||
|
||||
!!! Note "Use of dates"
|
||||
All time-based calculations should be done based on `current_time` - using `datetime.now()` or `datetime.utcnow()` is discouraged, as this will break backtesting support.
|
||||
|
||||
!!! Tip "Trailing stoploss"
|
||||
It's recommended to disable `trailing_stop` when using custom stoploss values. Both can work in tandem, but the trailing stop may move the stoploss higher even when your custom function would not, causing conflicting behavior.
|
||||
|
||||
### Adjust stoploss after position adjustments
|
||||
|
||||
Depending on your strategy, you may encounter the need to adjust the stoploss in both directions after a [position adjustment](#adjust-trade-position).
|
||||
For this, freqtrade will make an additional call with `after_fill=True` after an order fills, which will allow the strategy to move the stoploss in any direction (also widening the gap between stoploss and current price, which is otherwise forbidden).
|
||||
|
||||
!!! Note "backwards compatibility"
|
||||
This call will only be made if the `after_fill` parameter is part of the function definition of your `custom_stoploss` function.
|
||||
As such, this will not impact (and with that, surprise) existing, running strategies.
|
||||
|
||||
### Custom stoploss examples
|
||||
|
||||
The next section will show some examples on what's possible with the custom stoploss function.
|
||||
Of course, many more things are possible, and all examples can be combined at will.
|
||||
|
||||
#### Trailing stop via custom stoploss
|
||||
|
||||
To simulate a regular trailing stoploss of 4% (trailing 4% behind the maximum reached price) you would use the following very simple method:
|
||||
|
||||
@@ -179,7 +204,8 @@ class AwesomeStrategy(IStrategy):
|
||||
use_custom_stoploss = True
|
||||
|
||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
||||
current_rate: float, current_profit: float, **kwargs) -> float:
|
||||
current_rate: float, current_profit: float, after_fill: bool,
|
||||
**kwargs) -> Optional[float]:
|
||||
"""
|
||||
Custom stoploss logic, returning the new distance relative to current_rate (as ratio).
|
||||
e.g. returning -0.05 would create a stoploss 5% below current_rate.
|
||||
@@ -187,7 +213,7 @@ class AwesomeStrategy(IStrategy):
|
||||
|
||||
For full documentation please go to https://www.freqtrade.io/en/latest/strategy-advanced/
|
||||
|
||||
When not implemented by a strategy, returns the initial stoploss value
|
||||
When not implemented by a strategy, returns the initial stoploss value.
|
||||
Only called when use_custom_stoploss is set to True.
|
||||
|
||||
:param pair: Pair that's currently analyzed
|
||||
@@ -195,25 +221,13 @@ class AwesomeStrategy(IStrategy):
|
||||
:param current_time: datetime object, containing the current datetime
|
||||
:param current_rate: Rate, calculated based on pricing settings in exit_pricing.
|
||||
:param current_profit: Current profit (as ratio), calculated based on current_rate.
|
||||
:param after_fill: True if the stoploss is called after the order was filled.
|
||||
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
||||
:return float: New stoploss value, relative to the current rate
|
||||
:return float: New stoploss value, relative to the current_rate
|
||||
"""
|
||||
return -0.04
|
||||
```
|
||||
|
||||
Stoploss on exchange works similar to `trailing_stop`, and the stoploss on exchange is updated as configured in `stoploss_on_exchange_interval` ([More details about stoploss on exchange](stoploss.md#stop-loss-on-exchange-freqtrade)).
|
||||
|
||||
!!! Note "Use of dates"
|
||||
All time-based calculations should be done based on `current_time` - using `datetime.now()` or `datetime.utcnow()` is discouraged, as this will break backtesting support.
|
||||
|
||||
!!! Tip "Trailing stoploss"
|
||||
It's recommended to disable `trailing_stop` when using custom stoploss values. Both can work in tandem, but the trailing stop may move the stoploss higher even when your custom function would not, causing conflicting behavior.
|
||||
|
||||
### Custom stoploss examples
|
||||
|
||||
The next section will show some examples on what's possible with the custom stoploss function.
|
||||
Of course, many more things are possible, and all examples can be combined at will.
|
||||
|
||||
#### Time based trailing stop
|
||||
|
||||
Use the initial stoploss for the first 60 minutes, after this change to 10% trailing stoploss, and after 2 hours (120 minutes) we use a 5% trailing stoploss.
|
||||
@@ -229,14 +243,45 @@ class AwesomeStrategy(IStrategy):
|
||||
use_custom_stoploss = True
|
||||
|
||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
||||
current_rate: float, current_profit: float, **kwargs) -> float:
|
||||
current_rate: float, current_profit: float, after_fill: bool,
|
||||
**kwargs) -> Optional[float]:
|
||||
|
||||
# Make sure you have the longest interval first - these conditions are evaluated from top to bottom.
|
||||
if current_time - timedelta(minutes=120) > trade.open_date_utc:
|
||||
return -0.05
|
||||
elif current_time - timedelta(minutes=60) > trade.open_date_utc:
|
||||
return -0.10
|
||||
return 1
|
||||
return None
|
||||
```
|
||||
|
||||
#### Time based trailing stop with after-fill adjustments
|
||||
|
||||
Use the initial stoploss for the first 60 minutes, after this change to 10% trailing stoploss, and after 2 hours (120 minutes) we use a 5% trailing stoploss.
|
||||
If an additional order fills, set stoploss to -10% below the new `open_rate` ([Averaged across all entries](#position-adjust-calculations)).
|
||||
|
||||
``` python
|
||||
from datetime import datetime, timedelta
|
||||
from freqtrade.persistence import Trade
|
||||
|
||||
class AwesomeStrategy(IStrategy):
|
||||
|
||||
# ... populate_* methods
|
||||
|
||||
use_custom_stoploss = True
|
||||
|
||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
||||
current_rate: float, current_profit: float, after_fill: bool,
|
||||
**kwargs) -> Optional[float]:
|
||||
|
||||
if after_fill:
|
||||
# After an additional order, start with a stoploss of 10% below the new open rate
|
||||
return stoploss_from_open(0.10, current_profit, is_short=trade.is_short, leverage=trade.leverage)
|
||||
# Make sure you have the longest interval first - these conditions are evaluated from top to bottom.
|
||||
if current_time - timedelta(minutes=120) > trade.open_date_utc:
|
||||
return -0.05
|
||||
elif current_time - timedelta(minutes=60) > trade.open_date_utc:
|
||||
return -0.10
|
||||
return None
|
||||
```
|
||||
|
||||
#### Different stoploss per pair
|
||||
@@ -255,7 +300,8 @@ class AwesomeStrategy(IStrategy):
|
||||
use_custom_stoploss = True
|
||||
|
||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
||||
current_rate: float, current_profit: float, **kwargs) -> float:
|
||||
current_rate: float, current_profit: float, after_fill: bool,
|
||||
**kwargs) -> Optional[float]:
|
||||
|
||||
if pair in ('ETH/BTC', 'XRP/BTC'):
|
||||
return -0.10
|
||||
@@ -281,7 +327,8 @@ class AwesomeStrategy(IStrategy):
|
||||
use_custom_stoploss = True
|
||||
|
||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
||||
current_rate: float, current_profit: float, **kwargs) -> float:
|
||||
current_rate: float, current_profit: float, after_fill: bool,
|
||||
**kwargs) -> Optional[float]:
|
||||
|
||||
if current_profit < 0.04:
|
||||
return -1 # return a value bigger than the initial stoploss to keep using the initial stoploss
|
||||
@@ -314,7 +361,8 @@ class AwesomeStrategy(IStrategy):
|
||||
use_custom_stoploss = True
|
||||
|
||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
||||
current_rate: float, current_profit: float, **kwargs) -> float:
|
||||
current_rate: float, current_profit: float, after_fill: bool,
|
||||
**kwargs) -> Optional[float]:
|
||||
|
||||
# evaluate highest to lowest, so that highest possible stop is used
|
||||
if current_profit > 0.40:
|
||||
@@ -325,7 +373,7 @@ class AwesomeStrategy(IStrategy):
|
||||
return stoploss_from_open(0.07, current_profit, is_short=trade.is_short, leverage=trade.leverage)
|
||||
|
||||
# return maximum stoploss value, keeping current stoploss price unchanged
|
||||
return 1
|
||||
return None
|
||||
```
|
||||
|
||||
#### Custom stoploss using an indicator from dataframe example
|
||||
@@ -342,7 +390,8 @@ class AwesomeStrategy(IStrategy):
|
||||
use_custom_stoploss = True
|
||||
|
||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
||||
current_rate: float, current_profit: float, **kwargs) -> float:
|
||||
current_rate: float, current_profit: float, after_fill: bool,
|
||||
**kwargs) -> Optional[float]:
|
||||
|
||||
dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
|
||||
last_candle = dataframe.iloc[-1].squeeze()
|
||||
@@ -355,7 +404,7 @@ class AwesomeStrategy(IStrategy):
|
||||
return stoploss_from_absolute(stoploss_price, current_rate, is_short=trade.is_short)
|
||||
|
||||
# return maximum stoploss value, keeping current stoploss price unchanged
|
||||
return 1
|
||||
return None
|
||||
```
|
||||
|
||||
See [Dataframe access](strategy-advanced.md#dataframe-access) for more information about dataframe use in strategy callbacks.
|
||||
@@ -364,15 +413,89 @@ See [Dataframe access](strategy-advanced.md#dataframe-access) for more informati
|
||||
|
||||
#### Stoploss relative to open price
|
||||
|
||||
Stoploss values returned from `custom_stoploss()` always specify a percentage relative to `current_rate`. In order to set a stoploss relative to the *open* price, we need to use `current_profit` to calculate what percentage relative to the `current_rate` will give you the same result as if the percentage was specified from the open price.
|
||||
Stoploss values returned from `custom_stoploss()` must specify a percentage relative to `current_rate`, but sometimes you may want to specify a stoploss relative to the _entry_ price instead.
|
||||
`stoploss_from_open()` is a helper function to calculate a stoploss value that can be returned from `custom_stoploss` which will be equivalent to the desired trade profit above the entry point.
|
||||
|
||||
The helper function [`stoploss_from_open()`](strategy-customization.md#stoploss_from_open) can be used to convert from an open price relative stop, to a current price relative stop which can be returned from `custom_stoploss()`.
|
||||
??? Example "Returning a stoploss relative to the open price from the custom stoploss function"
|
||||
|
||||
Say the open price was $100, and `current_price` is $121 (`current_profit` will be `0.21`).
|
||||
|
||||
If we want a stop price at 7% above the open price we can call `stoploss_from_open(0.07, current_profit, False)` which will return `0.1157024793`. 11.57% below $121 is $107, which is the same as 7% above $100.
|
||||
|
||||
This function will consider leverage - so at 10x leverage, the actual stoploss would be 0.7% above $100 (0.7% * 10x = 7%).
|
||||
|
||||
|
||||
``` python
|
||||
|
||||
from datetime import datetime
|
||||
from freqtrade.persistence import Trade
|
||||
from freqtrade.strategy import IStrategy, stoploss_from_open
|
||||
|
||||
class AwesomeStrategy(IStrategy):
|
||||
|
||||
# ... populate_* methods
|
||||
|
||||
use_custom_stoploss = True
|
||||
|
||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
||||
current_rate: float, current_profit: float, after_fill: bool,
|
||||
**kwargs) -> Optional[float]:
|
||||
|
||||
# once the profit has risen above 10%, keep the stoploss at 7% above the open price
|
||||
if current_profit > 0.10:
|
||||
return stoploss_from_open(0.07, current_profit, is_short=trade.is_short, leverage=trade.leverage)
|
||||
|
||||
return 1
|
||||
|
||||
```
|
||||
|
||||
Full examples can be found in the [Custom stoploss](strategy-advanced.md#custom-stoploss) section of the Documentation.
|
||||
|
||||
!!! Note
|
||||
Providing invalid input to `stoploss_from_open()` may produce "CustomStoploss function did not return valid stoploss" warnings.
|
||||
This may happen if `current_profit` parameter is below specified `open_relative_stop`. Such situations may arise when closing trade
|
||||
is blocked by `confirm_trade_exit()` method. Warnings can be solved by never blocking stop loss sells by checking `exit_reason` in
|
||||
`confirm_trade_exit()`, or by using `return stoploss_from_open(...) or 1` idiom, which will request to not change stop loss when
|
||||
`current_profit < open_relative_stop`.
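A minimal sketch of the `or 1` idiom mentioned above, written as a strategy method and assuming the same imports as the surrounding examples (`stoploss_from_open`, `Optional`). Returning `None` is the more explicit way to leave the stoploss unchanged, but the idiom works because `stoploss_from_open()` returns `0` for invalid input:

``` python
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
                    current_rate: float, current_profit: float, after_fill: bool,
                    **kwargs) -> Optional[float]:
    # stoploss_from_open() returns 0 when current_profit is below the requested
    # open-relative stop; `or 1` then keeps the existing stoploss unchanged
    # instead of returning an invalid value.
    return stoploss_from_open(0.07, current_profit, is_short=trade.is_short,
                              leverage=trade.leverage) or 1
```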
#### Stoploss percentage from absolute price
|
||||
|
||||
Stoploss values returned from `custom_stoploss()` always specify a percentage relative to `current_rate`. In order to set a stoploss at a specified absolute price level, we need to calculate what percentage relative to the `current_rate` corresponds to that absolute stop price (`stop_rate`).
|
||||
|
||||
The helper function [`stoploss_from_absolute()`](strategy-customization.md#stoploss_from_absolute) can be used to convert from an absolute price, to a current price relative stop which can be returned from `custom_stoploss()`.
|
||||
The helper function `stoploss_from_absolute()` can be used to convert from an absolute price, to a current price relative stop which can be returned from `custom_stoploss()`.
|
||||
|
||||
??? Example "Returning a stoploss using absolute price from the custom stoploss function"
|
||||
|
||||
If we want to trail a stop price at 2xATR below current price we can call `stoploss_from_absolute(current_rate + (side * candle['atr'] * 2), current_rate, is_short=trade.is_short, leverage=trade.leverage)`.
|
||||
For futures, we need to adjust the direction (up or down), as well as adjust for leverage, since the [`custom_stoploss`](strategy-callbacks.md#custom-stoploss) callback returns the ["risk for this trade"](stoploss.md#stoploss-and-leverage) - not the relative price movement.
|
||||
|
||||
``` python
|
||||
|
||||
from datetime import datetime
|
||||
from freqtrade.persistence import Trade
|
||||
from freqtrade.strategy import IStrategy, stoploss_from_absolute, timeframe_to_prev_date
|
||||
|
||||
class AwesomeStrategy(IStrategy):
|
||||
|
||||
use_custom_stoploss = True
|
||||
|
||||
def populate_indicators_1h(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
|
||||
dataframe['atr'] = ta.ATR(dataframe, timeperiod=14)
|
||||
return dataframe
|
||||
|
||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
||||
current_rate: float, current_profit: float, after_fill: bool,
|
||||
**kwargs) -> Optional[float]:
|
||||
dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
|
||||
trade_date = timeframe_to_prev_date(self.timeframe, trade.open_date_utc)
|
||||
candle = dataframe.iloc[-1].squeeze()
|
||||
side = 1 if trade.is_short else -1
|
||||
return stoploss_from_absolute(current_rate + (side * candle['atr'] * 2),
|
||||
current_rate, is_short=trade.is_short,
|
||||
leverage=trade.leverage)
|
||||
|
||||
```
|
||||
|
||||
|
||||
---
|
||||
|
||||
@@ -387,6 +510,9 @@ Each of these methods are called right before placing an order on the exchange.
|
||||
!!! Note
|
||||
If your custom pricing function returns None or an invalid value, the price will fall back to `proposed_rate`, which is based on the regular pricing configuration.
|
||||
|
||||
!!! Note
|
||||
Using `custom_entry_price`, the Trade object will be available as soon as the first entry order associated with the trade is created; for the first entry, the `trade` parameter value will be `None`.
|
||||
|
||||
### Custom order entry and exit price example
|
||||
|
||||
``` python
|
||||
@@ -397,7 +523,7 @@ class AwesomeStrategy(IStrategy):
|
||||
|
||||
# ... populate_* methods
|
||||
|
||||
def custom_entry_price(self, pair: str, current_time: datetime, proposed_rate: float,
|
||||
def custom_entry_price(self, pair: str, trade: Optional['Trade'], current_time: datetime, proposed_rate: float,
|
||||
entry_tag: Optional[str], side: str, **kwargs) -> float:
|
||||
|
||||
dataframe, last_updated = self.dp.get_analyzed_dataframe(pair=pair,
|
||||
@@ -634,9 +760,9 @@ The `position_adjustment_enable` strategy property enables the usage of `adjust_
|
||||
For performance reasons, it's disabled by default and freqtrade will show a warning message on startup if enabled.
|
||||
`adjust_trade_position()` can be used to perform additional orders, for example to manage risk with DCA (Dollar Cost Averaging) or to increase or decrease positions.
|
||||
|
||||
`max_entry_position_adjustment` property is used to limit the number of additional buys per trade (on top of the first buy) that the bot can execute. By default, the value is -1 which means the bot have no limit on number of adjustment buys.
|
||||
The `max_entry_position_adjustment` property is used to limit the number of additional entries per trade (on top of the first entry order) that the bot can execute. By default, the value is -1, which means the bot has no limit on the number of adjustment entries.
|
||||
|
||||
The strategy is expected to return a stake_amount (in stake currency) between `min_stake` and `max_stake` if and when an additional buy order should be made (position is increased).
|
||||
The strategy is expected to return a stake_amount (in stake currency) between `min_stake` and `max_stake` if and when an additional entry order should be made (position is increased -> buy order for long trades, sell order for short trades).
|
||||
If there are not enough funds in the wallet (the return value is above `max_stake`) then the signal will be ignored.
|
||||
Additional orders also result in additional fees and those orders don't count towards `max_open_trades`.
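A minimal sketch of that return-value contract (conditions and thresholds here are purely illustrative; the `DigDeeperStrategy` example below shows a more complete implementation):

``` python
def adjust_trade_position(self, trade: 'Trade', current_time: datetime,
                          current_rate: float, current_profit: float,
                          min_stake: Optional[float], max_stake: float,
                          **kwargs) -> Optional[float]:
    # Only consider one additional entry, and only once the trade is 5% in the red.
    if current_profit > -0.05 or trade.nr_of_successful_entries > 1:
        return None
    # A positive stake amount (in stake currency) requests an additional entry;
    # it has to fall between min_stake and max_stake to be executed.
    return trade.stake_amount
```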
@@ -644,9 +770,11 @@ This callback is **not** called when there is an open order (either buy or sell)
|
||||
|
||||
`adjust_trade_position()` is called very frequently for the duration of a trade, so you must keep your implementation as performant as possible.
|
||||
|
||||
Additional Buys are ignored once you have reached the maximum amount of extra buys that you have set on `max_entry_position_adjustment`, but the callback is called anyway looking for partial exits.
|
||||
Additional entries are ignored once you have reached the maximum number of extra entries that you have set via `max_entry_position_adjustment`, but the callback is still called to check for partial exits.
|
||||
|
||||
Position adjustments will always be applied in the direction of the trade, so a positive value will always increase your position (negative values will decrease your position), no matter if it's a long or short trade. Modifications to leverage are not possible, and the stake-amount is assumed to be before applying leverage.
|
||||
Position adjustments will always be applied in the direction of the trade, so a positive value will always increase your position (negative values will decrease your position), no matter if it's a long or short trade.
|
||||
|
||||
Modifications to leverage are not possible, and the stake-amount returned is assumed to be before applying leverage.
|
||||
|
||||
!!! Note "About stake size"
|
||||
Using a fixed stake size means it will be the amount used for the first order, just like without position adjustment.
|
||||
@@ -700,7 +828,7 @@ class DigDeeperStrategy(IStrategy):
|
||||
"""
|
||||
Custom trade adjustment logic, returning the stake amount that a trade should be
|
||||
increased or decreased.
|
||||
This means extra buy or sell orders with additional fees.
|
||||
This means extra entry or exit orders with additional fees.
|
||||
Only called when `position_adjustment_enable` is set to True.
|
||||
|
||||
For full documentation please go to https://www.freqtrade.io/en/latest/strategy-advanced/
|
||||
@@ -709,8 +837,9 @@ class DigDeeperStrategy(IStrategy):
|
||||
|
||||
:param trade: trade object.
|
||||
:param current_time: datetime object, containing the current datetime
|
||||
:param current_rate: Current buy rate.
|
||||
:param current_profit: Current profit (as ratio), calculated based on current_rate.
|
||||
:param current_rate: Current entry rate (same as current_entry_profit)
|
||||
:param current_profit: Current profit (as ratio), calculated based on current_rate
|
||||
(same as current_entry_profit).
|
||||
:param min_stake: Minimal stake size allowed by exchange (for both entries and exits)
|
||||
:param max_stake: Maximum stake allowed (either through balance, or by exchange limits).
|
||||
:param current_entry_rate: Current rate using entry pricing.
|
||||
@@ -793,6 +922,8 @@ Returning any other price will cancel the existing order, and replace it with a
|
||||
The trade open-date (`trade.open_date_utc`) will remain at the time of the very first order placed.
|
||||
Please make sure to be aware of this - and, if necessary, adjust your logic in other callbacks to account for it and use the date of the first filled order instead.
|
||||
|
||||
If the cancellation of the original order fails, then the order will not be replaced - though the order will most likely have been canceled on exchange. Having this happen on initial entries will result in the deletion of the order, while on position adjustment orders, it'll result in the trade size remaining as is.
|
||||
|
||||
!!! Warning "Regular timeout"
|
||||
Entry `unfilledtimeout` mechanism (as well as `check_entry_timeout()`) takes precedence over this.
|
||||
Entry Orders that are cancelled via the above methods will not have this callback called. Be sure to update timeout values to match your expectations.
|
||||
|
||||
@@ -168,7 +168,9 @@ Most indicators have an instable startup period, in which they are either not av
|
||||
To account for this, the strategy can be assigned the `startup_candle_count` attribute.
|
||||
This should be set to the maximum number of candles that the strategy requires to calculate stable indicators. In the case where a user includes higher timeframes with informative pairs, the `startup_candle_count` does not necessarily change. The value is the maximum period (in candles) that any of the informative timeframes need to compute stable indicators.
|
||||
|
||||
In this example strategy, this should be set to 100 (`startup_candle_count = 100`), since the longest needed history is 100 candles.
|
||||
You can use [recursive-analysis](recursive-analysis.md) to check and find the correct `startup_candle_count` to be used.
|
||||
|
||||
In this example strategy, this should be set to 400 (`startup_candle_count = 400`), since the minimum needed history for ema100 calculation to make sure the value is correct is 400 candles.
|
||||
|
||||
``` python
|
||||
dataframe['ema100'] = ta.EMA(dataframe, timeperiod=100)
|
||||
@@ -193,11 +195,11 @@ Let's try to backtest 1 month (January 2019) of 5m candles using an example stra
|
||||
freqtrade backtesting --timerange 20190101-20190201 --timeframe 5m
|
||||
```
|
||||
|
||||
Assuming `startup_candle_count` is set to 100, backtesting knows it needs 100 candles to generate valid buy signals. It will load data from `20190101 - (100 * 5m)` - which is ~2018-12-31 15:30:00.
|
||||
Assuming `startup_candle_count` is set to 400, backtesting knows it needs 400 candles to generate valid buy signals. It will load data from `20190101 - (400 * 5m)` - which is ~2018-12-30 11:40:00.
|
||||
If this data is available, indicators will be calculated with this extended timerange. The unstable startup period (up to 2019-01-01 00:00:00) will then be removed before starting backtesting.
|
||||
|
||||
!!! Note
|
||||
If data for the startup period is not available, then the timerange will be adjusted to account for this startup period - so Backtesting would start at 2019-01-01 08:30:00.
|
||||
If data for the startup period is not available, then the timerange will be adjusted to account for this startup period - so Backtesting would start at 2019-01-02 09:20:00.
|
||||
|
||||
### Entry signal rules
|
||||
|
||||
@@ -264,7 +266,7 @@ def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFram
|
||||
### Exit signal rules
|
||||
|
||||
Edit the method `populate_exit_trend()` into your strategy file to update your exit strategy.
|
||||
The exit-signal is only used for exits if `use_exit_signal` is set to true in the configuration.
|
||||
The exit-signal can be suppressed by setting `use_exit_signal` to false in the configuration or strategy.
|
||||
`use_exit_signal` will not influence [signal collision rules](#colliding-signals) - which will still apply and can prevent entries.
|
||||
|
||||
It's important to always return the dataframe without removing/modifying the columns `"open", "high", "low", "close", "volume"`, otherwise these fields would contain something unexpected.
|
||||
@@ -484,17 +486,18 @@ for more information.
|
||||
|
||||
:param timeframe: Informative timeframe. Must always be equal or higher than strategy timeframe.
|
||||
:param asset: Informative asset, for example BTC, BTC/USDT, ETH/BTC. Do not specify to use
|
||||
current pair.
|
||||
current pair. Also supports limited pair format strings (see below)
|
||||
:param fmt: Column format (str) or column formatter (callable(name, asset, timeframe)). When not
|
||||
specified, defaults to:
|
||||
* {base}_{quote}_{column}_{timeframe} if asset is specified.
|
||||
* {base}_{quote}_{column}_{timeframe} if asset is specified.
|
||||
* {column}_{timeframe} if asset is not specified.
|
||||
Format string supports these format variables:
|
||||
* {asset} - full name of the asset, for example 'BTC/USDT'.
|
||||
Pair format supports these format variables:
|
||||
* {base} - base currency in lower case, for example 'eth'.
|
||||
* {BASE} - same as {base}, except in upper case.
|
||||
* {quote} - quote currency in lower case, for example 'usdt'.
|
||||
* {QUOTE} - same as {quote}, except in upper case.
|
||||
The format string additionally supports these variables.
|
||||
* {asset} - full name of the asset, for example 'BTC/USDT'.
|
||||
* {column} - name of dataframe column.
|
||||
* {timeframe} - timeframe of informative dataframe.
|
||||
:param ffill: ffill dataframe after merging informative pair.
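A short sketch of the pair format support described above (assuming the usual imports - `DataFrame`, `ta`, `IStrategy` - from the other examples; the column name shown is what the default format produces for a USDT pair):

``` python
from freqtrade.strategy import informative

class AwesomeStrategy(IStrategy):
    timeframe = '5m'

    # '{quote}' is resolved per traded pair, e.g. to 'BTC/USDT' when trading 'ETH/USDT'.
    @informative('1h', 'BTC/{quote}')
    def populate_indicators_btc_1h(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        dataframe['rsi'] = ta.RSI(dataframe, timeperiod=14)
        return dataframe

    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        # With the default format, the informative column arrives as e.g. 'btc_usdt_rsi_1h'.
        dataframe['btc_rsi_ok'] = dataframe['btc_usdt_rsi_1h'] > 50
        return dataframe
```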
@@ -586,6 +589,67 @@ for more information.
|
||||
will overwrite the previously defined method and not produce any errors due to limitations of the Python programming language. In such cases you will find that indicators
|
||||
created in earlier-defined methods are not available in the dataframe. Carefully review method names and make sure they are unique!
|
||||
|
||||
### *merge_informative_pair()*
|
||||
|
||||
This method helps you merge an informative pair to a regular dataframe without lookahead bias.
|
||||
It's there to help you merge the dataframe in a safe and consistent way.
|
||||
|
||||
Options:
|
||||
|
||||
- Rename the columns for you to create unique columns
|
||||
- Merge the dataframe without lookahead bias
|
||||
- Forward-fill (optional)
|
||||
|
||||
For a full sample, please refer to the [complete data provider example](#complete-data-provider-sample) below.
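As a minimal sketch of a typical call (assuming `ta` and `merge_informative_pair` are imported as in the other examples; the daily timeframe and the RSI column are only illustrative):

``` python
def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
    inf_tf = '1d'
    # Informative dataframe for the same pair on the daily timeframe
    informative = self.dp.get_pair_dataframe(pair=metadata['pair'], timeframe=inf_tf)
    informative['rsi'] = ta.RSI(informative, timeperiod=14)
    # Merge without lookahead bias - columns are renamed to e.g. 'rsi_1d'
    dataframe = merge_informative_pair(dataframe, informative, self.timeframe, inf_tf, ffill=True)
    return dataframe
```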
All columns of the informative dataframe will be available on the returning dataframe in a renamed fashion:
|
||||
|
||||
!!! Example "Column renaming"
|
||||
Assuming `inf_tf = '1d'` the resulting columns will be:
|
||||
|
||||
``` python
|
||||
'date', 'open', 'high', 'low', 'close', 'rsi' # from the original dataframe
|
||||
'date_1d', 'open_1d', 'high_1d', 'low_1d', 'close_1d', 'rsi_1d' # from the informative dataframe
|
||||
```
|
||||
|
||||
??? Example "Column renaming - 1h"
|
||||
Assuming `inf_tf = '1h'` the resulting columns will be:
|
||||
|
||||
``` python
|
||||
'date', 'open', 'high', 'low', 'close', 'rsi' # from the original dataframe
|
||||
'date_1h', 'open_1h', 'high_1h', 'low_1h', 'close_1h', 'rsi_1h' # from the informative dataframe
|
||||
```
|
||||
|
||||
??? Example "Custom implementation"
|
||||
A custom implementation for this is possible, and can be done as follows:
|
||||
|
||||
``` python
|
||||
|
||||
# Shift date by 1 candle
|
||||
# This is necessary since the data is always the "open date"
|
||||
# and a 15m candle starting at 12:15 should not know the close of the 1h candle from 12:00 to 13:00
|
||||
minutes = timeframe_to_minutes(inf_tf)
|
||||
# Only do this if the timeframes are different:
|
||||
informative['date_merge'] = informative["date"] + pd.to_timedelta(minutes, 'm')
|
||||
|
||||
# Rename columns to be unique
|
||||
informative.columns = [f"{col}_{inf_tf}" for col in informative.columns]
|
||||
# Assuming inf_tf = '1d' - then the columns will now be:
|
||||
# date_1d, open_1d, high_1d, low_1d, close_1d, rsi_1d
|
||||
|
||||
# Combine the 2 dataframes
|
||||
# all indicators on the informative sample MUST be calculated before this point
|
||||
dataframe = pd.merge(dataframe, informative, left_on='date', right_on=f'date_merge_{inf_tf}', how='left')
|
||||
# FFill to have the 1d value available in every row throughout the day.
|
||||
# Without this, comparisons would only work once per day.
|
||||
dataframe = dataframe.ffill()
|
||||
|
||||
```
|
||||
|
||||
!!! Warning "Informative timeframe < timeframe"
|
||||
Using informative timeframes smaller than the dataframe timeframe is not recommended with this method, as it will not use any of the additional information this would provide.
|
||||
To use the more detailed information properly, more advanced methods should be applied (which are out of scope for freqtrade documentation, as it'll depend on the respective need).
|
||||
|
||||
## Additional data (DataProvider)
|
||||
|
||||
The strategy provides access to the `DataProvider`. This allows you to get additional data to use in your strategy.
|
||||
@@ -810,146 +874,6 @@ class SampleStrategy(IStrategy):
|
||||
|
||||
***
|
||||
|
||||
## Helper functions
|
||||
|
||||
### *merge_informative_pair()*
|
||||
|
||||
This method helps you merge an informative pair to a regular dataframe without lookahead bias.
|
||||
It's there to help you merge the dataframe in a safe and consistent way.
|
||||
|
||||
Options:
|
||||
|
||||
- Rename the columns for you to create unique columns
|
||||
- Merge the dataframe without lookahead bias
|
||||
- Forward-fill (optional)
|
||||
|
||||
For a full sample, please refer to the [complete data provider example](#complete-data-provider-sample) below.
|
||||
|
||||
All columns of the informative dataframe will be available on the returning dataframe in a renamed fashion:
|
||||
|
||||
!!! Example "Column renaming"
|
||||
Assuming `inf_tf = '1d'` the resulting columns will be:
|
||||
|
||||
``` python
|
||||
'date', 'open', 'high', 'low', 'close', 'rsi' # from the original dataframe
|
||||
'date_1d', 'open_1d', 'high_1d', 'low_1d', 'close_1d', 'rsi_1d' # from the informative dataframe
|
||||
```
|
||||
|
||||
??? Example "Column renaming - 1h"
|
||||
Assuming `inf_tf = '1h'` the resulting columns will be:
|
||||
|
||||
``` python
|
||||
'date', 'open', 'high', 'low', 'close', 'rsi' # from the original dataframe
|
||||
'date_1h', 'open_1h', 'high_1h', 'low_1h', 'close_1h', 'rsi_1h' # from the informative dataframe
|
||||
```
|
||||
|
||||
??? Example "Custom implementation"
|
||||
A custom implementation for this is possible, and can be done as follows:
|
||||
|
||||
``` python
|
||||
|
||||
# Shift date by 1 candle
|
||||
# This is necessary since the data is always the "open date"
|
||||
# and a 15m candle starting at 12:15 should not know the close of the 1h candle from 12:00 to 13:00
|
||||
minutes = timeframe_to_minutes(inf_tf)
|
||||
# Only do this if the timeframes are different:
|
||||
informative['date_merge'] = informative["date"] + pd.to_timedelta(minutes, 'm')
|
||||
|
||||
# Rename columns to be unique
|
||||
informative.columns = [f"{col}_{inf_tf}" for col in informative.columns]
|
||||
# Assuming inf_tf = '1d' - then the columns will now be:
|
||||
# date_1d, open_1d, high_1d, low_1d, close_1d, rsi_1d
|
||||
|
||||
# Combine the 2 dataframes
|
||||
# all indicators on the informative sample MUST be calculated before this point
|
||||
dataframe = pd.merge(dataframe, informative, left_on='date', right_on=f'date_merge_{inf_tf}', how='left')
|
||||
# FFill to have the 1d value available in every row throughout the day.
|
||||
# Without this, comparisons would only work once per day.
|
||||
dataframe = dataframe.ffill()
|
||||
|
||||
```
|
||||
|
||||
!!! Warning "Informative timeframe < timeframe"
|
||||
Using informative timeframes smaller than the dataframe timeframe is not recommended with this method, as it will not use any of the additional information this would provide.
|
||||
To use the more detailed information properly, more advanced methods should be applied (which are out of scope for freqtrade documentation, as it'll depend on the respective need).
|
||||
|
||||
***
|
||||
|
||||
### *stoploss_from_open()*
|
||||
|
||||
Stoploss values returned from `custom_stoploss` must specify a percentage relative to `current_rate`, but sometimes you may want to specify a stoploss relative to the entry point instead. `stoploss_from_open()` is a helper function to calculate a stoploss value that can be returned from `custom_stoploss` which will be equivalent to the desired trade profit above the entry point.
|
||||
|
||||
??? Example "Returning a stoploss relative to the open price from the custom stoploss function"
|
||||
|
||||
Say the open price was $100, and `current_price` is $121 (`current_profit` will be `0.21`).
|
||||
|
||||
If we want a stop price at 7% above the open price we can call `stoploss_from_open(0.07, current_profit, False)` which will return `0.1157024793`. 11.57% below $121 is $107, which is the same as 7% above $100.
|
||||
|
||||
This function will consider leverage - so at 10x leverage, the actual stoploss would be 0.7% above $100 (0.7% * 10x = 7%).
|
||||
|
||||
|
||||
``` python
|
||||
|
||||
from datetime import datetime
|
||||
from freqtrade.persistence import Trade
|
||||
from freqtrade.strategy import IStrategy, stoploss_from_open
|
||||
|
||||
class AwesomeStrategy(IStrategy):
|
||||
|
||||
# ... populate_* methods
|
||||
|
||||
use_custom_stoploss = True
|
||||
|
||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
||||
current_rate: float, current_profit: float, **kwargs) -> float:
|
||||
|
||||
# once the profit has risen above 10%, keep the stoploss at 7% above the open price
|
||||
if current_profit > 0.10:
|
||||
return stoploss_from_open(0.07, current_profit, is_short=trade.is_short, leverage=trade.leverage)
|
||||
|
||||
return 1
|
||||
|
||||
```
|
||||
|
||||
Full examples can be found in the [Custom stoploss](strategy-advanced.md#custom-stoploss) section of the Documentation.
|
||||
|
||||
!!! Note
|
||||
Providing invalid input to `stoploss_from_open()` may produce "CustomStoploss function did not return valid stoploss" warnings.
|
||||
This may happen if `current_profit` parameter is below specified `open_relative_stop`. Such situations may arise when closing trade
|
||||
is blocked by `confirm_trade_exit()` method. Warnings can be solved by never blocking stop loss sells by checking `exit_reason` in
|
||||
`confirm_trade_exit()`, or by using `return stoploss_from_open(...) or 1` idiom, which will request to not change stop loss when
|
||||
`current_profit < open_relative_stop`.
|
||||
|
||||
### *stoploss_from_absolute()*
|
||||
|
||||
In some situations it may be confusing to deal with stops relative to current rate. Instead, you may define a stoploss level using an absolute price.
|
||||
|
||||
??? Example "Returning a stoploss using absolute price from the custom stoploss function"
|
||||
|
||||
If we want to trail a stop price at 2xATR below current price we can call `stoploss_from_absolute(current_rate - (candle['atr'] * 2), current_rate, is_short=trade.is_short)`.
|
||||
|
||||
``` python
|
||||
|
||||
from datetime import datetime
|
||||
from freqtrade.persistence import Trade
|
||||
from freqtrade.strategy import IStrategy, stoploss_from_absolute
|
||||
|
||||
class AwesomeStrategy(IStrategy):
|
||||
|
||||
use_custom_stoploss = True
|
||||
|
||||
def populate_indicators_1h(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
|
||||
dataframe['atr'] = ta.ATR(dataframe, timeperiod=14)
|
||||
return dataframe
|
||||
|
||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
||||
current_rate: float, current_profit: float, **kwargs) -> float:
|
||||
dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
|
||||
candle = dataframe.iloc[-1].squeeze()
|
||||
return stoploss_from_absolute(current_rate - (candle['atr'] * 2), current_rate, is_short=trade.is_short)
|
||||
|
||||
```
|
||||
|
||||
## Additional data (Wallets)
|
||||
|
||||
The strategy provides access to the `wallets` object. This contains the current balances on the exchange.
|
||||
@@ -1085,6 +1009,10 @@ The following lists some common patterns which should be avoided to prevent frus
|
||||
- don't use `dataframe['volume'].mean()`. This uses the full DataFrame for backtesting, including data from the future. Use `dataframe['volume'].rolling(<window>).mean()` instead
|
||||
- don't use `.resample('1h')`. This uses the left border of the interval, so it moves data from an hour to the start of the hour. Use `.resample('1h', label='right')` instead (see the sketch below).
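A small sketch contrasting the biased and safe variants of the patterns above (assuming a standard OHLCV dataframe with a `date` column, as used throughout these docs):

``` python
import pandas as pd


def add_safe_features(dataframe: pd.DataFrame) -> pd.DataFrame:
    # Lookahead-biased: a single mean over the whole dataframe leaks future data into every row.
    # dataframe['volume_mean'] = dataframe['volume'].mean()

    # Safe: each row only sees the current candle and the 23 candles before it.
    dataframe['volume_mean'] = dataframe['volume'].rolling(24).mean()

    # Safe resampling (shown for illustration): label buckets with their right edge,
    # so a row never carries information from candles that close after it.
    hourly_close = dataframe.set_index('date')['close'].resample('1h', label='right').last()

    return dataframe
```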
!!! Tip "Identifying problems"
|
||||
You may also want to check the 2 helper commands [lookahead-analysis](lookahead-analysis.md) and [recursive-analysis](recursive-analysis.md), which can each help you figure out problems with your strategy in different ways.
|
||||
Please treat them as what they are - helpers to identify the most common problems. A negative result does not guarantee that none of the above errors are present.
|
||||
|
||||
### Colliding signals
|
||||
|
||||
When conflicting signals collide (e.g. both `'enter_long'` and `'exit_long'` are 1), freqtrade will do nothing and ignore the entry signal. This avoids trades that enter and exit immediately. Obviously, this can potentially lead to missed entries.
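For example, the following (hypothetical) conditions - the first set in `populate_entry_trend()`, the second in `populate_exit_trend()` - overlap, so the entry would be ignored on any candle where both apply:

``` python
# In populate_entry_trend():
dataframe.loc[dataframe['rsi'] < 30, 'enter_long'] = 1

# In populate_exit_trend():
dataframe.loc[dataframe['rsi'] < 35, 'exit_long'] = 1

# A candle with rsi == 28 sets both enter_long and exit_long to 1 at the same time,
# which freqtrade treats as a colliding signal and ignores the entry.
```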
@@ -167,7 +167,7 @@ trades.groupby("pair")["exit_reason"].value_counts()
|
||||
# Plotting equity line (starting with 0 on day 1 and adding daily profit for each backtested day)
|
||||
|
||||
from freqtrade.configuration import Configuration
|
||||
from freqtrade.data.btanalysis import load_backtest_data, load_backtest_stats
|
||||
from freqtrade.data.btanalysis import load_backtest_stats
|
||||
import plotly.express as px
|
||||
import pandas as pd
|
||||
|
||||
@@ -178,20 +178,8 @@ import pandas as pd
|
||||
stats = load_backtest_stats(backtest_dir)
|
||||
strategy_stats = stats['strategy'][strategy]
|
||||
|
||||
dates = []
|
||||
profits = []
|
||||
for date_profit in strategy_stats['daily_profit']:
|
||||
dates.append(date_profit[0])
|
||||
profits.append(date_profit[1])
|
||||
|
||||
equity = 0
|
||||
equity_daily = []
|
||||
for daily_profit in profits:
|
||||
equity_daily.append(equity)
|
||||
equity += float(daily_profit)
|
||||
|
||||
|
||||
df = pd.DataFrame({'dates': dates,'equity_daily': equity_daily})
|
||||
df = pd.DataFrame(columns=['dates','equity'], data=strategy_stats['daily_profit'])
|
||||
df['equity_daily'] = df['equity'].cumsum()
|
||||
|
||||
fig = px.line(df, x="dates", y="equity_daily")
|
||||
fig.show()
|
||||
|
||||
@@ -280,7 +280,7 @@ After:
|
||||
|
||||
``` python hl_lines="3"
|
||||
class AwesomeStrategy(IStrategy):
|
||||
def custom_entry_price(self, pair: str, current_time: datetime, proposed_rate: float,
|
||||
def custom_entry_price(self, pair: str, trade: Optional[Trade], current_time: datetime, proposed_rate: float,
|
||||
entry_tag: Optional[str], side: str, **kwargs) -> float:
|
||||
return proposed_rate
|
||||
```
|
||||
@@ -311,12 +311,13 @@ After:
|
||||
|
||||
``` python hl_lines="5 7"
|
||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
||||
current_rate: float, current_profit: float, **kwargs) -> float:
|
||||
current_rate: float, current_profit: float, after_fill: bool,
|
||||
**kwargs) -> Optional[float]:
|
||||
# once the profit has risen above 10%, keep the stoploss at 7% above the open price
|
||||
if current_profit > 0.10:
|
||||
return stoploss_from_open(0.07, current_profit, is_short=trade.is_short)
|
||||
|
||||
return stoploss_from_absolute(current_rate - (candle['atr'] * 2), current_rate, is_short=trade.is_short)
|
||||
return stoploss_from_absolute(current_rate - (candle['atr'] * 2), current_rate, is_short=trade.is_short, leverage=trade.leverage)
|
||||
|
||||
|
||||
```
|
||||
@@ -569,7 +570,7 @@ def populate_any_indicators(
|
||||
```
|
||||
|
||||
1. Features - Move to `feature_engineering_expand_all`
|
||||
2. Basic features, not expanded across `include_periods_candles` - move to `feature_engineering_expand_basic()`.
|
||||
2. Basic features, not expanded across `indicator_periods_candles` - move to `feature_engineering_expand_basic()`.
|
||||
3. Standard features which should not be expanded - move to `feature_engineering_standard()`.
|
||||
4. Targets - Move this part to `set_freqai_targets()`.
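A rough skeleton of where each of the four parts ends up after the migration - the indicator choices are placeholders, and the exact method signatures may differ slightly between FreqAI versions:

``` python
import talib.abstract as ta

from freqtrade.strategy import IStrategy


class MyFreqaiStrategy(IStrategy):

    def feature_engineering_expand_all(self, dataframe, period, metadata, **kwargs):
        # 1. Features, expanded across `indicator_periods_candles`, timeframes and pairs
        dataframe["%-rsi-period"] = ta.RSI(dataframe, timeperiod=period)
        return dataframe

    def feature_engineering_expand_basic(self, dataframe, metadata, **kwargs):
        # 2. Basic features, expanded across timeframes/pairs but not across periods
        dataframe["%-pct-change"] = dataframe["close"].pct_change()
        return dataframe

    def feature_engineering_standard(self, dataframe, metadata, **kwargs):
        # 3. Standard features that should not be expanded at all
        dataframe["%-day_of_week"] = dataframe["date"].dt.dayofweek
        return dataframe

    def set_freqai_targets(self, dataframe, metadata, **kwargs):
        # 4. Targets (labels may intentionally look into the future here)
        dataframe["&-target"] = dataframe["close"].shift(-5) / dataframe["close"] - 1
        return dataframe
```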
|
||||
|
||||
|
||||
@@ -175,6 +175,7 @@ official commands. You can ask at any moment for help with `/help`.
|
||||
| `/status` | Lists all open trades
|
||||
| `/status <trade_id>` | Lists one or more specific trades. Separate multiple <trade_id> with a blank space.
|
||||
| `/status table` | List all open trades in a table format. Pending buy orders are marked with an asterisk (*). Pending sell orders are marked with a double asterisk (**).
|
||||
| `/order <trade_id>` | Lists orders of one or more specific trades. Separate multiple <trade_id> with a blank space.
|
||||
| `/trades [limit]` | List all recently closed trades in a table format.
|
||||
| `/count` | Displays number of trades used and available
|
||||
| `/locks` | Show currently locked pairs.
|
||||
|
||||
@@ -427,25 +427,33 @@ zb True missing opt: fetchMyTrades
|
||||
Use the `list-timeframes` subcommand to see the list of timeframes available for the exchange.
|
||||
|
||||
```
|
||||
usage: freqtrade list-timeframes [-h] [-v] [--logfile FILE] [-V] [-c PATH] [-d PATH] [--userdir PATH] [--exchange EXCHANGE] [-1]
|
||||
usage: freqtrade list-timeframes [-h] [-v] [--logfile FILE] [-V] [-c PATH]
|
||||
[-d PATH] [--userdir PATH]
|
||||
[--exchange EXCHANGE] [-1]
|
||||
|
||||
optional arguments:
|
||||
options:
|
||||
-h, --help show this help message and exit
|
||||
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no config is provided.
|
||||
--exchange EXCHANGE Exchange name. Only valid if no config is provided.
|
||||
-1, --one-column Print output in one column.
|
||||
|
||||
Common arguments:
|
||||
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
|
||||
--logfile FILE Log to the file specified. Special values are: 'syslog', 'journald'. See the documentation for more details.
|
||||
--logfile FILE, --log-file FILE
|
||||
Log to the file specified. Special values are:
|
||||
'syslog', 'journald'. See the documentation for more
|
||||
details.
|
||||
-V, --version show program's version number and exit
|
||||
-c PATH, --config PATH
|
||||
Specify configuration file (default: `config.json`). Multiple --config options may be used. Can be set to `-`
|
||||
to read config from stdin.
|
||||
-d PATH, --datadir PATH
|
||||
Specify configuration file (default:
|
||||
`userdir/config.json` or `config.json` whichever
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
|
||||
|
||||
```
|
||||
|
||||
* Example: see the timeframes for the 'binance' exchange, set in the configuration file:
|
||||
@@ -479,20 +487,17 @@ usage: freqtrade list-markets [-h] [-v] [--logfile FILE] [-V] [-c PATH]
|
||||
[-d PATH] [--userdir PATH] [--exchange EXCHANGE]
|
||||
[--print-list] [--print-json] [-1] [--print-csv]
|
||||
[--base BASE_CURRENCY [BASE_CURRENCY ...]]
|
||||
[--quote QUOTE_CURRENCY [QUOTE_CURRENCY ...]] [-a]
|
||||
[--trading-mode {spot,margin,futures}]
|
||||
|
||||
[--quote QUOTE_CURRENCY [QUOTE_CURRENCY ...]]
|
||||
[-a] [--trading-mode {spot,margin,futures}]
|
||||
usage: freqtrade list-pairs [-h] [-v] [--logfile FILE] [-V] [-c PATH]
|
||||
[-d PATH] [--userdir PATH] [--exchange EXCHANGE]
|
||||
[--print-list] [--print-json] [-1] [--print-csv]
|
||||
[--base BASE_CURRENCY [BASE_CURRENCY ...]]
|
||||
[--quote QUOTE_CURRENCY [QUOTE_CURRENCY ...]] [-a]
|
||||
[--trading-mode {spot,margin,futures}]
|
||||
|
||||
optional arguments:
|
||||
options:
|
||||
-h, --help show this help message and exit
|
||||
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
|
||||
config is provided.
|
||||
--exchange EXCHANGE Exchange name. Only valid if no config is provided.
|
||||
--print-list Print list of pairs or market symbols. By default data
|
||||
is printed in the tabular format.
|
||||
--print-json Print list of pairs or market symbols in JSON format.
|
||||
@@ -504,20 +509,22 @@ optional arguments:
|
||||
Specify quote currency(-ies). Space-separated list.
|
||||
-a, --all Print all pairs or market symbols. By default only
|
||||
active ones are shown.
|
||||
--trading-mode {spot,margin,futures}
|
||||
--trading-mode {spot,margin,futures}, --tradingmode {spot,margin,futures}
|
||||
Select Trading mode
|
||||
|
||||
Common arguments:
|
||||
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
|
||||
--logfile FILE Log to the file specified. Special values are:
|
||||
--logfile FILE, --log-file FILE
|
||||
Log to the file specified. Special values are:
|
||||
'syslog', 'journald'. See the documentation for more
|
||||
details.
|
||||
-V, --version show program's version number and exit
|
||||
-c PATH, --config PATH
|
||||
Specify configuration file (default: `config.json`).
|
||||
Multiple --config options may be used. Can be set to
|
||||
`-` to read config from stdin.
|
||||
-d PATH, --datadir PATH
|
||||
Specify configuration file (default:
|
||||
`userdir/config.json` or `config.json` whichever
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
@@ -532,7 +539,7 @@ Pairs/markets are sorted by its symbol string in the printed output.
|
||||
### Examples
|
||||
|
||||
* Print the list of active pairs with quote currency USD on exchange, specified in the default
|
||||
configuration file (i.e. pairs on the "Bittrex" exchange) in JSON format:
|
||||
configuration file (i.e. pairs on the "Binance" exchange) in JSON format:
|
||||
|
||||
```
|
||||
$ freqtrade list-pairs --quote USD --print-json
|
||||
@@ -564,7 +571,7 @@ usage: freqtrade test-pairlist [-h] [--userdir PATH] [-v] [-c PATH]
|
||||
[--quote QUOTE_CURRENCY [QUOTE_CURRENCY ...]]
|
||||
[-1] [--print-json] [--exchange EXCHANGE]
|
||||
|
||||
optional arguments:
|
||||
options:
|
||||
-h, --help show this help message and exit
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
@@ -578,8 +585,7 @@ optional arguments:
|
||||
Specify quote currency(-ies). Space-separated list.
|
||||
-1, --one-column Print output in one column.
|
||||
--print-json Print list of pairs or market symbols in JSON format.
|
||||
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
|
||||
config is provided.
|
||||
--exchange EXCHANGE Exchange name. Only valid if no config is provided.
|
||||
|
||||
```
|
||||
|
||||
|
||||
@@ -302,6 +302,7 @@ You can configure this as follows:
|
||||
```
|
||||
|
||||
The above represents the default (`exit_fill` and `entry_fill` are optional and will default to the above configuration) - modifications are obviously possible.
|
||||
To disable either of the two default values (`entry_fill` / `exit_fill`), you can assign them an empty array (`exit_fill: []`).
|
||||
|
||||
Available fields correspond to the fields for webhooks and are documented in the corresponding webhook sections.
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ git clone https://github.com/freqtrade/freqtrade.git
|
||||
|
||||
Install ta-lib according to the [ta-lib documentation](https://github.com/mrjbq7/ta-lib#windows).
|
||||
|
||||
As compiling from source on windows has heavy dependencies (requires a partial visual studio installation), Freqtrade provides these dependencies (in the binary wheel format) for the latest 3 Python versions (3.8, 3.9, 3.10 and 3.11) and for 64bit Windows.
|
||||
As compiling from source on windows has heavy dependencies (requires a partial visual studio installation), Freqtrade provides these dependencies (in the binary wheel format) for the latest 3 Python versions (3.9, 3.10 and 3.11) and for 64bit Windows.
|
||||
These Wheels are also used by CI running on windows, and are therefore tested together with freqtrade.
|
||||
|
||||
Other versions must be downloaded from the above link.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
""" Freqtrade bot """
|
||||
__version__ = '2023.8'
|
||||
__version__ = '2023.11'
|
||||
|
||||
if 'dev' in __version__:
|
||||
from pathlib import Path
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
__main__.py for Freqtrade
|
||||
To launch Freqtrade as a module
|
||||
|
||||
> python -m freqtrade (with Python >= 3.8)
|
||||
> python -m freqtrade (with Python >= 3.9)
|
||||
"""
|
||||
|
||||
from freqtrade import main
|
||||
|
||||
@@ -20,7 +20,8 @@ from freqtrade.commands.list_commands import (start_list_exchanges, start_list_f
|
||||
start_list_timeframes, start_show_trades)
|
||||
from freqtrade.commands.optimize_commands import (start_backtesting, start_backtesting_show,
|
||||
start_edge, start_hyperopt,
|
||||
start_lookahead_analysis)
|
||||
start_lookahead_analysis,
|
||||
start_recursive_analysis)
|
||||
from freqtrade.commands.pairlist_commands import start_test_pairlist
|
||||
from freqtrade.commands.plot_commands import start_plot_dataframe, start_plot_profit
|
||||
from freqtrade.commands.strategy_utils_commands import start_strategy_update
|
||||
|
||||
@@ -65,8 +65,8 @@ ARGS_BUILD_CONFIG = ["config"]
|
||||
|
||||
ARGS_BUILD_STRATEGY = ["user_data_dir", "strategy", "template"]
|
||||
|
||||
ARGS_CONVERT_DATA_TRADES = ["pairs", "format_from_trades", "format_to", "erase", "exchange"]
|
||||
ARGS_CONVERT_DATA = ["pairs", "format_from", "format_to", "erase", "exchange"]
|
||||
|
||||
ARGS_CONVERT_DATA_OHLCV = ARGS_CONVERT_DATA + ["timeframes", "trading_mode", "candle_types"]
|
||||
|
||||
ARGS_CONVERT_TRADES = ["pairs", "timeframes", "exchange", "dataformat_ohlcv", "dataformat_trades"]
|
||||
@@ -122,6 +122,8 @@ ARGS_LOOKAHEAD_ANALYSIS = [
|
||||
a for a in ARGS_BACKTEST if a not in ("position_stacking", "use_max_market_positions", 'cache')
|
||||
] + ["minimum_trade_amount", "targeted_trade_amount", "lookahead_analysis_exportfilename"]
|
||||
|
||||
ARGS_RECURSIVE_ANALYSIS = ["timeframe", "timerange", "dataformat_ohlcv", "pairs", "startup_candle"]
|
||||
|
||||
|
||||
class Arguments:
|
||||
"""
|
||||
@@ -206,8 +208,9 @@ class Arguments:
|
||||
start_list_strategies, start_list_timeframes,
|
||||
start_lookahead_analysis, start_new_config,
|
||||
start_new_strategy, start_plot_dataframe, start_plot_profit,
|
||||
start_show_trades, start_strategy_update,
|
||||
start_test_pairlist, start_trading, start_webserver)
|
||||
start_recursive_analysis, start_show_trades,
|
||||
start_strategy_update, start_test_pairlist, start_trading,
|
||||
start_webserver)
|
||||
|
||||
subparsers = self.parser.add_subparsers(dest='command',
|
||||
# Use custom message when no subhandler is added
|
||||
@@ -265,7 +268,7 @@ class Arguments:
|
||||
parents=[_common_parser],
|
||||
)
|
||||
convert_trade_data_cmd.set_defaults(func=partial(start_convert_data, ohlcv=False))
|
||||
self._build_args(optionlist=ARGS_CONVERT_DATA, parser=convert_trade_data_cmd)
|
||||
self._build_args(optionlist=ARGS_CONVERT_DATA_TRADES, parser=convert_trade_data_cmd)
|
||||
|
||||
# Add trades-to-ohlcv subcommand
|
||||
convert_trade_data_cmd = subparsers.add_parser(
|
||||
@@ -467,3 +470,14 @@ class Arguments:
|
||||
|
||||
self._build_args(optionlist=ARGS_LOOKAHEAD_ANALYSIS,
|
||||
parser=lookahead_analayis_cmd)
|
||||
|
||||
# Add recursive_analysis subcommand
|
||||
recursive_analayis_cmd = subparsers.add_parser(
|
||||
'recursive-analysis',
|
||||
help="Check for potential recursive formula issue.",
|
||||
parents=[_common_parser, _strategy_parser])
|
||||
|
||||
recursive_analayis_cmd.set_defaults(func=start_recursive_analysis)
|
||||
|
||||
self._build_args(optionlist=ARGS_RECURSIVE_ANALYSIS,
|
||||
parser=recursive_analayis_cmd)
|
||||
|
||||
@@ -108,7 +108,6 @@ def ask_user_config() -> Dict[str, Any]:
|
||||
"choices": [
|
||||
"binance",
|
||||
"binanceus",
|
||||
"bittrex",
|
||||
"gate",
|
||||
"huobi",
|
||||
"kraken",
|
||||
|
||||
@@ -421,6 +421,12 @@ AVAILABLE_CLI_OPTIONS = {
|
||||
'desired timeframe as specified as --timeframes/-t.',
|
||||
action='store_true',
|
||||
),
|
||||
"format_from_trades": Arg(
|
||||
'--format-from',
|
||||
help='Source format for data conversion.',
|
||||
choices=constants.AVAILABLE_DATAHANDLERS + ['kraken_csv'],
|
||||
required=True,
|
||||
),
|
||||
"format_from": Arg(
|
||||
'--format-from',
|
||||
help='Source format for data conversion.',
|
||||
@@ -705,4 +711,9 @@ AVAILABLE_CLI_OPTIONS = {
|
||||
help="Use this csv-filename to store lookahead-analysis-results",
|
||||
type=str
|
||||
),
|
||||
"startup_candle": Arg(
|
||||
'--startup-candle',
|
||||
help='Specify startup candles to be checked (`199`, `499`, `999`, `1999`).',
|
||||
nargs='+',
|
||||
),
|
||||
}
|
||||
|
||||
@@ -5,12 +5,12 @@ from typing import Any, Dict
|
||||
|
||||
from freqtrade.configuration import TimeRange, setup_utils_configuration
|
||||
from freqtrade.constants import DATETIME_PRINT_FORMAT, DL_DATA_TIMEFRAMES, Config
|
||||
from freqtrade.data.converter import convert_ohlcv_format, convert_trades_format
|
||||
from freqtrade.data.history import convert_trades_to_ohlcv, download_data_main
|
||||
from freqtrade.data.converter import (convert_ohlcv_format, convert_trades_format,
|
||||
convert_trades_to_ohlcv)
|
||||
from freqtrade.data.history import download_data_main
|
||||
from freqtrade.enums import RunMode, TradingMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exchange import timeframe_to_minutes
|
||||
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
|
||||
from freqtrade.resolvers import ExchangeResolver
|
||||
from freqtrade.util.binance_mig import migrate_binance_futures_data
|
||||
|
||||
@@ -53,28 +53,19 @@ def start_convert_trades(args: Dict[str, Any]) -> None:
|
||||
# Remove stake-currency to skip checks which are not relevant for datadownload
|
||||
config['stake_currency'] = ''
|
||||
|
||||
if 'pairs' not in config:
|
||||
raise OperationalException(
|
||||
"Downloading data requires a list of pairs. "
|
||||
"Please check the documentation on how to configure this.")
|
||||
if 'timeframes' not in config:
|
||||
config['timeframes'] = DL_DATA_TIMEFRAMES
|
||||
|
||||
# Init exchange
|
||||
exchange = ExchangeResolver.load_exchange(config, validate=False)
|
||||
# Manual validations of relevant settings
|
||||
if not config['exchange'].get('skip_pair_validation', False):
|
||||
exchange.validate_pairs(config['pairs'])
|
||||
expanded_pairs = expand_pairlist(config['pairs'], list(exchange.markets))
|
||||
|
||||
logger.info(f"About to Convert pairs: {expanded_pairs}, "
|
||||
f"intervals: {config['timeframes']} to {config['datadir']}")
|
||||
|
||||
for timeframe in config['timeframes']:
|
||||
exchange.validate_timeframes(timeframe)
|
||||
|
||||
# Convert downloaded trade data to different timeframes
|
||||
convert_trades_to_ohlcv(
|
||||
pairs=expanded_pairs, timeframes=config['timeframes'],
|
||||
pairs=config.get('pairs', []), timeframes=config['timeframes'],
|
||||
datadir=config['datadir'], timerange=timerange, erase=bool(config.get('erase')),
|
||||
data_format_ohlcv=config['dataformat_ohlcv'],
|
||||
data_format_trades=config['dataformat_trades'],
|
||||
@@ -94,7 +85,7 @@ def start_convert_data(args: Dict[str, Any], ohlcv: bool = True) -> None:
|
||||
erase=args['erase'])
|
||||
else:
|
||||
convert_trades_format(config,
|
||||
convert_from=args['format_from'], convert_to=args['format_to'],
|
||||
convert_from=args['format_from_trades'], convert_to=args['format_to'],
|
||||
erase=args['erase'])
|
||||
|
||||
|
||||
|
||||
@@ -140,7 +140,19 @@ def start_lookahead_analysis(args: Dict[str, Any]) -> None:
|
||||
:param args: Cli args from Arguments()
|
||||
:return: None
|
||||
"""
|
||||
from freqtrade.optimize.lookahead_analysis_helpers import LookaheadAnalysisSubFunctions
|
||||
from freqtrade.optimize.analysis.lookahead_helpers import LookaheadAnalysisSubFunctions
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
LookaheadAnalysisSubFunctions.start(config)
|
||||
|
||||
|
||||
def start_recursive_analysis(args: Dict[str, Any]) -> None:
|
||||
"""
|
||||
Start the backtest recursive tester script
|
||||
:param args: Cli args from Arguments()
|
||||
:return: None
|
||||
"""
|
||||
from freqtrade.optimize.analysis.recursive_helpers import RecursiveAnalysisSubFunctions
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
RecursiveAnalysisSubFunctions.start(config)
|
||||
|
||||
@@ -490,6 +490,9 @@ class Configuration:
|
||||
self._args_to_config(config, argname='lookahead_analysis_exportfilename',
|
||||
logstring='Path to store lookahead-analysis-results: {}')
|
||||
|
||||
self._args_to_config(config, argname='startup_candle',
|
||||
logstring='Startup candle to be used on recursive analysis: {}')
|
||||
|
||||
def _process_runmode(self, config: Config) -> None:
|
||||
|
||||
self._args_to_config(config, argname='dry_run',
|
||||
|
||||
@@ -33,7 +33,7 @@ HYPEROPT_LOSS_BUILTIN = ['ShortTradeDurHyperOptLoss', 'OnlyProfitHyperOptLoss',
|
||||
'MaxDrawDownHyperOptLoss', 'MaxDrawDownRelativeHyperOptLoss',
|
||||
'ProfitDrawDownHyperOptLoss']
|
||||
AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList', 'ProducerPairList', 'RemotePairList',
|
||||
'AgeFilter', 'OffsetFilter', 'PerformanceFilter',
|
||||
'AgeFilter', "FullTradesFilter", 'OffsetFilter', 'PerformanceFilter',
|
||||
'PrecisionFilter', 'PriceFilter', 'RangeStabilityFilter',
|
||||
'ShuffleFilter', 'SpreadFilter', 'VolatilityFilter']
|
||||
AVAILABLE_PROTECTIONS = ['CooldownPeriod',
|
||||
@@ -77,7 +77,8 @@ DL_DATA_TIMEFRAMES = ['1m', '5m']
|
||||
|
||||
ENV_VAR_PREFIX = 'FREQTRADE__'
|
||||
|
||||
NON_OPEN_EXCHANGE_STATES = ('cancelled', 'canceled', 'closed', 'expired')
|
||||
CANCELED_EXCHANGE_STATES = ('cancelled', 'canceled', 'expired')
|
||||
NON_OPEN_EXCHANGE_STATES = CANCELED_EXCHANGE_STATES + ('closed',)
|
||||
|
||||
# Define decimals per coin for outputs
|
||||
# Only used for outputs.
|
||||
@@ -177,6 +178,11 @@ CONF_SCHEMA = {
|
||||
'minimum_trade_amount': {'type': 'number', 'default': 10},
|
||||
'targeted_trade_amount': {'type': 'number', 'default': 20},
|
||||
'lookahead_analysis_exportfilename': {'type': 'string'},
|
||||
'startup_candle': {
|
||||
'type': 'array',
|
||||
'uniqueItems': True,
|
||||
'default': [199, 399, 499, 999, 1999],
|
||||
},
|
||||
'liquidation_buffer': {'type': 'number', 'minimum': 0.0, 'maximum': 0.99},
|
||||
'backtest_breakdown': {
|
||||
'type': 'array',
|
||||
@@ -688,6 +694,7 @@ CANCEL_REASON = {
|
||||
"CANCELLED_ON_EXCHANGE": "cancelled on exchange",
|
||||
"FORCE_EXIT": "forcesold",
|
||||
"REPLACE": "cancelled to be replaced by new limit order",
|
||||
"REPLACE_FAILED": "failed to replace order, deleting Trade",
|
||||
"USER_CANCEL": "user requested order cancel"
|
||||
}
|
||||
|
||||
@@ -709,3 +716,6 @@ Config = Dict[str, Any]
|
||||
# Exchange part of the configuration.
|
||||
ExchangeConfig = Dict[str, Any]
|
||||
IntOrInf = float
|
||||
|
||||
|
||||
EntryExecuteMode = Literal['initial', 'pos_adjust', 'replace']
|
||||
|
||||
28
freqtrade/data/converter/__init__.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from freqtrade.data.converter.converter import (clean_ohlcv_dataframe, convert_ohlcv_format,
|
||||
ohlcv_fill_up_missing_data, ohlcv_to_dataframe,
|
||||
order_book_to_dataframe, reduce_dataframe_footprint,
|
||||
trim_dataframe, trim_dataframes)
|
||||
from freqtrade.data.converter.trade_converter import (convert_trades_format,
|
||||
convert_trades_to_ohlcv, trades_convert_types,
|
||||
trades_df_remove_duplicates,
|
||||
trades_dict_to_list, trades_list_to_df,
|
||||
trades_to_ohlcv)
|
||||
|
||||
|
||||
__all__ = [
|
||||
'clean_ohlcv_dataframe',
|
||||
'convert_ohlcv_format',
|
||||
'ohlcv_fill_up_missing_data',
|
||||
'ohlcv_to_dataframe',
|
||||
'order_book_to_dataframe',
|
||||
'reduce_dataframe_footprint',
|
||||
'trim_dataframe',
|
||||
'trim_dataframes',
|
||||
'convert_trades_format',
|
||||
'convert_trades_to_ohlcv',
|
||||
'trades_convert_types',
|
||||
'trades_df_remove_duplicates',
|
||||
'trades_dict_to_list',
|
||||
'trades_list_to_df',
|
||||
'trades_to_ohlcv',
|
||||
]
|
||||
@@ -2,14 +2,13 @@
|
||||
Functions to convert data from one format to another
|
||||
"""
|
||||
import logging
|
||||
from typing import Dict, List
|
||||
from typing import Dict
|
||||
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
from pandas import DataFrame, to_datetime
|
||||
|
||||
from freqtrade.constants import (DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TRADES_DTYPES,
|
||||
Config, TradeList)
|
||||
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, Config
|
||||
from freqtrade.enums import CandleType, TradingMode
|
||||
|
||||
|
||||
@@ -105,7 +104,7 @@ def ohlcv_fill_up_missing_data(dataframe: DataFrame, timeframe: str, pair: str)
|
||||
df = dataframe.resample(resample_interval, on='date').agg(ohlcv_dict)
|
||||
|
||||
# Forwardfill close for missing columns
|
||||
df['close'] = df['close'].fillna(method='ffill')
|
||||
df['close'] = df['close'].ffill()
|
||||
# Use close for "open, high, low"
|
||||
df.loc[:, ['open', 'high', 'low']] = df[['open', 'high', 'low']].fillna(
|
||||
value={'open': df['close'],
|
||||
@@ -194,97 +193,6 @@ def order_book_to_dataframe(bids: list, asks: list) -> DataFrame:
|
||||
return frame
|
||||
|
||||
|
||||
def trades_df_remove_duplicates(trades: pd.DataFrame) -> pd.DataFrame:
|
||||
"""
|
||||
Removes duplicates from the trades DataFrame.
|
||||
Uses pandas.DataFrame.drop_duplicates to remove duplicates based on the 'timestamp' column.
|
||||
:param trades: DataFrame with the columns constants.DEFAULT_TRADES_COLUMNS
|
||||
:return: DataFrame with duplicates removed based on the 'timestamp' column
|
||||
"""
|
||||
return trades.drop_duplicates(subset=['timestamp', 'id'])
|
||||
|
||||
|
||||
def trades_dict_to_list(trades: List[Dict]) -> TradeList:
|
||||
"""
|
||||
Convert fetch_trades result into a List (to be more memory efficient).
|
||||
:param trades: List of trades, as returned by ccxt.fetch_trades.
|
||||
:return: List of Lists, with constants.DEFAULT_TRADES_COLUMNS as columns
|
||||
"""
|
||||
return [[t[col] for col in DEFAULT_TRADES_COLUMNS] for t in trades]
|
||||
|
||||
|
||||
def trades_convert_types(trades: DataFrame) -> DataFrame:
|
||||
"""
|
||||
Convert Trades dtypes and add 'date' column
|
||||
"""
|
||||
trades = trades.astype(TRADES_DTYPES)
|
||||
trades['date'] = to_datetime(trades['timestamp'], unit='ms', utc=True)
|
||||
return trades
|
||||
|
||||
|
||||
def trades_list_to_df(trades: TradeList, convert: bool = True):
|
||||
"""
|
||||
convert trades list to dataframe
|
||||
:param trades: List of Lists with constants.DEFAULT_TRADES_COLUMNS as columns
|
||||
"""
|
||||
if not trades:
|
||||
df = DataFrame(columns=DEFAULT_TRADES_COLUMNS)
|
||||
else:
|
||||
df = DataFrame(trades, columns=DEFAULT_TRADES_COLUMNS)
|
||||
|
||||
if convert:
|
||||
df = trades_convert_types(df)
|
||||
|
||||
return df
|
||||
|
||||
|
||||
def trades_to_ohlcv(trades: DataFrame, timeframe: str) -> DataFrame:
|
||||
"""
|
||||
Converts trades list to OHLCV list
|
||||
:param trades: List of trades, as returned by ccxt.fetch_trades.
|
||||
:param timeframe: Timeframe to resample data to
|
||||
:return: OHLCV Dataframe.
|
||||
:raises: ValueError if no trades are provided
|
||||
"""
|
||||
from freqtrade.exchange import timeframe_to_minutes
|
||||
timeframe_minutes = timeframe_to_minutes(timeframe)
|
||||
if trades.empty:
|
||||
raise ValueError('Trade-list empty.')
|
||||
df = trades.set_index('date', drop=True)
|
||||
|
||||
df_new = df['price'].resample(f'{timeframe_minutes}min').ohlc()
|
||||
df_new['volume'] = df['amount'].resample(f'{timeframe_minutes}min').sum()
|
||||
df_new['date'] = df_new.index
|
||||
# Drop 0 volume rows
|
||||
df_new = df_new.dropna()
|
||||
return df_new.loc[:, DEFAULT_DATAFRAME_COLUMNS]
|
||||
|
||||
|
||||
def convert_trades_format(config: Config, convert_from: str, convert_to: str, erase: bool):
|
||||
"""
|
||||
Convert trades from one format to another format.
|
||||
:param config: Config dictionary
|
||||
:param convert_from: Source format
|
||||
:param convert_to: Target format
|
||||
:param erase: Erase source data (does not apply if source and target format are identical)
|
||||
"""
|
||||
from freqtrade.data.history.idatahandler import get_datahandler
|
||||
src = get_datahandler(config['datadir'], convert_from)
|
||||
trg = get_datahandler(config['datadir'], convert_to)
|
||||
|
||||
if 'pairs' not in config:
|
||||
config['pairs'] = src.trades_get_pairs(config['datadir'])
|
||||
logger.info(f"Converting trades for {config['pairs']}")
|
||||
|
||||
for pair in config['pairs']:
|
||||
data = src.trades_load(pair=pair)
|
||||
logger.info(f"Converting {len(data)} trades for {pair}")
|
||||
trg.trades_store(pair, data)
|
||||
if erase and convert_from != convert_to:
|
||||
logger.info(f"Deleting source Trade data for {pair}.")
|
||||
src.trades_purge(pair=pair)
|
||||
|
||||
|
||||
def convert_ohlcv_format(
|
||||
config: Config,
|
||||
convert_from: str,
|
||||
155
freqtrade/data/converter/trade_converter.py
Normal file
@@ -0,0 +1,155 @@
|
||||
"""
|
||||
Functions to convert data from one format to another
|
||||
"""
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Dict, List
|
||||
|
||||
import pandas as pd
|
||||
from pandas import DataFrame, to_datetime
|
||||
|
||||
from freqtrade.configuration import TimeRange
|
||||
from freqtrade.constants import (DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TRADES_DTYPES,
|
||||
Config, TradeList)
|
||||
from freqtrade.enums import CandleType
|
||||
from freqtrade.exceptions import OperationalException
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def trades_df_remove_duplicates(trades: pd.DataFrame) -> pd.DataFrame:
|
||||
"""
|
||||
Removes duplicates from the trades DataFrame.
|
||||
Uses pandas.DataFrame.drop_duplicates to remove duplicates based on the 'timestamp' column.
|
||||
:param trades: DataFrame with the columns constants.DEFAULT_TRADES_COLUMNS
|
||||
:return: DataFrame with duplicates removed based on the 'timestamp' column
|
||||
"""
|
||||
return trades.drop_duplicates(subset=['timestamp', 'id'])
|
||||
|
||||
|
||||
def trades_dict_to_list(trades: List[Dict]) -> TradeList:
|
||||
"""
|
||||
Convert fetch_trades result into a List (to be more memory efficient).
|
||||
:param trades: List of trades, as returned by ccxt.fetch_trades.
|
||||
:return: List of Lists, with constants.DEFAULT_TRADES_COLUMNS as columns
|
||||
"""
|
||||
return [[t[col] for col in DEFAULT_TRADES_COLUMNS] for t in trades]
|
||||
|
||||
|
||||
def trades_convert_types(trades: DataFrame) -> DataFrame:
|
||||
"""
|
||||
Convert Trades dtypes and add 'date' column
|
||||
"""
|
||||
trades = trades.astype(TRADES_DTYPES)
|
||||
trades['date'] = to_datetime(trades['timestamp'], unit='ms', utc=True)
|
||||
return trades
|
||||
|
||||
|
||||
def trades_list_to_df(trades: TradeList, convert: bool = True):
|
||||
"""
|
||||
convert trades list to dataframe
|
||||
:param trades: List of Lists with constants.DEFAULT_TRADES_COLUMNS as columns
|
||||
"""
|
||||
if not trades:
|
||||
df = DataFrame(columns=DEFAULT_TRADES_COLUMNS)
|
||||
else:
|
||||
df = DataFrame(trades, columns=DEFAULT_TRADES_COLUMNS)
|
||||
|
||||
if convert:
|
||||
df = trades_convert_types(df)
|
||||
|
||||
return df
|
||||
|
||||
|
||||
def trades_to_ohlcv(trades: DataFrame, timeframe: str) -> DataFrame:
|
||||
"""
|
||||
Converts trades list to OHLCV list
|
||||
:param trades: List of trades, as returned by ccxt.fetch_trades.
|
||||
:param timeframe: Timeframe to resample data to
|
||||
:return: OHLCV Dataframe.
|
||||
:raises: ValueError if no trades are provided
|
||||
"""
|
||||
from freqtrade.exchange import timeframe_to_minutes
|
||||
timeframe_minutes = timeframe_to_minutes(timeframe)
|
||||
if trades.empty:
|
||||
raise ValueError('Trade-list empty.')
|
||||
df = trades.set_index('date', drop=True)
|
||||
|
||||
df_new = df['price'].resample(f'{timeframe_minutes}min').ohlc()
|
||||
df_new['volume'] = df['amount'].resample(f'{timeframe_minutes}min').sum()
|
||||
df_new['date'] = df_new.index
|
||||
# Drop 0 volume rows
|
||||
df_new = df_new.dropna()
|
||||
return df_new.loc[:, DEFAULT_DATAFRAME_COLUMNS]
|
||||
|
||||
|
||||
def convert_trades_to_ohlcv(
|
||||
pairs: List[str],
|
||||
timeframes: List[str],
|
||||
datadir: Path,
|
||||
timerange: TimeRange,
|
||||
erase: bool = False,
|
||||
data_format_ohlcv: str = 'feather',
|
||||
data_format_trades: str = 'feather',
|
||||
candle_type: CandleType = CandleType.SPOT
|
||||
) -> None:
|
||||
"""
|
||||
Convert stored trades data to ohlcv data
|
||||
"""
|
||||
from freqtrade.data.history.idatahandler import get_datahandler
|
||||
data_handler_trades = get_datahandler(datadir, data_format=data_format_trades)
|
||||
data_handler_ohlcv = get_datahandler(datadir, data_format=data_format_ohlcv)
|
||||
if not pairs:
|
||||
pairs = data_handler_trades.trades_get_pairs(datadir)
|
||||
|
||||
logger.info(f"About to convert pairs: '{', '.join(pairs)}', "
|
||||
f"intervals: '{', '.join(timeframes)}' to {datadir}")
|
||||
|
||||
for pair in pairs:
|
||||
trades = data_handler_trades.trades_load(pair)
|
||||
for timeframe in timeframes:
|
||||
if erase:
|
||||
if data_handler_ohlcv.ohlcv_purge(pair, timeframe, candle_type=candle_type):
|
||||
logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
|
||||
try:
|
||||
ohlcv = trades_to_ohlcv(trades, timeframe)
|
||||
# Store ohlcv
|
||||
data_handler_ohlcv.ohlcv_store(pair, timeframe, data=ohlcv, candle_type=candle_type)
|
||||
except ValueError:
|
||||
logger.exception(f'Could not convert {pair} to OHLCV.')
|
||||
|
||||
|
||||
def convert_trades_format(config: Config, convert_from: str, convert_to: str, erase: bool):
|
||||
"""
|
||||
Convert trades from one format to another format.
|
||||
:param config: Config dictionary
|
||||
:param convert_from: Source format
|
||||
:param convert_to: Target format
|
||||
:param erase: Erase source data (does not apply if source and target format are identical)
|
||||
"""
|
||||
if convert_from == 'kraken_csv':
|
||||
if config['exchange']['name'] != 'kraken':
|
||||
raise OperationalException(
|
||||
'Converting from csv is only supported for kraken.'
|
||||
'Please refer to the documentation for details about this special mode.'
|
||||
)
|
||||
from freqtrade.data.converter.trade_converter_kraken import import_kraken_trades_from_csv
|
||||
import_kraken_trades_from_csv(config, convert_to)
|
||||
return
|
||||
|
||||
from freqtrade.data.history.idatahandler import get_datahandler
|
||||
src = get_datahandler(config['datadir'], convert_from)
|
||||
trg = get_datahandler(config['datadir'], convert_to)
|
||||
|
||||
if 'pairs' not in config:
|
||||
config['pairs'] = src.trades_get_pairs(config['datadir'])
|
||||
logger.info(f"Converting trades for {config['pairs']}")
|
||||
|
||||
for pair in config['pairs']:
|
||||
data = src.trades_load(pair=pair)
|
||||
logger.info(f"Converting {len(data)} trades for {pair}")
|
||||
trg.trades_store(pair, data)
|
||||
if erase and convert_from != convert_to:
|
||||
logger.info(f"Deleting source Trade data for {pair}.")
|
||||
src.trades_purge(pair=pair)
|
||||
70
freqtrade/data/converter/trade_converter_kraken.py
Normal file
@@ -0,0 +1,70 @@
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
import pandas as pd
|
||||
|
||||
from freqtrade.constants import DATETIME_PRINT_FORMAT, DEFAULT_TRADES_COLUMNS, Config
|
||||
from freqtrade.data.converter.trade_converter import (trades_convert_types,
|
||||
trades_df_remove_duplicates)
|
||||
from freqtrade.data.history.idatahandler import get_datahandler
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.resolvers import ExchangeResolver
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
KRAKEN_CSV_TRADE_COLUMNS = ['timestamp', 'price', 'amount']
|
||||
|
||||
|
||||
def import_kraken_trades_from_csv(config: Config, convert_to: str):
|
||||
"""
|
||||
Import kraken trades from csv
|
||||
"""
|
||||
if config['exchange']['name'] != 'kraken':
|
||||
raise OperationalException('This function is only for the kraken exchange.')
|
||||
|
||||
datadir: Path = config['datadir']
|
||||
data_handler = get_datahandler(datadir, data_format=convert_to)
|
||||
|
||||
tradesdir: Path = config['datadir'] / 'trades_csv'
|
||||
exchange = ExchangeResolver.load_exchange(config, validate=False)
|
||||
# iterate through directories in this directory
|
||||
data_symbols = {p.stem for p in tradesdir.rglob('*.csv')}
|
||||
|
||||
# create pair/filename mapping
|
||||
markets = {
|
||||
(m['symbol'], m['altname']) for m in exchange.markets.values()
|
||||
if m.get('altname') in data_symbols
|
||||
}
|
||||
logger.info(f"Found csv files for {', '.join(data_symbols)}.")
|
||||
|
||||
for pair, name in markets:
|
||||
dfs = []
|
||||
# Load and combine all csv files for this pair
|
||||
for f in tradesdir.rglob(f"{name}.csv"):
|
||||
df = pd.read_csv(f, names=KRAKEN_CSV_TRADE_COLUMNS)
|
||||
dfs.append(df)
|
||||
|
||||
# Load existing trades data
|
||||
if not dfs:
|
||||
# edgecase, can only happen if the file was deleted between the above glob and here
|
||||
logger.info(f"No data found for pair {pair}")
|
||||
continue
|
||||
|
||||
trades = pd.concat(dfs, ignore_index=True)
|
||||
|
||||
trades.loc[:, 'timestamp'] = trades['timestamp'] * 1e3
|
||||
trades.loc[:, 'cost'] = trades['price'] * trades['amount']
|
||||
for col in DEFAULT_TRADES_COLUMNS:
|
||||
if col not in trades.columns:
|
||||
trades[col] = ''
|
||||
|
||||
trades = trades[DEFAULT_TRADES_COLUMNS]
|
||||
trades = trades_convert_types(trades)
|
||||
|
||||
trades_df = trades_df_remove_duplicates(trades)
|
||||
logger.info(f"{pair}: {len(trades_df)} trades, from "
|
||||
f"{trades_df['date'].min():{DATETIME_PRINT_FORMAT}} to "
|
||||
f"{trades_df['date'].max():{DATETIME_PRINT_FORMAT}}")
|
||||
|
||||
data_handler.trades_store(pair, trades_df)
|
||||
@@ -119,8 +119,15 @@ def _do_group_table_output(bigdf, glist, csv_path: Path, to_csv=False, ):
|
||||
new['avg_win'] = (new['profit_abs_wins'] / new.iloc[:, 1]).fillna(0)
|
||||
new['avg_loss'] = (new['profit_abs_loss'] / new.iloc[:, 2]).fillna(0)
|
||||
|
||||
new.columns = ['total_num_buys', 'wins', 'losses', 'profit_abs_wins', 'profit_abs_loss',
|
||||
'profit_tot', 'wl_ratio_pct', 'avg_win', 'avg_loss']
|
||||
new['exp_ratio'] = (
|
||||
(
|
||||
(1 + (new['avg_win'] / abs(new['avg_loss']))) * (new['wl_ratio_pct'] / 100)
|
||||
) - 1).fillna(0)
|
||||
|
||||
new.columns = ['total_num_buys', 'wins', 'losses',
|
||||
'profit_abs_wins', 'profit_abs_loss',
|
||||
'profit_tot', 'wl_ratio_pct',
|
||||
'avg_win', 'avg_loss', 'exp_ratio']
|
||||
|
||||
sortcols = ['total_num_buys']
|
||||
|
||||
@@ -204,7 +211,9 @@ def prepare_results(analysed_trades, stratname,
|
||||
timerange=None):
|
||||
res_df = pd.DataFrame()
|
||||
for pair, trades in analysed_trades[stratname].items():
|
||||
res_df = pd.concat([res_df, trades], ignore_index=True)
|
||||
if (trades.shape[0] > 0):
|
||||
trades.dropna(subset=['close_date'], inplace=True)
|
||||
res_df = pd.concat([res_df, trades], ignore_index=True)
|
||||
|
||||
res_df = _select_rows_within_dates(res_df, timerange)
|
||||
|
||||
|
||||
@@ -9,9 +9,9 @@ from pandas import DataFrame, concat
|
||||
from freqtrade.configuration import TimeRange
|
||||
from freqtrade.constants import (DATETIME_PRINT_FORMAT, DEFAULT_DATAFRAME_COLUMNS,
|
||||
DL_DATA_TIMEFRAMES, Config)
|
||||
from freqtrade.data.converter import (clean_ohlcv_dataframe, ohlcv_to_dataframe,
|
||||
trades_df_remove_duplicates, trades_list_to_df,
|
||||
trades_to_ohlcv)
|
||||
from freqtrade.data.converter import (clean_ohlcv_dataframe, convert_trades_to_ohlcv,
|
||||
ohlcv_to_dataframe, trades_df_remove_duplicates,
|
||||
trades_list_to_df)
|
||||
from freqtrade.data.history.idatahandler import IDataHandler, get_datahandler
|
||||
from freqtrade.enums import CandleType
|
||||
from freqtrade.exceptions import OperationalException
|
||||
@@ -429,36 +429,6 @@ def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir:
|
||||
return pairs_not_available
|
||||
|
||||
|
||||
def convert_trades_to_ohlcv(
|
||||
pairs: List[str],
|
||||
timeframes: List[str],
|
||||
datadir: Path,
|
||||
timerange: TimeRange,
|
||||
erase: bool = False,
|
||||
data_format_ohlcv: str = 'feather',
|
||||
data_format_trades: str = 'feather',
|
||||
candle_type: CandleType = CandleType.SPOT
|
||||
) -> None:
|
||||
"""
|
||||
Convert stored trades data to ohlcv data
|
||||
"""
|
||||
data_handler_trades = get_datahandler(datadir, data_format=data_format_trades)
|
||||
data_handler_ohlcv = get_datahandler(datadir, data_format=data_format_ohlcv)
|
||||
|
||||
for pair in pairs:
|
||||
trades = data_handler_trades.trades_load(pair)
|
||||
for timeframe in timeframes:
|
||||
if erase:
|
||||
if data_handler_ohlcv.ohlcv_purge(pair, timeframe, candle_type=candle_type):
|
||||
logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
|
||||
try:
|
||||
ohlcv = trades_to_ohlcv(trades, timeframe)
|
||||
# Store ohlcv
|
||||
data_handler_ohlcv.ohlcv_store(pair, timeframe, data=ohlcv, candle_type=candle_type)
|
||||
except ValueError:
|
||||
logger.exception(f'Could not convert {pair} to OHLCV.')
|
||||
|
||||
|
||||
def get_timerange(data: Dict[str, DataFrame]) -> Tuple[datetime, datetime]:
|
||||
"""
|
||||
Get the maximum common timerange for the given backtest data.
|
||||
|
||||
@@ -4,6 +4,7 @@ from freqtrade.exchange.common import remove_exchange_credentials, MAP_EXCHANGE_
|
||||
from freqtrade.exchange.exchange import Exchange
|
||||
# isort: on
|
||||
from freqtrade.exchange.binance import Binance
|
||||
from freqtrade.exchange.bitmart import Bitmart
|
||||
from freqtrade.exchange.bitpanda import Bitpanda
|
||||
from freqtrade.exchange.bittrex import Bittrex
|
||||
from freqtrade.exchange.bitvavo import Bitvavo
|
||||
|
||||
@@ -21,6 +21,8 @@ class Binance(Exchange):
|
||||
|
||||
_ft_has: Dict = {
|
||||
"stoploss_on_exchange": True,
|
||||
"stop_price_param": "stopPrice",
|
||||
"stop_price_prop": "stopPrice",
|
||||
"stoploss_order_types": {"limit": "stop_loss_limit"},
|
||||
"order_time_in_force": ["GTC", "FOK", "IOC", "PO"],
|
||||
"ohlcv_candle_limit": 1000,
|
||||
@@ -121,10 +123,14 @@ class Binance(Exchange):
|
||||
|
||||
def funding_fee_cutoff(self, open_date: datetime):
|
||||
"""
|
||||
Funding fees are only charged at full hours (usually every 4-8h).
|
||||
Therefore a trade opening at 10:00:01 will not be charged a funding fee until the next hour.
|
||||
On binance, this cutoff is 15s.
|
||||
https://github.com/freqtrade/freqtrade/pull/5779#discussion_r740175931
|
||||
:param open_date: The open date for a trade
|
||||
:return: The cutoff open time for when a funding fee is charged
|
||||
:return: True if the date falls on a full hour, False otherwise
|
||||
"""
|
||||
return open_date.minute > 0 or (open_date.minute == 0 and open_date.second > 15)
|
||||
return open_date.minute == 0 and open_date.second < 15
|
||||
|
||||
def dry_run_liquidation_price(
|
||||
self,
|
||||
|
||||
File diff suppressed because it is too large
20
freqtrade/exchange/bitmart.py
Normal file
@@ -0,0 +1,20 @@
|
||||
""" Bitmart exchange subclass """
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
from freqtrade.exchange import Exchange
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Bitmart(Exchange):
|
||||
"""
|
||||
Bitmart exchange class. Contains adjustments needed for Freqtrade to work
|
||||
with this exchange.
|
||||
"""
|
||||
|
||||
_ft_has: Dict = {
|
||||
"stoploss_on_exchange": False, # Bitmart API does not support stoploss orders
|
||||
"ohlcv_candle_limit": 200,
|
||||
}
|
||||
@@ -1,16 +1,16 @@
|
||||
""" Bybit exchange subclass """
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
import ccxt
|
||||
|
||||
from freqtrade.constants import BuySell
|
||||
from freqtrade.enums import MarginMode, PriceType, TradingMode
|
||||
from freqtrade.enums.candletype import CandleType
|
||||
from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
|
||||
from freqtrade.enums import CandleType, MarginMode, PriceType, TradingMode
|
||||
from freqtrade.exceptions import DDosProtection, ExchangeError, OperationalException, TemporaryError
|
||||
from freqtrade.exchange import Exchange
|
||||
from freqtrade.exchange.common import retrier
|
||||
from freqtrade.util.datetime_helpers import dt_now, dt_ts
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -36,6 +36,8 @@ class Bybit(Exchange):
|
||||
"funding_fee_timeframe": "8h",
|
||||
"stoploss_on_exchange": True,
|
||||
"stoploss_order_types": {"limit": "limit", "market": "market"},
|
||||
# bybit response parsing fails to populate stopLossPrice
|
||||
"stop_price_prop": "stopPrice",
|
||||
"stop_price_type_field": "triggerBy",
|
||||
"stop_price_type_value_mapping": {
|
||||
PriceType.LAST: "LastPrice",
|
||||
@@ -200,6 +202,37 @@ class Bybit(Exchange):
|
||||
"""
|
||||
# Bybit does not provide "applied" funding fees per position.
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
return self._fetch_and_calculate_funding_fees(
|
||||
pair, amount, is_short, open_date)
|
||||
try:
|
||||
return self._fetch_and_calculate_funding_fees(
|
||||
pair, amount, is_short, open_date)
|
||||
except ExchangeError:
|
||||
logger.warning(f"Could not update funding fees for {pair}.")
|
||||
return 0.0
|
||||
|
||||
def fetch_orders(self, pair: str, since: datetime, params: Optional[Dict] = None) -> List[Dict]:
|
||||
"""
|
||||
Fetch all orders for a pair "since"
|
||||
:param pair: Pair for the query
|
||||
:param since: Starting time for the query
|
||||
"""
|
||||
# On bybit, the distance between since and "until" can't exceed 7 days.
|
||||
# we therefore need to split the query into multiple queries.
|
||||
orders = []
|
||||
|
||||
while since < dt_now():
|
||||
until = since + timedelta(days=7, minutes=-1)
|
||||
orders += super().fetch_orders(pair, since, params={'until': dt_ts(until)})
|
||||
since = until
|
||||
|
||||
return orders
|
||||
|
||||
def fetch_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
|
||||
order = super().fetch_order(order_id, pair, params)
|
||||
if (
|
||||
order.get('status') == 'canceled'
|
||||
and order.get('filled') == 0.0
|
||||
and order.get('remaining') == 0.0
|
||||
):
|
||||
# Canceled orders will have "remaining=0" on bybit.
|
||||
order['remaining'] = None
|
||||
return order
|
||||
|
||||
@@ -52,7 +52,7 @@ MAP_EXCHANGE_CHILDCLASS = {
|
||||
|
||||
SUPPORTED_EXCHANGES = [
|
||||
'binance',
|
||||
'bittrex',
|
||||
'bitmart',
|
||||
'gate',
|
||||
'huobi',
|
||||
'kraken',
|
||||
|
||||
@@ -23,8 +23,7 @@ from freqtrade.constants import (DEFAULT_AMOUNT_RESERVE_PERCENT, NON_OPEN_EXCHAN
|
||||
BuySell, Config, EntryExit, ExchangeConfig,
|
||||
ListPairsWithTimeframes, MakerTaker, OBLiteral, PairWithTimeframe)
|
||||
from freqtrade.data.converter import clean_ohlcv_dataframe, ohlcv_to_dataframe, trades_dict_to_list
|
||||
from freqtrade.enums import OPTIMIZE_MODES, CandleType, MarginMode, TradingMode
|
||||
from freqtrade.enums.pricetype import PriceType
|
||||
from freqtrade.enums import OPTIMIZE_MODES, CandleType, MarginMode, PriceType, TradingMode
|
||||
from freqtrade.exceptions import (DDosProtection, ExchangeError, InsufficientFundsError,
|
||||
InvalidOrderException, OperationalException, PricingError,
|
||||
RetryableOrderError, TemporaryError)
|
||||
@@ -62,7 +61,8 @@ class Exchange:
|
||||
# or by specifying them in the configuration.
|
||||
_ft_has_default: Dict = {
|
||||
"stoploss_on_exchange": False,
|
||||
"stop_price_param": "stopPrice",
|
||||
"stop_price_param": "stopLossPrice", # Used for stoploss_on_exchange request
|
||||
"stop_price_prop": "stopLossPrice", # Used for stoploss_on_exchange response parsing
|
||||
"order_time_in_force": ["GTC"],
|
||||
"ohlcv_params": {},
|
||||
"ohlcv_candle_limit": 500,
|
||||
@@ -486,11 +486,14 @@ class Exchange:
|
||||
except ccxt.BaseError:
|
||||
logger.exception('Unable to initialize markets.')
|
||||
|
||||
def reload_markets(self) -> None:
|
||||
def reload_markets(self, force: bool = False) -> None:
|
||||
"""Reload markets both sync and async if refresh interval has passed """
|
||||
# Check whether markets have to be reloaded
|
||||
if (self._last_markets_refresh > 0) and (
|
||||
self._last_markets_refresh + self.markets_refresh_interval > dt_ts()):
|
||||
if (
|
||||
not force
|
||||
and self._last_markets_refresh > 0
|
||||
and (self._last_markets_refresh + self.markets_refresh_interval > dt_ts())
|
||||
):
|
||||
return None
|
||||
logger.debug("Performing scheduled market reload..")
|
||||
try:
|
||||
@@ -832,7 +835,7 @@ class Exchange:
|
||||
rate: float, leverage: float, params: Dict = {},
|
||||
stop_loss: bool = False) -> Dict[str, Any]:
|
||||
now = dt_now()
|
||||
order_id = f'dry_run_{side}_{now.timestamp()}'
|
||||
order_id = f'dry_run_{side}_{pair}_{now.timestamp()}'
|
||||
# Rounding here must respect to contract sizes
|
||||
_amount = self._contracts_to_amount(
|
||||
pair, self.amount_to_precision(pair, self._amount_to_contracts(pair, amount)))
|
||||
@@ -856,15 +859,15 @@ class Exchange:
|
||||
}
|
||||
if stop_loss:
|
||||
dry_order["info"] = {"stopPrice": dry_order["price"]}
|
||||
dry_order[self._ft_has['stop_price_param']] = dry_order["price"]
|
||||
dry_order[self._ft_has['stop_price_prop']] = dry_order["price"]
|
||||
# Workaround to avoid filling stoploss orders immediately
|
||||
dry_order["ft_order_type"] = "stoploss"
|
||||
orderbook: Optional[OrderBook] = None
|
||||
if self.exchange_has('fetchL2OrderBook'):
|
||||
orderbook = self.fetch_l2_order_book(pair, 20)
|
||||
if ordertype == "limit" and orderbook:
|
||||
# Allow a 3% price difference
|
||||
allowed_diff = 0.03
|
||||
# Allow a 1% price difference
|
||||
allowed_diff = 0.01
|
||||
if self._dry_is_price_crossed(pair, side, rate, orderbook, allowed_diff):
|
||||
logger.info(
|
||||
f"Converted order {pair} to market order due to price {rate} crossing spread "
|
||||
@@ -920,7 +923,7 @@ class Exchange:
|
||||
max_slippage_val = rate * ((1 + slippage) if side == 'buy' else (1 - slippage))
|
||||
|
||||
remaining_amount = amount
|
||||
filled_amount = 0.0
|
||||
filled_value = 0.0
|
||||
book_entry_price = 0.0
|
||||
for book_entry in orderbook[ob_type]:
|
||||
book_entry_price = book_entry[0]
|
||||
@@ -928,17 +931,17 @@ class Exchange:
|
||||
if remaining_amount > 0:
|
||||
if remaining_amount < book_entry_coin_volume:
|
||||
# Orderbook at this slot bigger than remaining amount
|
||||
filled_amount += remaining_amount * book_entry_price
|
||||
filled_value += remaining_amount * book_entry_price
|
||||
break
|
||||
else:
|
||||
filled_amount += book_entry_coin_volume * book_entry_price
|
||||
filled_value += book_entry_coin_volume * book_entry_price
|
||||
remaining_amount -= book_entry_coin_volume
|
||||
else:
|
||||
break
|
||||
else:
|
||||
# If remaining_amount wasn't consumed completely (break was not called)
|
||||
filled_amount += remaining_amount * book_entry_price
|
||||
forecast_avg_filled_price = max(filled_amount, 0) / amount
|
||||
filled_value += remaining_amount * book_entry_price
|
||||
forecast_avg_filled_price = max(filled_value, 0) / amount
|
||||
# Limit max. slippage to specified value
|
||||
if side == 'buy':
|
||||
forecast_avg_filled_price = min(forecast_avg_filled_price, max_slippage_val)
|
||||
@@ -1008,7 +1011,7 @@ class Exchange:
|
||||
from freqtrade.persistence import Order
|
||||
order = Order.order_by_id(order_id)
|
||||
if order:
|
||||
ccxt_order = order.to_ccxt_object(self._ft_has['stop_price_param'])
|
||||
ccxt_order = order.to_ccxt_object(self._ft_has['stop_price_prop'])
|
||||
self._dry_run_open_orders[order_id] = ccxt_order
|
||||
return ccxt_order
|
||||
# Gracefully handle errors with dry-run orders.
|
||||
@@ -1080,6 +1083,13 @@ class Exchange:
|
||||
rate_for_order,
|
||||
params,
|
||||
)
|
||||
if order.get('status') is None:
|
||||
# Map empty status to open.
|
||||
order['status'] = 'open'
|
||||
|
||||
if order.get('type') is None:
|
||||
order['type'] = ordertype
|
||||
|
||||
self._log_exchange_response('create_order', order)
|
||||
order = self._order_contracts_to_amount(order)
|
||||
return order
|
||||
@@ -1109,7 +1119,7 @@ class Exchange:
|
||||
"""
|
||||
if not self._ft_has.get('stoploss_on_exchange'):
|
||||
raise OperationalException(f"stoploss is not implemented for {self.name}.")
|
||||
price_param = self._ft_has['stop_price_param']
|
||||
price_param = self._ft_has['stop_price_prop']
|
||||
return (
|
||||
order.get(price_param, None) is None
|
||||
or ((side == "sell" and stop_loss > float(order[price_param])) or
|
||||
@@ -1221,16 +1231,16 @@ class Exchange:
|
||||
return order
|
||||
except ccxt.InsufficientFunds as e:
|
||||
raise InsufficientFundsError(
|
||||
f'Insufficient funds to create {ordertype} sell order on market {pair}. '
|
||||
f'Tried to sell amount {amount} at rate {limit_rate}. '
|
||||
f'Message: {e}') from e
|
||||
except ccxt.InvalidOrder as e:
|
||||
f'Insufficient funds to create {ordertype} {side} order on market {pair}. '
|
||||
f'Tried to {side} amount {amount} at rate {limit_rate} with '
|
||||
f'stop-price {stop_price_norm}. Message: {e}') from e
|
||||
except (ccxt.InvalidOrder, ccxt.BadRequest) as e:
|
||||
# Errors:
|
||||
# `Order would trigger immediately.`
|
||||
raise InvalidOrderException(
|
||||
f'Could not create {ordertype} sell order on market {pair}. '
|
||||
f'Tried to sell amount {amount} at rate {limit_rate}. '
|
||||
f'Message: {e}') from e
|
||||
f'Could not create {ordertype} {side} order on market {pair}. '
|
||||
f'Tried to {side} amount {amount} at rate {limit_rate} with '
|
||||
f'stop-price {stop_price_norm}. Message: {e}') from e
|
||||
except ccxt.DDoSProtection as e:
|
||||
raise DDosProtection(e) from e
|
||||
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
|
||||
@@ -1421,8 +1431,17 @@ class Exchange:
|
||||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
def _fetch_orders_emulate(self, pair: str, since_ms: int) -> List[Dict]:
|
||||
orders = []
|
||||
if self.exchange_has('fetchClosedOrders'):
|
||||
orders = self._api.fetch_closed_orders(pair, since=since_ms)
|
||||
if self.exchange_has('fetchOpenOrders'):
|
||||
orders_open = self._api.fetch_open_orders(pair, since=since_ms)
|
||||
orders.extend(orders_open)
|
||||
return orders
|
||||
|
||||
@retrier(retries=0)
|
||||
def fetch_orders(self, pair: str, since: datetime) -> List[Dict]:
|
||||
def fetch_orders(self, pair: str, since: datetime, params: Optional[Dict] = None) -> List[Dict]:
|
||||
"""
|
||||
Fetch all orders for a pair "since"
|
||||
:param pair: Pair for the query
|
||||
@@ -1431,26 +1450,20 @@ class Exchange:
|
||||
if self._config['dry_run']:
|
||||
return []
|
||||
|
||||
def fetch_orders_emulate() -> List[Dict]:
|
||||
orders = []
|
||||
if self.exchange_has('fetchClosedOrders'):
|
||||
orders = self._api.fetch_closed_orders(pair, since=since_ms)
|
||||
if self.exchange_has('fetchOpenOrders'):
|
||||
orders_open = self._api.fetch_open_orders(pair, since=since_ms)
|
||||
orders.extend(orders_open)
|
||||
return orders
|
||||
|
||||
try:
|
||||
since_ms = int((since.timestamp() - 10) * 1000)
|
||||
|
||||
if self.exchange_has('fetchOrders'):
|
||||
if not params:
|
||||
params = {}
|
||||
try:
|
||||
orders: List[Dict] = self._api.fetch_orders(pair, since=since_ms)
|
||||
orders: List[Dict] = self._api.fetch_orders(pair, since=since_ms, params=params)
|
||||
except ccxt.NotSupported:
|
||||
# Some exchanges don't support fetchOrders
|
||||
# attempt to fetch open and closed orders separately
|
||||
orders = fetch_orders_emulate()
|
||||
orders = self._fetch_orders_emulate(pair, since_ms)
|
||||
else:
|
||||
orders = fetch_orders_emulate()
|
||||
orders = self._fetch_orders_emulate(pair, since_ms)
|
||||
self._log_exchange_response('fetch_orders', orders)
|
||||
orders = [self._order_contracts_to_amount(o) for o in orders]
|
||||
return orders
|
||||
@@ -1486,8 +1499,9 @@ class Exchange:
|
||||
@retrier
|
||||
def fetch_bids_asks(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Dict:
|
||||
"""
|
||||
:param symbols: List of symbols to fetch
|
||||
:param cached: Allow cached result
|
||||
:return: fetch_tickers result
|
||||
:return: fetch_bids_asks result
|
||||
"""
|
||||
if not self.exchange_has('fetchBidsAsks'):
|
||||
return {}
|
||||
@@ -1536,6 +1550,12 @@ class Exchange:
|
||||
raise OperationalException(
|
||||
f'Exchange {self._api.name} does not support fetching tickers in batch. '
|
||||
f'Message: {e}') from e
|
||||
except ccxt.BadSymbol as e:
|
||||
logger.warning(f"Could not load tickers due to {e.__class__.__name__}. Message: {e} ."
|
||||
"Reloading markets.")
|
||||
self.reload_markets(True)
|
||||
# Re-raise exception to repeat the call.
|
||||
raise TemporaryError from e
|
||||
except ccxt.DDoSProtection as e:
|
||||
raise DDosProtection(e) from e
|
||||
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
|
||||
@@ -1944,7 +1964,7 @@ class Exchange:
|
||||
|
||||
results = await asyncio.gather(*input_coro, return_exceptions=True)
|
||||
for res in results:
|
||||
if isinstance(res, Exception):
|
||||
if isinstance(res, BaseException):
|
||||
logger.warning(f"Async code raised an exception: {repr(res)}")
|
||||
if raise_:
|
||||
raise
|
||||
@@ -2269,6 +2289,7 @@ class Exchange:
|
||||
|
||||
from_id = t[-1][1]
|
||||
else:
|
||||
logger.debug("Stopping as no more trades were returned.")
|
||||
break
|
||||
except asyncio.CancelledError:
|
||||
logger.debug("Async operation Interrupted, breaking trades DL loop.")
|
||||
@@ -2294,6 +2315,11 @@ class Exchange:
|
||||
try:
|
||||
t = await self._async_fetch_trades(pair, since=since)
|
||||
if t:
|
||||
# No more trades to download available at the exchange,
|
||||
# So we repeatedly get the same trade over and over again.
|
||||
if since == t[-1][0] and len(t) == 1:
|
||||
logger.debug("Stopping because no more trades are available.")
|
||||
break
|
||||
since = t[-1][0]
|
||||
trades.extend(t)
|
||||
# Reached the end of the defined-download period
|
||||
@@ -2302,6 +2328,7 @@ class Exchange:
|
||||
f"Stopping because until was reached. {t[-1][0]} > {until}")
|
||||
break
|
||||
else:
|
||||
logger.debug("Stopping as no more trades were returned.")
|
||||
break
|
||||
except asyncio.CancelledError:
|
||||
logger.debug("Async operation Interrupted, breaking trades DL loop.")
|
||||
@@ -2643,12 +2670,14 @@ class Exchange:
|
||||
"""
|
||||
return 0.0
|
||||
|
||||
def funding_fee_cutoff(self, open_date: datetime):
|
||||
def funding_fee_cutoff(self, open_date: datetime) -> bool:
|
||||
"""
|
||||
Funding fees are only charged at full hours (usually every 4-8h).
|
||||
Therefore a trade opening at 10:00:01 will not be charged a funding fee until the next hour.
|
||||
:param open_date: The open date for a trade
|
||||
:return: The cutoff open time for when a funding fee is charged
|
||||
:return: True if the date falls on a full hour, False otherwise
|
||||
"""
|
||||
return open_date.minute > 0 or open_date.second > 0
|
||||
return open_date.minute == 0 and open_date.second == 0
|

@retrier
def set_margin_mode(self, pair: str, margin_mode: MarginMode, accept_fail: bool = False,
@@ -2696,15 +2725,16 @@ class Exchange:
"""

if self.funding_fee_cutoff(open_date):
open_date += timedelta(hours=1)
# Shift back to 1h candle to avoid missing funding fees
# Only really relevant for trades very close to the full hour
open_date = timeframe_to_prev_date('1h', open_date)
timeframe = self._ft_has['mark_ohlcv_timeframe']
timeframe_ff = self._ft_has.get('funding_fee_timeframe',
self._ft_has['mark_ohlcv_timeframe'])

if not close_date:
close_date = datetime.now(timezone.utc)
open_timestamp = int(timeframe_to_prev_date(timeframe, open_date).timestamp()) * 1000
# close_timestamp = int(close_date.timestamp()) * 1000
since_ms = int(timeframe_to_prev_date(timeframe, open_date).timestamp()) * 1000

mark_comb: PairWithTimeframe = (
pair, timeframe, CandleType.from_string(self._ft_has["mark_ohlcv_price"]))
@@ -2712,7 +2742,7 @@ class Exchange:
funding_comb: PairWithTimeframe = (pair, timeframe_ff, CandleType.FUNDING_RATE)
candle_histories = self.refresh_latest_ohlcv(
[mark_comb, funding_comb],
since_ms=open_timestamp,
since_ms=since_ms,
cache=False,
drop_incomplete=False,
)
@@ -2723,8 +2753,7 @@ class Exchange:
except KeyError:
raise ExchangeError("Could not find funding rates.") from None

funding_mark_rates = self.combine_funding_and_mark(
funding_rates=funding_rates, mark_rates=mark_rates)
funding_mark_rates = self.combine_funding_and_mark(funding_rates, mark_rates)

return self.calculate_funding_fees(
funding_mark_rates,
@@ -2771,7 +2800,7 @@ class Exchange:
amount: float,
is_short: bool,
open_date: datetime,
close_date: Optional[datetime] = None,
close_date: datetime,
time_in_ratio: Optional[float] = None
) -> float:
"""
@@ -2787,8 +2816,8 @@ class Exchange:
fees: float = 0

if not df.empty:
df = df[(df['date'] >= open_date) & (df['date'] <= close_date)]
fees = sum(df['open_fund'] * df['open_mark'] * amount)
df1 = df[(df['date'] >= open_date) & (df['date'] <= close_date)]
fees = sum(df1['open_fund'] * df1['open_mark'] * amount)

# Negate fees for longs as funding_fees expects it this way based on live endpoints.
return fees if is_short else -fees
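The hunk above only narrows the dataframe copy (df1) and keeps the aggregation itself unchanged: each funding period contributes funding_rate * mark_price * amount, and the sign is flipped for longs so that paid funding shows up as a negative fee. A toy illustration (column names taken from the hunk, data invented):

```python
import pandas as pd

df1 = pd.DataFrame({
    "date": pd.date_range("2023-08-01", periods=3, freq="8h", tz="UTC"),
    "open_fund": [0.0001, -0.0002, 0.0001],       # funding rate per period
    "open_mark": [30_000.0, 30_200.0, 29_900.0],  # mark price per period
})
amount = 0.5  # position size in base currency

fees = sum(df1["open_fund"] * df1["open_mark"] * amount)
is_short = False
print(fees if is_short else -fees)  # longs: positive funding paid becomes a negative fee
```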
@@ -2803,17 +2832,19 @@ class Exchange:
:param amount: Trade amount
:param open_date: Open date of the trade
:return: funding fee since open_date
:raises: ExchangeError if something goes wrong.
"""
if self.trading_mode == TradingMode.FUTURES:
if self._config['dry_run']:
funding_fees = self._fetch_and_calculate_funding_fees(
pair, amount, is_short, open_date)
else:
funding_fees = self._get_funding_fees_from_exchange(pair, open_date)
return funding_fees
else:
return 0.0
try:
if self._config['dry_run']:
funding_fees = self._fetch_and_calculate_funding_fees(
pair, amount, is_short, open_date)
else:
funding_fees = self._get_funding_fees_from_exchange(pair, open_date)
return funding_fees
except ExchangeError:
logger.warning(f"Could not update funding fees for {pair}.")

return 0.0

def get_liquidation_price(
self,
@@ -248,6 +248,39 @@ def amount_to_contract_precision(
return amount


def __price_to_precision_significant_digits(
price: float,
price_precision: float,
*,
rounding_mode: int = ROUND,
) -> float:
"""
Implementation of ROUND_UP/Round_down for significant digits mode.
"""
from decimal import ROUND_DOWN as dec_ROUND_DOWN
from decimal import ROUND_UP as dec_ROUND_UP
from decimal import Decimal
dec = Decimal(str(price))
string = f'{dec:f}'
precision = round(price_precision)

q = precision - dec.adjusted() - 1
sigfig = Decimal('10') ** -q
if q < 0:
string_to_precision = string[:precision]
# string_to_precision is '' when we have zero precision
below = sigfig * Decimal(string_to_precision if string_to_precision else '0')
above = below + sigfig
res = above if rounding_mode == ROUND_UP else below
precise = f'{res:f}'
else:
precise = '{:f}'.format(dec.quantize(
sigfig,
rounding=dec_ROUND_DOWN if rounding_mode == ROUND_DOWN else dec_ROUND_UP)
)
return float(precise)


def price_to_precision(
price: float,
price_precision: Optional[float],
@@ -271,28 +304,39 @@ def price_to_precision(
:return: price rounded up to the precision the Exchange accepts
"""
if price_precision is not None and precisionMode is not None:
if rounding_mode not in (ROUND_UP, ROUND_DOWN):
# Use CCXT code where possible.
return float(decimal_to_precision(price, rounding_mode=rounding_mode,
precision=price_precision,
counting_mode=precisionMode
))

if precisionMode == TICK_SIZE:
if rounding_mode == ROUND:
ticks = price / price_precision
rounded_ticks = round(ticks)
return rounded_ticks * price_precision
precision = FtPrecise(price_precision)
price_str = FtPrecise(price)
missing = price_str % precision
if not missing == FtPrecise("0"):
return round(float(str(price_str - missing + precision)), 14)
if rounding_mode == ROUND_UP:
res = price_str - missing + precision
elif rounding_mode == ROUND_DOWN:
res = price_str - missing
return round(float(str(res)), 14)
return price
elif precisionMode in (SIGNIFICANT_DIGITS, DECIMAL_PLACES):
elif precisionMode == DECIMAL_PLACES:

ndigits = round(price_precision)
if rounding_mode == ROUND:
return round(price, ndigits)
ticks = price * (10**ndigits)
if rounding_mode == ROUND_UP:
return ceil(ticks) / (10**ndigits)
if rounding_mode == TRUNCATE:
return int(ticks) / (10**ndigits)
if rounding_mode == ROUND_DOWN:
return floor(ticks) / (10**ndigits)

raise ValueError(f"Unknown rounding_mode {rounding_mode}")
elif precisionMode == SIGNIFICANT_DIGITS:
if rounding_mode in (ROUND_UP, ROUND_DOWN):
return __price_to_precision_significant_digits(
price, price_precision, rounding_mode=rounding_mode
)

raise ValueError(f"Unknown precisionMode {precisionMode}")
return price

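For the TICK_SIZE branch the new ROUND_UP/ROUND_DOWN handling boils down to snapping the price to the next or previous multiple of the exchange tick. A standalone illustration of that behaviour (not the freqtrade helper itself, which additionally relies on FtPrecise and CCXT's decimal_to_precision):

```python
from math import ceil, floor

def round_to_tick(price: float, tick: float, up: bool) -> float:
    # Snap to the next (up) or previous (down) multiple of the tick size.
    ticks = price / tick
    return (ceil(ticks) if up else floor(ticks)) * tick

print(round_to_tick(1.00001, 0.0005, up=True))   # ~1.0005 (ROUND_UP behaviour)
print(round_to_tick(1.00001, 0.0005, up=False))  # ~1.0    (ROUND_DOWN behaviour)
```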
@@ -25,8 +25,10 @@ class Gate(Exchange):
_ft_has: Dict = {
"ohlcv_candle_limit": 1000,
"order_time_in_force": ['GTC', 'IOC'],
"stoploss_order_types": {"limit": "limit"},
"stoploss_on_exchange": True,
"stoploss_order_types": {"limit": "limit"},
"stop_price_param": "stopPrice",
"stop_price_prop": "stopPrice",
"marketOrderRequiresPrice": True,
}


@@ -17,6 +17,8 @@ class Huobi(Exchange):

_ft_has: Dict = {
"stoploss_on_exchange": True,
"stop_price_param": "stopPrice",
"stop_price_prop": "stopPrice",
"stoploss_order_types": {"limit": "stop-limit"},
"ohlcv_candle_limit": 1000,
"l2_limit_range": [5, 10, 20],

@@ -24,6 +24,8 @@ class Kraken(Exchange):
_params: Dict = {"trading_agreement": "agree"}
_ft_has: Dict = {
"stoploss_on_exchange": True,
"stop_price_param": "stopPrice",
"stop_price_prop": "stopPrice",
"ohlcv_candle_limit": 720,
"ohlcv_has_history": False,
"trades_pagination": "id",
@@ -193,7 +195,7 @@ class Kraken(Exchange):
amount: float,
is_short: bool,
open_date: datetime,
close_date: Optional[datetime] = None,
close_date: datetime,
time_in_ratio: Optional[float] = None
) -> float:
"""

@@ -21,6 +21,8 @@ class Kucoin(Exchange):

_ft_has: Dict = {
"stoploss_on_exchange": True,
"stop_price_param": "stopPrice",
"stop_price_prop": "stopPrice",
"stoploss_order_types": {"limit": "limit", "market": "market"},
"l2_limit_range": [20, 100],
"l2_limit_range_required": False,

@@ -1,16 +1,17 @@
import logging
from datetime import timedelta
from typing import Any, Dict, List, Optional, Tuple

import ccxt

from freqtrade.constants import BuySell
from freqtrade.enums import CandleType, MarginMode, TradingMode
from freqtrade.enums.pricetype import PriceType
from freqtrade.enums import CandleType, MarginMode, PriceType, TradingMode
from freqtrade.exceptions import (DDosProtection, OperationalException, RetryableOrderError,
TemporaryError)
from freqtrade.exchange import Exchange, date_minus_candles
from freqtrade.exchange.common import retrier
from freqtrade.misc import safe_value_fallback2
from freqtrade.util import dt_now, dt_ts


logger = logging.getLogger(__name__)
@@ -28,7 +29,6 @@ class Okx(Exchange):
"funding_fee_timeframe": "8h",
"stoploss_order_types": {"limit": "limit"},
"stoploss_on_exchange": True,
"stop_price_param": "stopLossPrice",
}
_ft_has_futures: Dict = {
"tickers_have_quoteVolume": False,
@@ -187,7 +187,7 @@ class Okx(Exchange):

def _convert_stop_order(self, pair: str, order_id: str, order: Dict) -> Dict:
if (
order['status'] == 'closed'
order.get('status', 'open') == 'closed'
and (real_order_id := order.get('info', {}).get('ordId')) is not None
):
# Once a order triggered, we fetch the regular followup order.
@@ -241,3 +241,18 @@ class Okx(Exchange):
pair=pair,
params=params1,
)

def _fetch_orders_emulate(self, pair: str, since_ms: int) -> List[Dict]:
orders = []

orders = self._api.fetch_closed_orders(pair, since=since_ms)
if (since_ms < dt_ts(dt_now() - timedelta(days=6, hours=23))):
# Regular fetch_closed_orders only returns 7 days of data.
# Force usage of "archive" endpoint, which returns 3 months of data.
params = {'method': 'privateGetTradeOrdersHistoryArchive'}
orders_hist = self._api.fetch_closed_orders(pair, since=since_ms, params=params)
orders.extend(orders_hist)

orders_open = self._api.fetch_open_orders(pair, since=since_ms)
orders.extend(orders_open)
return orders

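_fetch_orders_emulate decides whether the "archive" endpoint is needed by comparing the requested start against a window just short of 7 days. A quick standalone check of that cutoff (dt_now/dt_ts re-implemented here for illustration; in freqtrade they come from freqtrade.util, as imported above):

```python
from datetime import datetime, timedelta, timezone

def dt_now() -> datetime:
    return datetime.now(timezone.utc)

def dt_ts(dt: datetime) -> int:
    # Millisecond timestamp, matching how since_ms is expressed.
    return int(dt.timestamp() * 1000)

since_ms = dt_ts(dt_now() - timedelta(days=10))
needs_archive = since_ms < dt_ts(dt_now() - timedelta(days=6, hours=23))
print(needs_archive)  # True - a 10-day-old start falls outside the regular endpoint's window
```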
@@ -159,7 +159,7 @@ class BaseEnvironment(gym.Env):
function is designed for tracking incremented objects,
events, actions inside the training environment.
For example, a user can call this to track the
frequency of occurence of an `is_valid` call in
frequency of occurrence of an `is_valid` call in
their `calculate_reward()`:

def calculate_reward(self, action: int) -> float:

@@ -33,7 +33,7 @@ logger = logging.getLogger(__name__)
torch.multiprocessing.set_sharing_strategy('file_system')

SB3_MODELS = ['PPO', 'A2C', 'DQN']
SB3_CONTRIB_MODELS = ['TRPO', 'ARS', 'RecurrentPPO', 'MaskablePPO']
SB3_CONTRIB_MODELS = ['TRPO', 'ARS', 'RecurrentPPO', 'MaskablePPO', 'QRDQN']


class BaseReinforcementLearningModel(IFreqaiModel):

@@ -263,23 +263,45 @@ class FreqaiDataDrawer:
self.pair_dict[metadata["pair"]] = self.empty_pair_dict.copy()
return

def set_initial_return_values(self, pair: str, pred_df: DataFrame) -> None:
def set_initial_return_values(self, pair: str,
pred_df: DataFrame,
dataframe: DataFrame
) -> None:
"""
Set the initial return values to the historical predictions dataframe. This avoids needing
to repredict on historical candles, and also stores historical predictions despite
retrainings (so stored predictions are true predictions, not just inferencing on trained
data)
data).

We also aim to keep the date from historical predictions so that the FreqUI displays
zeros during any downtime (between FreqAI reloads).
"""

hist_df = self.historic_predictions
len_diff = len(hist_df[pair].index) - len(pred_df.index)
if len_diff < 0:
df_concat = pd.concat([pred_df.iloc[:abs(len_diff)], hist_df[pair]],
ignore_index=True, keys=hist_df[pair].keys())
new_pred = pred_df.copy()
# set new_pred values to nans (we want to signal to user that there was nothing
# historically made during downtime. The newest pred will get appeneded later in
# append_model_predictions)
new_pred.iloc[:, :] = np.nan
new_pred["date_pred"] = dataframe["date"]
hist_preds = self.historic_predictions[pair].copy()

# find the closest common date between new_pred and historic predictions
# and cut off the new_pred dataframe at that date
common_dates = pd.merge(new_pred, hist_preds, on="date_pred", how="inner")
if len(common_dates.index) > 0:
new_pred = new_pred.iloc[len(common_dates):]
else:
df_concat = hist_df[pair].tail(len(pred_df.index)).reset_index(drop=True)
logger.warning("No common dates found between new predictions and historic "
"predictions. You likely left your FreqAI instance offline "
f"for more than {len(dataframe.index)} candles.")

df_concat = pd.concat([hist_preds, new_pred], ignore_index=True, keys=hist_preds.keys())

# any missing values will get zeroed out so users can see the exact
# downtime in FreqUI
df_concat = df_concat.fillna(0)
self.model_return_values[pair] = df_concat
self.historic_predictions[pair] = df_concat
self.model_return_values[pair] = df_concat.tail(len(dataframe.index)).reset_index(drop=True)
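The rewritten set_initial_return_values keeps everything already stored, appends NaN placeholder rows only for candle dates the stored history does not cover, and zero-fills them so any downtime stays visible in FreqUI. A toy pandas sketch of that alignment idea (column names assumed for illustration):

```python
import pandas as pd

hist_preds = pd.DataFrame({"date_pred": pd.date_range("2023-08-01", periods=4, freq="1h"),
                           "prediction": [0.1, 0.2, 0.3, 0.4]})
new_pred = pd.DataFrame({"date_pred": pd.date_range("2023-08-01 02:00", periods=4, freq="1h")})

common = pd.merge(new_pred, hist_preds, on="date_pred", how="inner")
new_rows = new_pred.iloc[len(common):]           # only dates beyond the stored history
df_concat = pd.concat([hist_preds, new_rows], ignore_index=True).fillna(0)
print(df_concat)  # 6 rows: 4 stored predictions plus 2 zero-filled "downtime" rows
```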

def append_model_predictions(self, pair: str, predictions: DataFrame,
do_preds: NDArray[np.int_],

@@ -244,6 +244,14 @@ class FreqaiDataKitchen:
f"{self.pair}: dropped {len(unfiltered_df) - len(filtered_df)} training points"
f" due to NaNs in populated dataset {len(unfiltered_df)}."
)
if len(unfiltered_df) == 0 and not self.live:
raise OperationalException(
f"{self.pair}: all training data dropped due to NaNs. "
"You likely did not download enough training data prior "
"to your backtest timerange. Hint:\n"
f"{DOCS_LINK}/freqai-running/"
"#downloading-data-to-cover-the-full-backtest-period"
)
if (1 - len(filtered_df) / len(unfiltered_df)) > 0.1 and self.live:
worst_indicator = str(unfiltered_df.count().idxmin())
logger.warning(

@@ -138,7 +138,6 @@ class IFreqaiModel(ABC):
:param metadata: pair metadata coming from strategy.
:param strategy: Strategy to train on
"""

self.live = strategy.dp.runmode in (RunMode.DRY_RUN, RunMode.LIVE)
self.dd.set_pair_dict_info(metadata)
self.data_provider = strategy.dp
@@ -394,6 +393,11 @@ class IFreqaiModel(ABC):
dk: FreqaiDataKitchen = Data management/analysis tool associated to present pair only
"""

if not strategy.process_only_new_candles:
raise OperationalException("You are trying to use a FreqAI strategy with "
"process_only_new_candles = False. This is not supported "
"by FreqAI, and it is therefore aborting.")

# get the model metadata associated with the current pair
(_, trained_timestamp) = self.dd.get_pair_dict_info(metadata["pair"])

@@ -453,7 +457,7 @@ class IFreqaiModel(ABC):
pred_df, do_preds = self.predict(dataframe, dk)
if pair not in self.dd.historic_predictions:
self.set_initial_historic_predictions(pred_df, dk, pair, dataframe)
self.dd.set_initial_return_values(pair, pred_df)
self.dd.set_initial_return_values(pair, pred_df, dataframe)

dk.return_dataframe = self.dd.attach_return_values_to_return_dataframe(pair, dataframe)
return
@@ -645,11 +649,11 @@ class IFreqaiModel(ABC):
If the user reuses an identifier on a subsequent instance,
this function will not be called. In that case, "real" predictions
will be appended to the loaded set of historic predictions.
:param df: DataFrame = the dataframe containing the training feature data
:param model: Any = A model which was `fit` using a common library such as
catboost or lightgbm
:param pred_df: DataFrame = the dataframe containing the predictions coming
out of a model
:param dk: FreqaiDataKitchen = object containing methods for data analysis
:param pair: str = current pair
:param strat_df: DataFrame = dataframe coming from strategy
"""

self.dd.historic_predictions[pair] = pred_df

@@ -27,6 +27,12 @@ class PyTorchTransformerRegressor(BasePyTorchRegressor):
...
"freqai": {
...
"conv_width": 30, // PyTorchTransformer is based on windowing
"feature_parameters": {
...
"include_shifted_candles": 0, // which removes the need for shifted candles
...
},
"model_training_parameters" : {
"learning_rate": 3e-4,
"trainer_kwargs": {
@@ -120,16 +126,16 @@ class PyTorchTransformerRegressor(BasePyTorchRegressor):
# create empty torch tensor
self.model.model.eval()
yb = torch.empty(0).to(self.device)
if x.shape[1] > 1:
if x.shape[1] > self.window_size:
ws = self.window_size
for i in range(0, x.shape[1] - ws):
xb = x[:, i:i + ws, :].to(self.device)
y = self.model.model(xb)
yb = torch.cat((yb, y), dim=0)
yb = torch.cat((yb, y), dim=1)
else:
yb = self.model.model(x)

yb = yb.cpu().squeeze()
yb = yb.cpu().squeeze(0)
pred_df = pd.DataFrame(yb.detach().numpy(), columns=dk.label_list)
pred_df, _, _ = dk.label_pipeline.inverse_transform(pred_df)


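The predict path above now slides a window of self.window_size candles across the input and concatenates one prediction per window along the time axis (dim=1), before squeezing only the batch dimension. A minimal sketch of that windowing with a dummy stand-in model, so the shapes can be checked in isolation:

```python
import torch

window_size = 4
x = torch.randn(1, 10, 3)                              # (batch, time, features)
model = lambda xb: xb.mean(dim=(1, 2), keepdim=True)   # dummy "prediction" of shape (1, 1, 1)

yb = torch.empty(1, 0, 1)
for i in range(0, x.shape[1] - window_size):
    xb = x[:, i:i + window_size, :]
    y = model(xb)                     # one prediction per window
    yb = torch.cat((yb, y), dim=1)    # concatenate along the time dimension

print(yb.squeeze(0).shape)            # torch.Size([6, 1]) - one row per window position
```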
@@ -1,8 +1,9 @@
import logging
from pathlib import Path
from typing import Any, Dict, Type
from typing import Any, Dict, List, Optional, Type

import torch as th
from stable_baselines3.common.callbacks import ProgressBarCallback

from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.freqai.RL.Base5ActionRLEnv import Actions, Base5ActionRLEnv, Positions
@@ -73,19 +74,27 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
'trained agent.')
model = self.dd.model_dictionary[dk.pair]
model.set_env(self.train_env)
callbacks: List[Any] = [self.eval_callback, self.tensorboard_callback]
progressbar_callback: Optional[ProgressBarCallback] = None
if self.rl_config.get('progress_bar', False):
progressbar_callback = ProgressBarCallback()
callbacks.insert(0, progressbar_callback)

model.learn(
total_timesteps=int(total_timesteps),
callback=[self.eval_callback, self.tensorboard_callback],
progress_bar=self.rl_config.get('progress_bar', False)
)
try:
model.learn(
total_timesteps=int(total_timesteps),
callback=callbacks,
)
finally:
if progressbar_callback:
progressbar_callback.on_training_end()

if Path(dk.data_path / "best_model.zip").is_file():
logger.info('Callback found a best model.')
best_model = self.MODELCLASS.load(dk.data_path / "best_model")
return best_model

logger.info('Couldnt find best model, using final model instead.')
logger.info("Couldn't find best model, using final model instead.")

return model


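The progress bar is now an opt-in callback rather than a model.learn() flag. A hedged illustration of where the switch would sit in the FreqAI configuration (only the "progress_bar" key is taken from the hunk above; the surrounding keys and values are assumed):

```python
freqai_config = {
    "rl_config": {
        "train_cycles": 25,        # assumed example value
        "progress_bar": True,      # read via self.rl_config.get('progress_bar', False)
    }
}
```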
@@ -3,7 +3,6 @@ from typing import Any, Dict, Type, Union

from stable_baselines3.common.callbacks import BaseCallback
from stable_baselines3.common.logger import HParam
from stable_baselines3.common.vec_env import VecEnv

from freqtrade.freqai.RL.BaseEnvironment import BaseActions

@@ -13,13 +12,9 @@ class TensorboardCallback(BaseCallback):
Custom callback for plotting additional values in tensorboard and
episodic summary reports.
"""
# Override training_env type to fix type errors
training_env: Union[VecEnv, None] = None

def __init__(self, verbose=1, actions: Type[Enum] = BaseActions):
super().__init__(verbose)
self.model: Any = None
self.logger: Any = None
self.actions: Type[Enum] = actions

def _on_training_start(self) -> None:
@@ -47,9 +42,13 @@ class TensorboardCallback(BaseCallback):
def _on_step(self) -> bool:

local_info = self.locals["infos"][0]
if self.training_env is None:
return True
tensorboard_metrics = self.training_env.get_attr("tensorboard_metrics")[0]

if hasattr(self.training_env, 'envs'):
tensorboard_metrics = self.training_env.envs[0].unwrapped.tensorboard_metrics

else:
# For RL-multiproc - usage of [0] might need to be evaluated
tensorboard_metrics = self.training_env.get_attr("tensorboard_metrics")[0]

for metric in local_info:
if metric not in ["episode", "terminal_observation"]:

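_on_step now has to reach the environment attribute through two different wrappers: DummyVecEnv keeps the envs in-process, while SubprocVecEnv only exposes them via get_attr. A small sketch of the two access paths (wrapper names from stable-baselines3; tensorboard_metrics is freqtrade's own attribute):

```python
def read_metrics(training_env):
    if hasattr(training_env, 'envs'):
        # DummyVecEnv: wrapped envs live in-process, attributes are reachable directly.
        return training_env.envs[0].unwrapped.tensorboard_metrics
    # SubprocVecEnv (RL-multiproc): envs run in worker processes, get_attr fetches remotely.
    return training_env.get_attr("tensorboard_metrics")[0]
```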
@@ -7,13 +7,14 @@ from copy import deepcopy
from datetime import datetime, time, timedelta, timezone
from math import isclose
from threading import Lock
from time import sleep
from typing import Any, Dict, List, Optional, Tuple

from schedule import Scheduler

from freqtrade import constants
from freqtrade.configuration import validate_config_consistency
from freqtrade.constants import BuySell, Config, ExchangeConfig, LongShort
from freqtrade.constants import BuySell, Config, EntryExecuteMode, ExchangeConfig, LongShort
from freqtrade.data.converter import order_book_to_dataframe
from freqtrade.data.dataprovider import DataProvider
from freqtrade.edge import Edge
@@ -21,9 +22,8 @@ from freqtrade.enums import (ExitCheckTuple, ExitType, RPCMessageType, RunMode,
State, TradingMode)
from freqtrade.exceptions import (DependencyException, ExchangeError, InsufficientFundsError,
InvalidOrderException, PricingError)
from freqtrade.exchange import (ROUND_DOWN, ROUND_UP, timeframe_to_minutes, timeframe_to_next_date,
timeframe_to_seconds)
from freqtrade.exchange.common import remove_exchange_credentials
from freqtrade.exchange import (ROUND_DOWN, ROUND_UP, remove_exchange_credentials,
timeframe_to_minutes, timeframe_to_next_date, timeframe_to_seconds)
from freqtrade.misc import safe_value_fallback, safe_value_fallback2
from freqtrade.mixins import LoggingMixin
from freqtrade.persistence import Order, PairLocks, Trade, init_db
@@ -132,7 +132,7 @@ class FreqtradeBot(LoggingMixin):
# TODO: This would be more efficient if scheduled in utc time, and performed at each
# TODO: funding interval, specified by funding_fee_times on the exchange classes
for time_slot in range(0, 24):
for minutes in [0, 15, 30, 45]:
for minutes in [1, 31]:
t = str(time(time_slot, minutes, 2))
self._schedule.every().day.at(t).do(update)
self.last_process: Optional[datetime] = None
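The funding-fee refresh now runs twice per hour, at minute 1 and minute 31 (second 2), instead of every quarter hour. A standalone sketch of that scheduling pattern with the schedule library (update is a placeholder for the bot's refresh function):

```python
from datetime import time
from schedule import Scheduler

def update():
    print("refresh funding fees")

scheduler = Scheduler()
for time_slot in range(0, 24):
    for minutes in [1, 31]:
        t = str(time(time_slot, minutes, 2))      # e.g. "10:01:02"
        scheduler.every().day.at(t).do(update)

# scheduler.run_pending() is then invoked from the bot's periodic processing,
# as shown for futures mode in a later hunk of this diff.
```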
@@ -199,6 +199,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
# Only update open orders on startup
|
||||
# This will update the database after the initial migration
|
||||
self.startup_update_open_orders()
|
||||
self.update_funding_fees()
|
||||
|
||||
def process(self) -> None:
|
||||
"""
|
||||
@@ -312,22 +313,19 @@ class FreqtradeBot(LoggingMixin):
|
||||
open_trades = Trade.get_open_trade_count()
|
||||
return max(0, self.config['max_open_trades'] - open_trades)
|
||||
|
||||
def update_funding_fees(self):
|
||||
def update_funding_fees(self) -> None:
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
trades = Trade.get_open_trades()
|
||||
try:
|
||||
for trade in trades:
|
||||
funding_fees = self.exchange.get_funding_fees(
|
||||
trades: List[Trade] = Trade.get_open_trades()
|
||||
for trade in trades:
|
||||
trade.set_funding_fees(
|
||||
self.exchange.get_funding_fees(
|
||||
pair=trade.pair,
|
||||
amount=trade.amount,
|
||||
is_short=trade.is_short,
|
||||
open_date=trade.date_last_filled_utc
|
||||
)
|
||||
trade.funding_fees = funding_fees
|
||||
except ExchangeError:
|
||||
logger.warning("Could not update funding fees for open trades.")
|
||||
open_date=trade.date_last_filled_utc)
|
||||
)
|
||||
|
||||
def startup_backpopulate_precision(self):
|
||||
def startup_backpopulate_precision(self) -> None:
|
||||
|
||||
trades = Trade.get_trades([Trade.contract_size.is_(None)])
|
||||
for trade in trades:
|
||||
@@ -373,15 +371,14 @@ class FreqtradeBot(LoggingMixin):
|
||||
"Order is older than 5 days. Assuming order was fully cancelled.")
|
||||
fo = order.to_ccxt_object()
|
||||
fo['status'] = 'canceled'
|
||||
self.handle_cancel_order(fo, order.trade, constants.CANCEL_REASON['TIMEOUT'])
|
||||
self.handle_cancel_order(
|
||||
fo, order, order.trade, constants.CANCEL_REASON['TIMEOUT']
|
||||
)
|
||||
|
||||
except ExchangeError as e:
|
||||
|
||||
logger.warning(f"Error updating Order {order.order_id} due to {e}")
|
||||
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
self._schedule.run_pending()
|
||||
|
||||
def update_trades_without_assigned_fees(self) -> None:
|
||||
"""
|
||||
Update closed trades without close fees assigned.
|
||||
@@ -440,13 +437,6 @@ class FreqtradeBot(LoggingMixin):
|
||||
if fo and fo['status'] == 'open':
|
||||
# Assume this as the open stoploss order
|
||||
trade.stoploss_order_id = order.order_id
|
||||
elif order.ft_order_side == trade.exit_side:
|
||||
if fo and fo['status'] == 'open':
|
||||
# Assume this as the open order
|
||||
trade.open_order_id = order.order_id
|
||||
elif order.ft_order_side == trade.entry_side:
|
||||
if fo and fo['status'] == 'open':
|
||||
trade.open_order_id = order.order_id
|
||||
if fo:
|
||||
logger.info(f"Found {order} for trade {trade}.")
|
||||
self.update_trade_state(trade, order.order_id, fo,
|
||||
@@ -461,36 +451,48 @@ class FreqtradeBot(LoggingMixin):
|
||||
Only used balance disappeared, which would make exiting impossible.
|
||||
"""
|
||||
try:
|
||||
orders = self.exchange.fetch_orders(trade.pair, trade.open_date_utc)
|
||||
orders = self.exchange.fetch_orders(
|
||||
trade.pair, trade.open_date_utc - timedelta(seconds=10))
|
||||
prev_exit_reason = trade.exit_reason
|
||||
prev_trade_state = trade.is_open
|
||||
for order in orders:
|
||||
trade_order = [o for o in trade.orders if o.order_id == order['id']]
|
||||
if trade_order:
|
||||
continue
|
||||
logger.info(f"Found previously unknown order {order['id']} for {trade.pair}.")
|
||||
|
||||
order_obj = Order.parse_from_ccxt_object(order, trade.pair, order['side'])
|
||||
order_obj.order_filled_date = datetime.fromtimestamp(
|
||||
safe_value_fallback(order, 'lastTradeTimestamp', 'timestamp') // 1000,
|
||||
tz=timezone.utc)
|
||||
trade.orders.append(order_obj)
|
||||
# TODO: how do we handle open_order_id ...
|
||||
Trade.commit()
|
||||
prev_exit_reason = trade.exit_reason
|
||||
trade.exit_reason = ExitType.SOLD_ON_EXCHANGE.value
|
||||
self.update_trade_state(trade, order['id'], order)
|
||||
if trade_order:
|
||||
# We knew this order, but didn't have it updated properly
|
||||
order_obj = trade_order[0]
|
||||
else:
|
||||
logger.info(f"Found previously unknown order {order['id']} for {trade.pair}.")
|
||||
|
||||
order_obj = Order.parse_from_ccxt_object(order, trade.pair, order['side'])
|
||||
order_obj.order_filled_date = datetime.fromtimestamp(
|
||||
safe_value_fallback(order, 'lastTradeTimestamp', 'timestamp') // 1000,
|
||||
tz=timezone.utc)
|
||||
trade.orders.append(order_obj)
|
||||
Trade.commit()
|
||||
trade.exit_reason = ExitType.SOLD_ON_EXCHANGE.value
|
||||
|
||||
self.update_trade_state(trade, order['id'], order, send_msg=False)
|
||||
|
||||
logger.info(f"handled order {order['id']}")
|
||||
if not trade.is_open:
|
||||
# Trade was just closed
|
||||
trade.close_date = order_obj.order_filled_date
|
||||
Trade.commit()
|
||||
break
|
||||
else:
|
||||
trade.exit_reason = prev_exit_reason
|
||||
Trade.commit()
|
||||
|
||||
# Refresh trade from database
|
||||
Trade.session.refresh(trade)
|
||||
if not trade.is_open:
|
||||
# Trade was just closed
|
||||
trade.close_date = trade.date_last_filled_utc
|
||||
self.order_close_notify(trade, order_obj,
|
||||
order_obj.ft_order_side == 'stoploss',
|
||||
send_msg=prev_trade_state != trade.is_open)
|
||||
else:
|
||||
trade.exit_reason = prev_exit_reason
|
||||
Trade.commit()
|
||||
|
||||
except ExchangeError:
|
||||
logger.warning("Error finding onexchange order")
|
||||
logger.warning("Error finding onexchange order.")
|
||||
except Exception:
|
||||
# catching https://github.com/freqtrade/freqtrade/issues/9025
|
||||
logger.warning("Error finding onexchange order", exc_info=True)
|
||||
#
|
||||
# BUY / enter positions / open trades logic and methods
|
||||
#
|
||||
@@ -612,7 +614,8 @@ class FreqtradeBot(LoggingMixin):
|
||||
# Walk through each pair and check if it needs changes
|
||||
for trade in Trade.get_open_trades():
|
||||
# If there is any open orders, wait for them to finish.
|
||||
if trade.open_order_id is None:
|
||||
# TODO Remove to allow mul open orders
|
||||
if not trade.has_open_orders:
|
||||
# Do a wallets update (will be ratelimited to once per hour)
|
||||
self.wallets.update(False)
|
||||
try:
|
||||
@@ -662,7 +665,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
else:
|
||||
logger.debug("Max adjustment entries is set to unlimited.")
|
||||
self.execute_entry(trade.pair, stake_amount, price=current_entry_rate,
|
||||
trade=trade, is_short=trade.is_short)
|
||||
trade=trade, is_short=trade.is_short, mode='pos_adjust')
|
||||
|
||||
if stake_amount is not None and stake_amount < 0.0:
|
||||
# We should decrease our position
|
||||
@@ -732,7 +735,7 @@ class FreqtradeBot(LoggingMixin):
ordertype: Optional[str] = None,
enter_tag: Optional[str] = None,
trade: Optional[Trade] = None,
order_adjust: bool = False,
mode: EntryExecuteMode = 'initial',
leverage_: Optional[float] = None,
) -> bool:
"""
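execute_entry now takes a single mode argument instead of the order_adjust/pos_adjust booleans. The exact EntryExecuteMode definition lives in freqtrade.constants and is not part of this diff; based on the three values used in these hunks it is presumably a Literal alias along these lines:

```python
from typing import Literal

EntryExecuteMode = Literal['initial', 'pos_adjust', 'replace']

def describe(mode: EntryExecuteMode) -> str:
    # Hypothetical helper, for illustration only.
    return {
        'initial': "first entry creating a new trade",
        'pos_adjust': "additional entry adjusting an existing position",
        'replace': "re-placing a cancelled entry order at an adjusted price",
    }[mode]
```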
@@ -740,6 +743,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
:param pair: pair for which we want to create a LIMIT_BUY
|
||||
:param stake_amount: amount of stake-currency for the pair
|
||||
:return: True if a buy order is created, false if it fails.
|
||||
:raise: DependencyException or it's subclasses like ExchangeError.
|
||||
"""
|
||||
time_in_force = self.strategy.order_time_in_force['entry']
|
||||
|
||||
@@ -749,22 +753,25 @@ class FreqtradeBot(LoggingMixin):
|
||||
pos_adjust = trade is not None
|
||||
|
||||
enter_limit_requested, stake_amount, leverage = self.get_valid_enter_price_and_stake(
|
||||
pair, price, stake_amount, trade_side, enter_tag, trade, order_adjust, leverage_,
|
||||
pos_adjust)
|
||||
pair, price, stake_amount, trade_side, enter_tag, trade, mode, leverage_)
|
||||
|
||||
if not stake_amount:
|
||||
return False
|
||||
|
||||
msg = (f"Position adjust: about to create a new order for {pair} with stake: "
|
||||
f"{stake_amount} for {trade}" if pos_adjust
|
||||
msg = (f"Position adjust: about to create a new order for {pair} with stake_amount: "
|
||||
f"{stake_amount} for {trade}" if mode == 'pos_adjust'
|
||||
else
|
||||
f"{name} signal found: about create a new trade for {pair} with stake_amount: "
|
||||
f"{stake_amount} ...")
|
||||
(f"Replacing {side} order: about create a new order for {pair} with stake_amount: "
|
||||
f"{stake_amount} ..."
|
||||
if mode == 'replace' else
|
||||
f"{name} signal found: about create a new trade for {pair} with stake_amount: "
|
||||
f"{stake_amount} ..."
|
||||
))
|
||||
logger.info(msg)
|
||||
amount = (stake_amount / enter_limit_requested) * leverage
|
||||
order_type = ordertype or self.strategy.order_types['entry']
|
||||
|
||||
if not pos_adjust and not strategy_safe_wrapper(
|
||||
if mode == 'initial' and not strategy_safe_wrapper(
|
||||
self.strategy.confirm_trade_entry, default_retval=True)(
|
||||
pair=pair, order_type=order_type, amount=amount, rate=enter_limit_requested,
|
||||
time_in_force=time_in_force, current_time=datetime.now(timezone.utc),
|
||||
@@ -784,7 +791,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
order_obj = Order.parse_from_ccxt_object(order, pair, side, amount, enter_limit_requested)
|
||||
order_id = order['id']
|
||||
order_status = order.get('status')
|
||||
logger.info(f"Order #{order_id} was created for {pair} and status is {order_status}.")
|
||||
logger.info(f"Order {order_id} was created for {pair} and status is {order_status}.")
|
||||
|
||||
# we assume the order is executed at the price requested
|
||||
enter_limit_filled_price = enter_limit_requested
|
||||
@@ -823,14 +830,15 @@ class FreqtradeBot(LoggingMixin):
|
||||
base_currency = self.exchange.get_pair_base_currency(pair)
|
||||
open_date = datetime.now(timezone.utc)
|
||||
|
||||
funding_fees = self.exchange.get_funding_fees(
|
||||
pair=pair,
|
||||
amount=amount + trade.amount if trade else amount,
|
||||
is_short=is_short,
|
||||
open_date=trade.date_last_filled_utc if trade else open_date
|
||||
)
|
||||
|
||||
# This is a new trade
|
||||
if trade is None:
|
||||
funding_fees = 0.0
|
||||
try:
|
||||
funding_fees = self.exchange.get_funding_fees(
|
||||
pair=pair, amount=amount, is_short=is_short, open_date=open_date)
|
||||
except ExchangeError:
|
||||
logger.warning("Could not find funding fee.")
|
||||
|
||||
trade = Trade(
|
||||
pair=pair,
|
||||
@@ -846,7 +854,6 @@ class FreqtradeBot(LoggingMixin):
|
||||
open_rate_requested=enter_limit_requested,
|
||||
open_date=open_date,
|
||||
exchange=self.exchange.id,
|
||||
open_order_id=order_id,
|
||||
strategy=self.strategy.get_strategy_name(),
|
||||
enter_tag=enter_tag,
|
||||
timeframe=timeframe_to_minutes(self.config['timeframe']),
|
||||
@@ -867,7 +874,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
trade.is_open = True
|
||||
trade.fee_open_currency = None
|
||||
trade.open_rate_requested = enter_limit_requested
|
||||
trade.open_order_id = order_id
|
||||
trade.set_funding_fees(funding_fees)
|
||||
|
||||
trade.orders.append(order_obj)
|
||||
trade.recalc_trade_from_orders()
|
||||
@@ -913,9 +920,8 @@ class FreqtradeBot(LoggingMixin):
|
||||
trade_side: LongShort,
|
||||
entry_tag: Optional[str],
|
||||
trade: Optional[Trade],
|
||||
order_adjust: bool,
|
||||
mode: EntryExecuteMode,
|
||||
leverage_: Optional[float],
|
||||
pos_adjust: bool,
|
||||
) -> Tuple[float, float, float]:
|
||||
"""
|
||||
Validate and eventually adjust (within limits) limit, amount and leverage
|
||||
@@ -928,11 +934,12 @@ class FreqtradeBot(LoggingMixin):
|
||||
# Calculate price
|
||||
enter_limit_requested = self.exchange.get_rate(
|
||||
pair, side='entry', is_short=(trade_side == 'short'), refresh=True)
|
||||
if not order_adjust:
|
||||
if mode != 'replace':
|
||||
# Don't call custom_entry_price in order-adjust scenario
|
||||
custom_entry_price = strategy_safe_wrapper(self.strategy.custom_entry_price,
|
||||
default_retval=enter_limit_requested)(
|
||||
pair=pair, current_time=datetime.now(timezone.utc),
|
||||
pair=pair, trade=trade,
|
||||
current_time=datetime.now(timezone.utc),
|
||||
proposed_rate=enter_limit_requested, entry_tag=entry_tag,
|
||||
side=trade_side,
|
||||
)
|
||||
@@ -967,7 +974,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
# edge-case for now.
|
||||
min_stake_amount = self.exchange.get_min_pair_stake_amount(
|
||||
pair, enter_limit_requested,
|
||||
self.strategy.stoploss if not pos_adjust else 0.0,
|
||||
self.strategy.stoploss if not mode != 'pos_adjust' else 0.0,
|
||||
leverage)
|
||||
max_stake_amount = self.exchange.get_max_pair_stake_amount(
|
||||
pair, enter_limit_requested, leverage)
|
||||
@@ -1077,7 +1084,11 @@ class FreqtradeBot(LoggingMixin):
|
||||
trades_closed = 0
|
||||
for trade in trades:
|
||||
|
||||
if trade.open_order_id is None and not self.wallets.check_exit_amount(trade):
|
||||
if (
|
||||
not trade.has_open_orders
|
||||
and not trade.stoploss_order_id
|
||||
and not self.wallets.check_exit_amount(trade)
|
||||
):
|
||||
logger.warning(
|
||||
f'Not enough {trade.safe_base_currency} in wallet to exit {trade}. '
|
||||
'Trying to recover.')
|
||||
@@ -1095,7 +1106,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
logger.warning(
|
||||
f'Unable to handle stoploss on exchange for {trade.pair}: {exception}')
|
||||
# Check if we can sell our current pair
|
||||
if trade.open_order_id is None and trade.is_open and self.handle_trade(trade):
|
||||
if not trade.has_open_orders and trade.is_open and self.handle_trade(trade):
|
||||
trades_closed += 1
|
||||
|
||||
except DependencyException as exception:
|
||||
@@ -1214,7 +1225,6 @@ class FreqtradeBot(LoggingMixin):
|
||||
"""
|
||||
|
||||
logger.debug('Handling stoploss on exchange %s ...', trade)
|
||||
|
||||
stoploss_order = None
|
||||
|
||||
try:
|
||||
@@ -1237,7 +1247,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
self.handle_protections(trade.pair, trade.trade_direction)
|
||||
return True
|
||||
|
||||
if trade.open_order_id or not trade.is_open:
|
||||
if trade.has_open_orders or not trade.is_open:
|
||||
# Trade has an open Buy or Sell order, Stoploss-handling can't happen in this case
|
||||
# as the Amount on the exchange is tied up in another trade.
|
||||
# The trade can be closed already (sell-order fill confirmation came in this iteration)
|
||||
@@ -1321,53 +1331,80 @@ class FreqtradeBot(LoggingMixin):
|
||||
Timeout setting takes priority over limit order adjustment request.
|
||||
:return: None
|
||||
"""
|
||||
for trade in Trade.get_open_order_trades():
|
||||
try:
|
||||
if not trade.open_order_id:
|
||||
for trade in Trade.get_open_trades():
|
||||
open_order: Order
|
||||
for open_order in trade.open_orders:
|
||||
try:
|
||||
order = self.exchange.fetch_order(open_order.order_id, trade.pair)
|
||||
|
||||
except (ExchangeError):
|
||||
logger.info(
|
||||
'Cannot query order for %s due to %s', trade, traceback.format_exc()
|
||||
)
|
||||
continue
|
||||
order = self.exchange.fetch_order(trade.open_order_id, trade.pair)
|
||||
except (ExchangeError):
|
||||
logger.info('Cannot query order for %s due to %s', trade, traceback.format_exc())
|
||||
continue
|
||||
|
||||
fully_cancelled = self.update_trade_state(trade, trade.open_order_id, order)
|
||||
not_closed = order['status'] == 'open' or fully_cancelled
|
||||
order_obj = trade.select_order_by_order_id(trade.open_order_id)
|
||||
fully_cancelled = self.update_trade_state(trade, open_order.order_id, order)
|
||||
not_closed = order['status'] == 'open' or fully_cancelled
|
||||
|
||||
if not_closed:
|
||||
if fully_cancelled or (order_obj and self.strategy.ft_check_timed_out(
|
||||
trade, order_obj, datetime.now(timezone.utc))):
|
||||
self.handle_cancel_order(order, trade, constants.CANCEL_REASON['TIMEOUT'])
|
||||
else:
|
||||
self.replace_order(order, order_obj, trade)
|
||||
if not_closed:
|
||||
if fully_cancelled or (
|
||||
open_order and self.strategy.ft_check_timed_out(
|
||||
trade, open_order, datetime.now(timezone.utc)
|
||||
)
|
||||
):
|
||||
self.handle_cancel_order(
|
||||
order, open_order, trade, constants.CANCEL_REASON['TIMEOUT']
|
||||
)
|
||||
else:
|
||||
self.replace_order(order, open_order, trade)
|
||||
|
||||
def handle_cancel_order(self, order: Dict, trade: Trade, reason: str) -> None:
|
||||
def handle_cancel_order(self, order: Dict, order_obj: Order, trade: Trade, reason: str) -> None:
|
||||
"""
|
||||
Check if current analyzed order timed out and cancel if necessary.
|
||||
:param order: Order dict grabbed with exchange.fetch_order()
|
||||
:param order_obj: Order object from the database.
|
||||
:param trade: Trade object.
|
||||
:return: None
|
||||
"""
|
||||
if order['side'] == trade.entry_side:
|
||||
self.handle_cancel_enter(trade, order, reason)
|
||||
self.handle_cancel_enter(trade, order, order_obj, reason)
|
||||
else:
|
||||
canceled = self.handle_cancel_exit(trade, order, reason)
|
||||
canceled_count = trade.get_exit_order_count()
|
||||
canceled = self.handle_cancel_exit(trade, order, order_obj, reason)
|
||||
canceled_count = trade.get_canceled_exit_order_count()
|
||||
max_timeouts = self.config.get('unfilledtimeout', {}).get('exit_timeout_count', 0)
|
||||
if canceled and max_timeouts > 0 and canceled_count >= max_timeouts:
|
||||
logger.warning(f'Emergency exiting trade {trade}, as the exit order '
|
||||
f'timed out {max_timeouts} times.')
|
||||
self.emergency_exit(trade, order['price'])
|
||||
if (canceled and max_timeouts > 0 and canceled_count >= max_timeouts):
|
||||
logger.warning(f"Emergency exiting trade {trade}, as the exit order "
|
||||
f"timed out {max_timeouts} times. force selling {order['amount']}.")
|
||||
self.emergency_exit(trade, order['price'], order['amount'])
|
||||
|
||||
def emergency_exit(self, trade: Trade, price: float) -> None:
|
||||
def emergency_exit(
|
||||
self, trade: Trade, price: float, sub_trade_amt: Optional[float] = None) -> None:
|
||||
try:
|
||||
self.execute_trade_exit(
|
||||
trade, price,
|
||||
exit_check=ExitCheckTuple(exit_type=ExitType.EMERGENCY_EXIT))
|
||||
exit_check=ExitCheckTuple(exit_type=ExitType.EMERGENCY_EXIT),
|
||||
sub_trade_amt=sub_trade_amt
|
||||
)
|
||||
except DependencyException as exception:
|
||||
logger.warning(
|
||||
f'Unable to emergency exit trade {trade.pair}: {exception}')
|
||||
|
||||
def replace_order_failed(self, trade: Trade, msg: str) -> None:
|
||||
"""
|
||||
Order replacement fail handling.
|
||||
Deletes the trade if necessary.
|
||||
:param trade: Trade object.
|
||||
:param msg: Error message.
|
||||
"""
|
||||
logger.warning(msg)
|
||||
if trade.nr_of_successful_entries == 0:
|
||||
# this is the first entry and we didn't get filled yet, delete trade
|
||||
logger.warning(f"Removing {trade} from database.")
|
||||
self._notify_enter_cancel(
|
||||
trade, order_type=self.strategy.order_types['entry'],
|
||||
reason=constants.CANCEL_REASON['REPLACE_FAILED'])
|
||||
trade.delete()
|
||||
|
||||
def replace_order(self, order: Dict, order_obj: Optional[Order], trade: Trade) -> None:
|
||||
"""
|
||||
Check if current analyzed entry order should be replaced or simply cancelled.
|
||||
@@ -1397,7 +1434,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
trade=trade, order=order_obj, pair=trade.pair,
|
||||
current_time=datetime.now(timezone.utc), proposed_rate=proposed_rate,
|
||||
current_order_rate=order_obj.safe_price, entry_tag=trade.enter_tag,
|
||||
side=trade.entry_side)
|
||||
side=trade.trade_direction)
|
||||
|
||||
replacing = True
|
||||
cancel_reason = constants.CANCEL_REASON['REPLACE']
|
||||
@@ -1406,19 +1443,30 @@ class FreqtradeBot(LoggingMixin):
|
||||
cancel_reason = constants.CANCEL_REASON['USER_CANCEL']
|
||||
if order_obj.price != adjusted_entry_price:
|
||||
# cancel existing order if new price is supplied or None
|
||||
self.handle_cancel_enter(trade, order, cancel_reason,
|
||||
replacing=replacing)
|
||||
res = self.handle_cancel_enter(trade, order, order_obj, cancel_reason,
|
||||
replacing=replacing)
|
||||
if not res:
|
||||
self.replace_order_failed(
|
||||
trade, f"Could not cancel order for {trade}, therefore not replacing.")
|
||||
return
|
||||
if adjusted_entry_price:
|
||||
# place new order only if new price is supplied
|
||||
self.execute_entry(
|
||||
pair=trade.pair,
|
||||
stake_amount=(
|
||||
order_obj.safe_remaining * order_obj.safe_price / trade.leverage),
|
||||
price=adjusted_entry_price,
|
||||
trade=trade,
|
||||
is_short=trade.is_short,
|
||||
order_adjust=True,
|
||||
)
|
||||
try:
|
||||
if not self.execute_entry(
|
||||
pair=trade.pair,
|
||||
stake_amount=(
|
||||
order_obj.safe_remaining * order_obj.safe_price / trade.leverage),
|
||||
price=adjusted_entry_price,
|
||||
trade=trade,
|
||||
is_short=trade.is_short,
|
||||
mode='replace',
|
||||
):
|
||||
self.replace_order_failed(
|
||||
trade, f"Could not replace order for {trade}.")
|
||||
except DependencyException as exception:
|
||||
logger.warning(
|
||||
f'Unable to replace order for {trade.pair}: {exception}')
|
||||
self.replace_order_failed(trade, f"Could not replace order for {trade}.")
|
||||
|
||||
def cancel_all_open_orders(self) -> None:
|
||||
"""
|
||||
@@ -1426,38 +1474,39 @@ class FreqtradeBot(LoggingMixin):
|
||||
:return: None
|
||||
"""
|
||||
|
||||
for trade in Trade.get_open_order_trades():
|
||||
if not trade.open_order_id:
|
||||
continue
|
||||
try:
|
||||
order = self.exchange.fetch_order(trade.open_order_id, trade.pair)
|
||||
except (ExchangeError):
|
||||
logger.info('Cannot query order for %s due to %s', trade, traceback.format_exc())
|
||||
continue
|
||||
for trade in Trade.get_open_trades():
|
||||
for open_order in trade.open_orders:
|
||||
try:
|
||||
order = self.exchange.fetch_order(open_order.order_id, trade.pair)
|
||||
except (ExchangeError):
|
||||
logger.info("Can't query order for %s due to %s", trade, traceback.format_exc())
|
||||
continue
|
||||
|
||||
if order['side'] == trade.entry_side:
|
||||
self.handle_cancel_enter(trade, order, constants.CANCEL_REASON['ALL_CANCELLED'])
|
||||
if order['side'] == trade.entry_side:
|
||||
self.handle_cancel_enter(
|
||||
trade, order, open_order, constants.CANCEL_REASON['ALL_CANCELLED']
|
||||
)
|
||||
|
||||
elif order['side'] == trade.exit_side:
|
||||
self.handle_cancel_exit(trade, order, constants.CANCEL_REASON['ALL_CANCELLED'])
|
||||
elif order['side'] == trade.exit_side:
|
||||
self.handle_cancel_exit(
|
||||
trade, order, open_order, constants.CANCEL_REASON['ALL_CANCELLED']
|
||||
)
|
||||
Trade.commit()
|
||||
|
||||
def handle_cancel_enter(
|
||||
self, trade: Trade, order: Dict, reason: str,
|
||||
replacing: Optional[bool] = False
|
||||
self, trade: Trade, order: Dict, order_obj: Order,
|
||||
reason: str, replacing: Optional[bool] = False
|
||||
) -> bool:
|
||||
"""
|
||||
entry cancel - cancel order
|
||||
:param order_obj: Order object from the database.
|
||||
:param replacing: Replacing order - prevent trade deletion.
|
||||
:return: True if trade was fully cancelled
|
||||
"""
|
||||
was_trade_fully_canceled = False
|
||||
order_id = order_obj.order_id
|
||||
side = trade.entry_side.capitalize()
|
||||
if not trade.open_order_id:
|
||||
logger.warning(f"No open order for {trade}.")
|
||||
return False
|
||||
|
||||
# Cancelled orders may have the status of 'canceled' or 'closed'
|
||||
if order['status'] not in constants.NON_OPEN_EXCHANGE_STATES:
|
||||
filled_val: float = order.get('filled', 0.0) or 0.0
|
||||
filled_stake = filled_val * trade.open_rate
|
||||
@@ -1466,71 +1515,87 @@ class FreqtradeBot(LoggingMixin):
|
||||
|
||||
if filled_val > 0 and minstake and filled_stake < minstake:
|
||||
logger.warning(
|
||||
f"Order {trade.open_order_id} for {trade.pair} not cancelled, "
|
||||
f"Order {order_id} for {trade.pair} not cancelled, "
|
||||
f"as the filled amount of {filled_val} would result in an unexitable trade.")
|
||||
return False
|
||||
corder = self.exchange.cancel_order_with_result(trade.open_order_id, trade.pair,
|
||||
trade.amount)
|
||||
corder = self.exchange.cancel_order_with_result(order_id, trade.pair, trade.amount)
|
||||
order_obj.ft_cancel_reason = reason
|
||||
# if replacing, retry fetching the order 3 times if the status is not what we need
|
||||
if replacing:
|
||||
retry_count = 0
|
||||
while (
|
||||
corder.get('status') not in constants.NON_OPEN_EXCHANGE_STATES
|
||||
and retry_count < 3
|
||||
):
|
||||
sleep(0.5)
|
||||
corder = self.exchange.fetch_order(order_id, trade.pair)
|
||||
retry_count += 1
|
||||
|
||||
# Avoid race condition where the order could not be cancelled coz its already filled.
|
||||
# Simply bailing here is the only safe way - as this order will then be
|
||||
# handled in the next iteration.
|
||||
if corder.get('status') not in constants.NON_OPEN_EXCHANGE_STATES:
|
||||
logger.warning(f"Order {trade.open_order_id} for {trade.pair} not cancelled.")
|
||||
logger.warning(f"Order {order_id} for {trade.pair} not cancelled.")
|
||||
return False
|
||||
else:
|
||||
# Order was cancelled already, so we can reuse the existing dict
|
||||
corder = order
|
||||
reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
|
||||
if order_obj.ft_cancel_reason is None:
|
||||
order_obj.ft_cancel_reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
|
||||
|
||||
logger.info(f'{side} order {reason} for {trade}.')
|
||||
logger.info(f'{side} order {order_obj.ft_cancel_reason} for {trade}.')
|
||||
|
||||
# Using filled to determine the filled amount
|
||||
filled_amount = safe_value_fallback2(corder, order, 'filled', 'filled')
|
||||
if isclose(filled_amount, 0.0, abs_tol=constants.MATH_CLOSE_PREC):
|
||||
was_trade_fully_canceled = True
|
||||
# if trade is not partially completed and it's the only order, just delete the trade
|
||||
open_order_count = len([order for order in trade.orders if order.status == 'open'])
|
||||
if open_order_count <= 1 and trade.nr_of_successful_entries == 0 and not replacing:
|
||||
open_order_count = len([
|
||||
order for order in trade.orders if order.ft_is_open and order.order_id != order_id
|
||||
])
|
||||
if open_order_count < 1 and trade.nr_of_successful_entries == 0 and not replacing:
|
||||
logger.info(f'{side} order fully cancelled. Removing {trade} from database.')
|
||||
trade.delete()
|
||||
was_trade_fully_canceled = True
|
||||
reason += f", {constants.CANCEL_REASON['FULLY_CANCELLED']}"
|
||||
order_obj.ft_cancel_reason += f", {constants.CANCEL_REASON['FULLY_CANCELLED']}"
|
||||
else:
|
||||
self.update_trade_state(trade, trade.open_order_id, corder)
|
||||
trade.open_order_id = None
|
||||
self.update_trade_state(trade, order_id, corder)
|
||||
logger.info(f'{side} Order timeout for {trade}.')
|
||||
else:
|
||||
# update_trade_state (and subsequently recalc_trade_from_orders) will handle updates
|
||||
# to the trade object
|
||||
self.update_trade_state(trade, trade.open_order_id, corder)
|
||||
trade.open_order_id = None
|
||||
self.update_trade_state(trade, order_id, corder)
|
||||
|
||||
logger.info(f'Partial {trade.entry_side} order timeout for {trade}.')
|
||||
reason += f", {constants.CANCEL_REASON['PARTIALLY_FILLED']}"
|
||||
order_obj.ft_cancel_reason += f", {constants.CANCEL_REASON['PARTIALLY_FILLED']}"
|
||||
|
||||
self.wallets.update()
|
||||
self._notify_enter_cancel(trade, order_type=self.strategy.order_types['entry'],
|
||||
reason=reason)
|
||||
reason=order_obj.ft_cancel_reason)
|
||||
return was_trade_fully_canceled
|
||||
|
||||
def handle_cancel_exit(self, trade: Trade, order: Dict, reason: str) -> bool:
|
||||
def handle_cancel_exit(
|
||||
self, trade: Trade, order: Dict, order_obj: Order, reason: str
|
||||
) -> bool:
|
||||
"""
|
||||
exit order cancel - cancel order and update trade
|
||||
:return: True if exit order was cancelled, false otherwise
|
||||
"""
|
||||
order_id = order_obj.order_id
|
||||
cancelled = False
|
||||
# Cancelled orders may have the status of 'canceled' or 'closed'
|
||||
if order['status'] not in constants.NON_OPEN_EXCHANGE_STATES:
|
||||
filled_val: float = order.get('filled', 0.0) or 0.0
|
||||
filled_rem_stake = trade.stake_amount - filled_val * trade.open_rate
|
||||
filled_amt: float = order.get('filled', 0.0) or 0.0
|
||||
# Filled val is in quote currency (after leverage)
|
||||
filled_rem_stake = trade.stake_amount - (filled_amt * trade.open_rate / trade.leverage)
|
||||
minstake = self.exchange.get_min_pair_stake_amount(
|
||||
trade.pair, trade.open_rate, self.strategy.stoploss)
|
||||
# Double-check remaining amount
|
||||
if filled_val > 0:
|
||||
if filled_amt > 0:
|
||||
reason = constants.CANCEL_REASON['PARTIALLY_FILLED']
|
||||
if minstake and filled_rem_stake < minstake:
|
||||
logger.warning(
|
||||
f"Order {trade.open_order_id} for {trade.pair} not cancelled, as "
|
||||
f"the filled amount of {filled_val} would result in an unexitable trade.")
|
||||
f"Order {order_id} for {trade.pair} not cancelled, as "
|
||||
f"the filled amount of {filled_amt} would result in an unexitable trade.")
|
||||
reason = constants.CANCEL_REASON['PARTIALLY_FILLED_KEEP_OPEN']
|
||||
|
||||
self._notify_exit_cancel(
|
||||
@@ -1540,13 +1605,13 @@ class FreqtradeBot(LoggingMixin):
|
||||
sub_trade=trade.amount != order['amount']
|
||||
)
|
||||
return False
|
||||
|
||||
order_obj.ft_cancel_reason = reason
|
||||
try:
|
||||
order = self.exchange.cancel_order_with_result(
|
||||
order['id'], trade.pair, trade.amount)
|
||||
except InvalidOrderException:
|
||||
logger.exception(
|
||||
f"Could not cancel {trade.exit_side} order {trade.open_order_id}")
|
||||
f"Could not cancel {trade.exit_side} order {order_id}")
|
||||
return False
|
||||
|
||||
# Set exit_reason for fill message
|
||||
@@ -1555,25 +1620,26 @@ class FreqtradeBot(LoggingMixin):
|
||||
# Order might be filled above in odd timing issues.
|
||||
if order.get('status') in ('canceled', 'cancelled'):
|
||||
trade.exit_reason = None
|
||||
trade.open_order_id = None
|
||||
else:
|
||||
trade.exit_reason = exit_reason_prev
|
||||
cancelled = True
|
||||
else:
|
||||
reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
|
||||
if order_obj.ft_cancel_reason is None:
|
||||
order_obj.ft_cancel_reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
|
||||
trade.exit_reason = None
|
||||
trade.open_order_id = None
|
||||
|
||||
self.update_trade_state(trade, order['id'], order)
|
||||
|
||||
logger.info(f'{trade.exit_side.capitalize()} order {reason} for {trade}.')
|
||||
logger.info(
|
||||
f'{trade.exit_side.capitalize()} order {order_obj.ft_cancel_reason} for {trade}.')
|
||||
trade.close_rate = None
|
||||
trade.close_rate_requested = None
|
||||
|
||||
self._notify_exit_cancel(
|
||||
trade,
|
||||
order_type=self.strategy.order_types['exit'],
|
||||
reason=reason, order_id=order['id'], sub_trade=trade.amount != order['amount']
|
||||
reason=order_obj.ft_cancel_reason, order_id=order['id'],
|
||||
sub_trade=trade.amount != order['amount']
|
||||
)
|
||||
return cancelled
|
||||
|
||||
@@ -1625,15 +1691,13 @@ class FreqtradeBot(LoggingMixin):
|
||||
:param exit_check: CheckTuple with signal and reason
|
||||
:return: True if it succeeds False
|
||||
"""
|
||||
try:
|
||||
trade.funding_fees = self.exchange.get_funding_fees(
|
||||
trade.set_funding_fees(
|
||||
self.exchange.get_funding_fees(
|
||||
pair=trade.pair,
|
||||
amount=trade.amount,
|
||||
is_short=trade.is_short,
|
||||
open_date=trade.date_last_filled_utc,
|
||||
)
|
||||
except ExchangeError:
|
||||
logger.warning("Could not update funding fee.")
|
||||
open_date=trade.date_last_filled_utc)
|
||||
)
|
||||
|
||||
exit_type = 'exit'
|
||||
exit_reason = exit_tag or exit_check.exit_reason
|
||||
@@ -1696,7 +1760,6 @@ class FreqtradeBot(LoggingMixin):
order_obj = Order.parse_from_ccxt_object(order, trade.pair, trade.exit_side, amount, limit)
trade.orders.append(order_obj)

trade.open_order_id = order['id']
trade.exit_order_status = ''
trade.close_rate_requested = limit
trade.exit_reason = exit_reason
@@ -1704,7 +1767,7 @@ class FreqtradeBot(LoggingMixin):
self._notify_exit(trade, order_type, sub_trade=bool(sub_trade_amt), order=order_obj)
# In case of market sell orders the order can be closed immediately
if order.get('status', 'unknown') in ('closed', 'expired'):
self.update_trade_state(trade, trade.open_order_id, order)
self.update_trade_state(trade, order_obj.order_id, order)
Trade.commit()

return True
@@ -1723,14 +1786,12 @@ class FreqtradeBot(LoggingMixin):
amount = order.safe_filled if fill else order.safe_amount
order_rate: float = order.safe_price

profit = trade.calc_profit(rate=order_rate, amount=amount, open_rate=trade.open_rate)
profit_ratio = trade.calc_profit_ratio(order_rate, amount, trade.open_rate)
profit = trade.calculate_profit(order_rate, amount, trade.open_rate)
else:
order_rate = trade.safe_close_rate
profit = trade.calc_profit(rate=order_rate) + (0.0 if fill else trade.realized_profit)
profit_ratio = trade.calc_profit_ratio(order_rate)
profit = trade.calculate_profit(rate=order_rate)
amount = trade.amount
gain = "profit" if profit_ratio > 0 else "loss"
gain = "profit" if profit.profit_ratio > 0 else "loss"

msg: RPCSellMsg = {
'type': (RPCMessageType.EXIT_FILL if fill
@@ -1748,8 +1809,8 @@ class FreqtradeBot(LoggingMixin):
'open_rate': trade.open_rate,
'close_rate': order_rate,
'current_rate': current_rate,
'profit_amount': profit,
'profit_ratio': profit_ratio,
'profit_amount': profit.profit_abs if fill else profit.total_profit,
'profit_ratio': profit.profit_ratio,
'buy_tag': trade.enter_tag,
'enter_tag': trade.enter_tag,
'sell_reason': trade.exit_reason, # Deprecated
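The notification hunks switch from separate calc_profit / calc_profit_ratio calls to a single calculate_profit call returning a profit structure. A hedged illustration of how such a struct can drive the fill vs. open-order message fields (the ProfitStruct fields are taken from the dataclass added further down in this changeset; the helper below is illustrative, not freqtrade API):

from dataclasses import dataclass

@dataclass
class ProfitStruct:
    profit_abs: float
    profit_ratio: float
    total_profit: float
    total_profit_ratio: float

def profit_fields_for_msg(profit: ProfitStruct, fill: bool) -> dict:
    # Filled exits report the realized profit of this order;
    # still-open exit orders report the projected total profit of the trade.
    return {
        'profit_amount': profit.profit_abs if fill else profit.total_profit,
        'profit_ratio': profit.profit_ratio,
        'gain': 'profit' if profit.profit_ratio > 0 else 'loss',
    }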
@@ -1781,11 +1842,10 @@ class FreqtradeBot(LoggingMixin):
order = self.order_obj_or_raise(order_id, order_or_none)

profit_rate: float = trade.safe_close_rate
profit_trade = trade.calc_profit(rate=profit_rate)
profit = trade.calculate_profit(rate=profit_rate)
current_rate = self.exchange.get_rate(
trade.pair, side='exit', is_short=trade.is_short, refresh=False)
profit_ratio = trade.calc_profit_ratio(profit_rate)
gain = "profit" if profit_ratio > 0 else "loss"
gain = "profit" if profit.profit_ratio > 0 else "loss"

msg: RPCSellCancelMsg = {
'type': RPCMessageType.EXIT_CANCEL,
@@ -1800,8 +1860,8 @@ class FreqtradeBot(LoggingMixin):
'amount': order.safe_amount_after_fee,
'open_rate': trade.open_rate,
'current_rate': current_rate,
'profit_amount': profit_trade,
'profit_ratio': profit_ratio,
'profit_amount': profit.profit_abs,
'profit_ratio': profit.profit_ratio,
'buy_tag': trade.enter_tag,
'enter_tag': trade.enter_tag,
'sell_reason': trade.exit_reason, # Deprecated
@@ -1831,7 +1891,7 @@ class FreqtradeBot(LoggingMixin):

def update_trade_state(
self, trade: Trade, order_id: Optional[str],
action_order: Optional[Dict[str, Any]] = None,
action_order: Optional[Dict[str, Any]] = None, *,
stoploss_order: bool = False, send_msg: bool = True) -> bool:
"""
Checks trades with open orders and updates the amount if necessary
@@ -1860,7 +1920,7 @@ class FreqtradeBot(LoggingMixin):

if self.exchange.check_order_canceled_empty(order):
# Trade has been cancelled on exchange
# Handling of this will happen in check_handle_timedout.
# Handling of this will happen in handle_cancel_order.
return True

order_obj_or_none = trade.select_order_by_order_id(order_id)
@@ -1868,17 +1928,25 @@ class FreqtradeBot(LoggingMixin):

self.handle_order_fee(trade, order_obj, order)

trade.update_trade(order_obj)
trade.update_trade(order_obj, not send_msg)

if order.get('status') in constants.NON_OPEN_EXCHANGE_STATES:
trade = self._update_trade_after_fill(trade, order_obj)
Trade.commit()

self.order_close_notify(trade, order_obj, stoploss_order, send_msg)

return False

def _update_trade_after_fill(self, trade: Trade, order: Order) -> Trade:
if order.status in constants.NON_OPEN_EXCHANGE_STATES:
# If a entry order was closed, force update on stoploss on exchange
if order.get('side') == trade.entry_side:
if order.ft_order_side == trade.entry_side:
trade = self.cancel_stoploss_on_exchange(trade)
if not self.edge:
# TODO: should shorting/leverage be supported by Edge,
# then this will need to be fixed.
trade.adjust_stop_loss(trade.open_rate, self.strategy.stoploss, initial=True)
if order.get('side') == trade.entry_side or (trade.amount > 0 and trade.is_open):
if order.ft_order_side == trade.entry_side or (trade.amount > 0 and trade.is_open):
# Must also run for partial exits
# TODO: Margin will need to use interest_rate as well.
# interest_rate = self.exchange.get_interest_rate()
@@ -1894,13 +1962,16 @@ class FreqtradeBot(LoggingMixin):
))
except DependencyException:
logger.warning('Unable to calculate liquidation price')
if self.strategy.use_custom_stoploss:
current_rate = self.exchange.get_rate(
trade.pair, side='exit', is_short=trade.is_short, refresh=True)
profit = trade.calc_profit_ratio(current_rate)
self.strategy.ft_stoploss_adjust(current_rate, trade,
datetime.now(timezone.utc), profit, 0,
after_fill=True)
# Updating wallets when order is closed
self.wallets.update()
Trade.commit()

self.order_close_notify(trade, order_obj, stoploss_order, send_msg)

return False
return trade

def order_close_notify(
self, trade: Trade, order: Order, stoploss_order: bool, send_msg: bool):
@@ -1910,11 +1981,11 @@ class FreqtradeBot(LoggingMixin):
trade.amount, abs_tol=constants.MATH_CLOSE_PREC)
if order.ft_order_side == trade.exit_side:
# Exit notification
if send_msg and not stoploss_order and not trade.open_order_id:
if send_msg and not stoploss_order and order.order_id not in trade.open_orders_ids:
self._notify_exit(trade, '', fill=True, sub_trade=sub_trade, order=order)
if not trade.is_open:
self.handle_protections(trade.pair, trade.trade_direction)
elif send_msg and not trade.open_order_id and not stoploss_order:
elif send_msg and order.order_id not in trade.open_orders_ids and not stoploss_order:
# Enter fill
self._notify_enter(trade, order, order.order_type, fill=True, sub_trade=sub_trade)
@@ -8,15 +8,13 @@ logger = logging.getLogger(__name__)
def set_loggers(verbosity: int = 0, api_verbosity: str = 'info') -> None:
"""
Set the logging level for third party libraries
:param verbosity: Verbosity level. amount of `-v` passed to the command line
:return: None
"""

logging.getLogger('requests').setLevel(
logging.INFO if verbosity <= 1 else logging.DEBUG
)
logging.getLogger("urllib3").setLevel(
logging.INFO if verbosity <= 1 else logging.DEBUG
)
for logger_name in ('requests', 'urllib3', 'httpcore'):
logging.getLogger(logger_name).setLevel(
logging.INFO if verbosity <= 1 else logging.DEBUG
)
logging.getLogger('ccxt.base.exchange').setLevel(
logging.INFO if verbosity <= 2 else logging.DEBUG
)
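The set_loggers change collapses per-library calls into a loop over library names. A minimal standalone sketch of the same pattern (library names taken from the diff; the helper name is illustrative):

import logging

def set_third_party_log_levels(verbosity: int = 0) -> None:
    # Noisy HTTP libraries stay at INFO unless -vv or more is passed.
    level = logging.INFO if verbosity <= 1 else logging.DEBUG
    for logger_name in ('requests', 'urllib3', 'httpcore'):
        logging.getLogger(logger_name).setLevel(level)
    # ccxt only goes to DEBUG at a higher verbosity threshold.
    logging.getLogger('ccxt.base.exchange').setLevel(
        logging.INFO if verbosity <= 2 else logging.DEBUG)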
@@ -11,8 +11,8 @@ from freqtrade.util.gc_setup import gc_set_threshold

# check min. python version
if sys.version_info < (3, 8):  # pragma: no cover
sys.exit("Freqtrade requires Python version >= 3.8")
if sys.version_info < (3, 9):  # pragma: no cover
sys.exit("Freqtrade requires Python version >= 3.9")

from freqtrade import __version__
from freqtrade.commands import Arguments

@@ -3,6 +3,7 @@ Various tool function for Freqtrade and scripts
"""
import gzip
import logging
from io import StringIO
from pathlib import Path
from typing import Any, Dict, Iterator, List, Mapping, Optional, TextIO, Union
from urllib.parse import urlparse
@@ -156,7 +157,7 @@ def round_dict(d, n):
return {k: (round(v, n) if isinstance(v, float) else v) for k, v in d.items()}

def safe_value_fallback(obj: dict, key1: str, key2: str, default_value=None):
def safe_value_fallback(obj: dict, key1: str, key2: Optional[str] = None, default_value=None):
"""
Search a value in obj, return this if it's not None.
Then search key2 in obj - return that if it's not none - then use default_value.
@@ -165,7 +166,7 @@ def safe_value_fallback(obj: dict, key1: str, key2: str, default_value=None):
if key1 in obj and obj[key1] is not None:
return obj[key1]
else:
if key2 in obj and obj[key2] is not None:
if key2 and key2 in obj and obj[key2] is not None:
return obj[key2]
return default_value
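With key2 now optional, safe_value_fallback can also serve as a plain "value or default" lookup. A small usage sketch of the behaviour shown in the hunk, assuming the function is imported from freqtrade.misc (the sample dict is illustrative):

order = {'lastTradeTimestamp': None, 'timestamp': 1696000000000}

# Two-key fallback: key1 holds None, so key2 wins.
ts = safe_value_fallback(order, 'lastTradeTimestamp', 'timestamp')        # -> 1696000000000

# Single-key form enabled by this change: no key2, fall back to the default.
ts2 = safe_value_fallback(order, 'lastTradeTimestamp', default_value=0)   # -> 0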
@@ -231,7 +232,7 @@ def json_to_dataframe(data: str) -> pd.DataFrame:
:param data: A JSON string
:returns: A pandas DataFrame from the JSON string
"""
dataframe = pd.read_json(data, orient='split')
dataframe = pd.read_json(StringIO(data), orient='split')
if 'date' in dataframe.columns:
dataframe['date'] = pd.to_datetime(dataframe['date'], unit='ms', utc=True)
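Wrapping the JSON string in StringIO matches pandas' move away from accepting literal JSON strings in read_json. A minimal standalone reproduction of the new call (sample payload is illustrative):

from io import StringIO
import pandas as pd

data = '{"columns":["date","close"],"index":[0],"data":[[1696000000000, 27000.5]]}'
df = pd.read_json(StringIO(data), orient='split')
df['date'] = pd.to_datetime(df['date'], unit='ms', utc=True)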
0  freqtrade/optimize/analysis/__init__.py  (new file)
@@ -1,35 +1,23 @@
import logging
import shutil
from copy import deepcopy
from datetime import datetime, timedelta, timezone
from datetime import datetime, timedelta
from pathlib import Path
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List

from pandas import DataFrame

from freqtrade.configuration import TimeRange
from freqtrade.data.history import get_timerange
from freqtrade.exchange import timeframe_to_minutes
from freqtrade.loggers.set_log_levels import (reduce_verbosity_for_bias_tester,
restore_verbosity_for_bias_tester)
from freqtrade.optimize.backtesting import Backtesting
from freqtrade.optimize.base_analysis import BaseAnalysis, VarHolder

logger = logging.getLogger(__name__)

class VarHolder:
timerange: TimeRange
data: DataFrame
indicators: Dict[str, DataFrame]
result: DataFrame
compared: DataFrame
from_dt: datetime
to_dt: datetime
compared_dt: datetime
timeframe: str

class Analysis:
def __init__(self) -> None:
self.total_signals = 0
@@ -39,29 +27,18 @@ class Analysis:
self.has_bias = False

class LookaheadAnalysis:
class LookaheadAnalysis(BaseAnalysis):

def __init__(self, config: Dict[str, Any], strategy_obj: Dict):
self.failed_bias_check = True
self.full_varHolder = VarHolder()

super().__init__(config, strategy_obj)

self.entry_varHolders: List[VarHolder] = []
self.exit_varHolders: List[VarHolder] = []
self.exchange: Optional[Any] = None
self._fee = None

# pull variables the scope of the lookahead_analysis-instance
self.local_config = deepcopy(config)
self.local_config['strategy'] = strategy_obj['name']
self.current_analysis = Analysis()
self.minimum_trade_amount = config['minimum_trade_amount']
self.targeted_trade_amount = config['targeted_trade_amount']
self.strategy_obj = strategy_obj

@staticmethod
def dt_to_timestamp(dt: datetime):
timestamp = int(dt.replace(tzinfo=timezone.utc).timestamp())
return timestamp

@staticmethod
def get_result(backtesting: Backtesting, processed: DataFrame):
@@ -117,8 +94,8 @@ class LookaheadAnalysis:
# compare_df now comprises tuples with [1] having either 'self' or 'other'
if 'other' in col_name[1]:
continue
self_value = compare_df_row[col_idx]
other_value = compare_df_row[col_idx + 1]
self_value = compare_df_row.iloc[col_idx]
other_value = compare_df_row.iloc[col_idx + 1]

# output differences
if self_value != other_value:
@@ -162,24 +139,6 @@ class LookaheadAnalysis:
varholder.indicators = backtesting.strategy.advise_all_indicators(varholder.data)
varholder.result = self.get_result(backtesting, varholder.indicators)

def fill_full_varholder(self):
self.full_varHolder = VarHolder()

# define datetime in human-readable format
parsed_timerange = TimeRange.parse_timerange(self.local_config['timerange'])

if parsed_timerange.startdt is None:
self.full_varHolder.from_dt = datetime.fromtimestamp(0, tz=timezone.utc)
else:
self.full_varHolder.from_dt = parsed_timerange.startdt

if parsed_timerange.stopdt is None:
self.full_varHolder.to_dt = datetime.utcnow()
else:
self.full_varHolder.to_dt = parsed_timerange.stopdt

self.prepare_data(self.full_varHolder, self.local_config['pairs'])

def fill_entry_and_exit_varHolders(self, result_row):
# entry_varHolder
entry_varHolder = VarHolder()
@@ -246,8 +205,7 @@ class LookaheadAnalysis:

def start(self) -> None:

# first make a single backtest
self.fill_full_varholder()
super().start()

reduce_verbosity_for_bias_tester()

@@ -7,7 +7,7 @@ import pandas as pd

from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.optimize.lookahead_analysis import LookaheadAnalysis
from freqtrade.optimize.analysis.lookahead import LookaheadAnalysis
from freqtrade.resolvers import StrategyResolver

@@ -184,12 +184,12 @@ class LookaheadAnalysisSubFunctions:

lookaheadAnalysis_instances = []

# unify --strategy and --strategy_list to one list
# unify --strategy and --strategy-list to one list
if not (strategy_list := config.get('strategy_list', [])):
if config.get('strategy') is None:
raise OperationalException(
"No Strategy specified. Please specify a strategy via --strategy or "
"--strategy_list"
"--strategy-list"
)
strategy_list = [config['strategy']]

@@ -211,5 +211,5 @@ class LookaheadAnalysisSubFunctions:
else:
logger.error("There were no strategies specified neither through "
"--strategy nor through "
"--strategy_list "
"--strategy-list "
"or timeframe was not specified.")
183  freqtrade/optimize/analysis/recursive.py  (new file)
@@ -0,0 +1,183 @@
import logging
import shutil
from copy import deepcopy
from datetime import timedelta
from pathlib import Path
from typing import Any, Dict, List

from pandas import DataFrame

from freqtrade.exchange import timeframe_to_minutes
from freqtrade.loggers.set_log_levels import (reduce_verbosity_for_bias_tester,
restore_verbosity_for_bias_tester)
from freqtrade.optimize.backtesting import Backtesting
from freqtrade.optimize.base_analysis import BaseAnalysis, VarHolder

logger = logging.getLogger(__name__)

class RecursiveAnalysis(BaseAnalysis):

def __init__(self, config: Dict[str, Any], strategy_obj: Dict):

self._startup_candle = config.get('startup_candle', [199, 399, 499, 999, 1999])

super().__init__(config, strategy_obj)

self.partial_varHolder_array: List[VarHolder] = []
self.partial_varHolder_lookahead_array: List[VarHolder] = []

self.dict_recursive: Dict[str, Any] = dict()

# For recursive bias check
# analyzes two data frames with processed indicators and shows differences between them.
def analyze_indicators(self):

pair_to_check = self.local_config['pairs'][0]
logger.info("Start checking for recursive bias")

# check and report signals
base_last_row = self.full_varHolder.indicators[pair_to_check].iloc[-1]

for part in self.partial_varHolder_array:
part_last_row = part.indicators[pair_to_check].iloc[-1]

compare_df = base_last_row.compare(part_last_row)
if compare_df.shape[0] > 0:
# print(compare_df)
for col_name, values in compare_df.items():
# print(col_name)
if 'other' == col_name:
continue
indicators = values.index

for indicator in indicators:
if (indicator not in self.dict_recursive):
self.dict_recursive[indicator] = {}

values_diff = compare_df.loc[indicator]
values_diff_self = values_diff.loc['self']
values_diff_other = values_diff.loc['other']
diff = (values_diff_other - values_diff_self) / values_diff_self * 100

self.dict_recursive[indicator][part.startup_candle] = f"{diff:.3f}%"

else:
logger.info("No variance on indicator(s) found due to recursive formula.")
break

# For lookahead bias check
# analyzes two data frames with processed indicators and shows differences between them.
def analyze_indicators_lookahead(self):

pair_to_check = self.local_config['pairs'][0]
logger.info("Start checking for lookahead bias on indicators only")

part = self.partial_varHolder_lookahead_array[0]
part_last_row = part.indicators[pair_to_check].iloc[-1]
date_to_check = part_last_row['date']
index_to_get = (self.full_varHolder.indicators[pair_to_check]['date'] == date_to_check)
base_row_check = self.full_varHolder.indicators[pair_to_check].loc[index_to_get].iloc[-1]

check_time = part.to_dt.strftime('%Y-%m-%dT%H:%M:%S')

logger.info(f"Check indicators at {check_time}")
# logger.info(f"vs {part_timerange} with {part.startup_candle} startup candle")

compare_df = base_row_check.compare(part_last_row)
if compare_df.shape[0] > 0:
# print(compare_df)
for col_name, values in compare_df.items():
# print(col_name)
if 'other' == col_name:
continue
indicators = values.index

for indicator in indicators:
logger.info(f"=> found lookahead in indicator {indicator}")
# logger.info("base value {:.5f}".format(values_diff_self))
# logger.info("part value {:.5f}".format(values_diff_other))

else:
logger.info("No lookahead bias on indicators found.")

def prepare_data(self, varholder: VarHolder, pairs_to_load: List[DataFrame]):

if 'freqai' in self.local_config and 'identifier' in self.local_config['freqai']:
# purge previous data if the freqai model is defined
# (to be sure nothing is carried over from older backtests)
path_to_current_identifier = (
Path(f"{self.local_config['user_data_dir']}/models/"
f"{self.local_config['freqai']['identifier']}").resolve())
# remove folder and its contents
if Path.exists(path_to_current_identifier):
shutil.rmtree(path_to_current_identifier)

prepare_data_config = deepcopy(self.local_config)
prepare_data_config['timerange'] = (str(self.dt_to_timestamp(varholder.from_dt)) + "-" +
str(self.dt_to_timestamp(varholder.to_dt)))
prepare_data_config['exchange']['pair_whitelist'] = pairs_to_load

backtesting = Backtesting(prepare_data_config, self.exchange)
self.exchange = backtesting.exchange
backtesting._set_strategy(backtesting.strategylist[0])

varholder.data, varholder.timerange = backtesting.load_bt_data()
backtesting.load_bt_data_detail()
varholder.timeframe = backtesting.timeframe

varholder.indicators = backtesting.strategy.advise_all_indicators(varholder.data)

def fill_partial_varholder(self, start_date, startup_candle):
logger.info(f"Calculating indicators using startup candle of {startup_candle}.")
partial_varHolder = VarHolder()

partial_varHolder.from_dt = start_date
partial_varHolder.to_dt = self.full_varHolder.to_dt
partial_varHolder.startup_candle = startup_candle

self.local_config['startup_candle_count'] = startup_candle

self.prepare_data(partial_varHolder, self.local_config['pairs'])

self.partial_varHolder_array.append(partial_varHolder)

def fill_partial_varholder_lookahead(self, end_date):
logger.info("Calculating indicators to test lookahead on indicators.")

partial_varHolder = VarHolder()

partial_varHolder.from_dt = self.full_varHolder.from_dt
partial_varHolder.to_dt = end_date

self.prepare_data(partial_varHolder, self.local_config['pairs'])

self.partial_varHolder_lookahead_array.append(partial_varHolder)

def start(self) -> None:

super().start()

reduce_verbosity_for_bias_tester()
start_date_full = self.full_varHolder.from_dt
end_date_full = self.full_varHolder.to_dt

timeframe_minutes = timeframe_to_minutes(self.full_varHolder.timeframe)

end_date_partial = start_date_full + timedelta(minutes=int(timeframe_minutes * 10))

self.fill_partial_varholder_lookahead(end_date_partial)

# restore_verbosity_for_bias_tester()

start_date_partial = end_date_full - timedelta(minutes=int(timeframe_minutes))

for startup_candle in self._startup_candle:
self.fill_partial_varholder(start_date_partial, int(startup_candle))

# Restore verbosity, so it's not too quiet for the next strategy
restore_verbosity_for_bias_tester()

self.analyze_indicators()
self.analyze_indicators_lookahead()
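The core of analyze_indicators above is a percentage comparison of the last candle's indicator values between the full run and each reduced-startup run. A standalone sketch of that comparison using pandas (indicator names and values are illustrative only):

import pandas as pd

base_last_row = pd.Series({'rsi': 55.2, 'ema_20': 101.4})
part_last_row = pd.Series({'rsi': 54.9, 'ema_20': 101.4})

# Series.compare() keeps only labels whose values differ, with 'self'/'other' columns.
compare_df = base_last_row.compare(part_last_row)
for indicator in compare_df.index:
    self_value = compare_df.loc[indicator, 'self']
    other_value = compare_df.loc[indicator, 'other']
    diff_pct = (other_value - self_value) / self_value * 100
    print(f"{indicator}: {diff_pct:.3f}%")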
106  freqtrade/optimize/analysis/recursive_helpers.py  (new file)
@@ -0,0 +1,106 @@
import logging
import time
from pathlib import Path
from typing import Any, Dict, List

from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.optimize.analysis.recursive import RecursiveAnalysis
from freqtrade.resolvers import StrategyResolver

logger = logging.getLogger(__name__)

class RecursiveAnalysisSubFunctions:

@staticmethod
def text_table_recursive_analysis_instances(
recursive_instances: List[RecursiveAnalysis]):
startups = recursive_instances[0]._startup_candle
headers = ['indicators']
for candle in startups:
headers.append(candle)

data = []
for inst in recursive_instances:
if len(inst.dict_recursive) > 0:
for indicator, values in inst.dict_recursive.items():
temp_data = [indicator]
for candle in startups:
temp_data.append(values.get(int(candle), '-'))
data.append(temp_data)

if len(data) > 0:
from tabulate import tabulate
table = tabulate(data, headers=headers, tablefmt="orgtbl")
print(table)
return table, headers, data

return None, None, data

@staticmethod
def calculate_config_overrides(config: Config):
if 'timerange' not in config:
# setting a timerange is enforced here
raise OperationalException(
"Please set a timerange. "
"A timerange of 5000 candles are enough for recursive analysis."
)

if config.get('backtest_cache') is None:
config['backtest_cache'] = 'none'
elif config['backtest_cache'] != 'none':
logger.info(f"backtest_cache = "
f"{config['backtest_cache']} detected. "
f"Inside recursive-analysis it is enforced to be 'none'. "
f"Changed it to 'none'")
config['backtest_cache'] = 'none'
return config

@staticmethod
def initialize_single_recursive_analysis(config: Config, strategy_obj: Dict[str, Any]):

logger.info(f"Recursive test of {Path(strategy_obj['location']).name} started.")
start = time.perf_counter()
current_instance = RecursiveAnalysis(config, strategy_obj)
current_instance.start()
elapsed = time.perf_counter() - start
logger.info(f"Checking recursive and indicator-only lookahead bias of indicators "
f"of {Path(strategy_obj['location']).name} "
f"took {elapsed:.0f} seconds.")
return current_instance

@staticmethod
def start(config: Config):
config = RecursiveAnalysisSubFunctions.calculate_config_overrides(config)

strategy_objs = StrategyResolver.search_all_objects(
config, enum_failed=False, recursive=config.get('recursive_strategy_search', False))

RecursiveAnalysis_instances = []

# unify --strategy and --strategy-list to one list
if not (strategy_list := config.get('strategy_list', [])):
if config.get('strategy') is None:
raise OperationalException(
"No Strategy specified. Please specify a strategy via --strategy"
)
strategy_list = [config['strategy']]

# check if strategies can be properly loaded, only check them if they can be.
for strat in strategy_list:
for strategy_obj in strategy_objs:
if strategy_obj['name'] == strat and strategy_obj not in strategy_list:
RecursiveAnalysis_instances.append(
RecursiveAnalysisSubFunctions.initialize_single_recursive_analysis(
config, strategy_obj))
break

# report the results
if RecursiveAnalysis_instances:
RecursiveAnalysisSubFunctions.text_table_recursive_analysis_instances(
RecursiveAnalysis_instances)
else:
logger.error("There was no strategy specified through --strategy "
"or timeframe was not specified.")
@@ -116,6 +116,7 @@ class Backtesting:
raise OperationalException("Timeframe needs to be set in either "
"configuration or as cli argument `--timeframe 5m`")
self.timeframe = str(self.config.get('timeframe'))
self.disable_database_use()
self.timeframe_min = timeframe_to_minutes(self.timeframe)
self.init_backtest_detail()
self.pairlists = PairListManager(self.exchange, self.config, self.dataprovider)
@@ -318,13 +319,16 @@ class Backtesting:
else:
self.futures_data = {}

def disable_database_use(self):
PairLocks.use_db = False
PairLocks.timeframe = self.timeframe
Trade.use_db = False

def prepare_backtest(self, enable_protections):
"""
Backtesting setup method - called once for every call to "backtest()".
"""
PairLocks.use_db = False
PairLocks.timeframe = self.config['timeframe']
Trade.use_db = False
self.disable_database_use()
PairLocks.reset_locks()
Trade.reset_trades()
self.rejected_trades = 0
@@ -521,10 +525,10 @@ class Backtesting:
# This should not be reached...
return row[OPEN_IDX]

def _get_adjust_trade_entry_for_candle(self, trade: LocalTrade, row: Tuple
) -> LocalTrade:
def _get_adjust_trade_entry_for_candle(
self, trade: LocalTrade, row: Tuple, current_time: datetime
) -> LocalTrade:
current_rate = row[OPEN_IDX]
current_date = row[DATE_IDX].to_pydatetime()
current_profit = trade.calc_profit_ratio(current_rate)
min_stake = self.exchange.get_min_pair_stake_amount(trade.pair, current_rate, -0.1)
max_stake = self.exchange.get_max_pair_stake_amount(trade.pair, current_rate)
@@ -532,7 +536,7 @@ class Backtesting:
stake_amount = strategy_safe_wrapper(self.strategy.adjust_trade_position,
default_retval=None, supress_error=True)(
trade=trade, # type: ignore[arg-type]
current_time=current_date, current_rate=current_rate,
current_time=current_time, current_rate=current_rate,
current_profit=current_profit, min_stake=min_stake,
max_stake=min(max_stake, stake_available),
current_entry_rate=current_rate, current_exit_rate=current_rate,
@@ -565,10 +569,10 @@ class Backtesting:
# Remaining stake is too low to be sold.
return trade
exit_ = ExitCheckTuple(ExitType.PARTIAL_EXIT)
pos_trade = self._get_exit_for_signal(trade, row, exit_, amount)
pos_trade = self._get_exit_for_signal(trade, row, exit_, current_time, amount)
if pos_trade is not None:
order = pos_trade.orders[-1]
if self._try_close_open_order(order, trade, current_date, row):
if self._try_close_open_order(order, trade, current_time, row):
trade.recalc_trade_from_orders()
self.wallets.update()
return pos_trade
@@ -579,6 +583,11 @@ class Backtesting:
""" Rate is within candle, therefore filled"""
return row[LOW_IDX] <= rate <= row[HIGH_IDX]

def _call_adjust_stop(self, current_date: datetime, trade: LocalTrade, current_rate: float):
profit = trade.calc_profit_ratio(current_rate)
self.strategy.ft_stoploss_adjust(current_rate, trade, # type: ignore
current_date, profit, 0, after_fill=True)

def _try_close_open_order(
self, order: Optional[Order], trade: LocalTrade, current_date: datetime,
row: Tuple) -> bool:
@@ -588,17 +597,29 @@ class Backtesting:
"""
if order and self._get_order_filled(order.ft_price, row):
order.close_bt_order(current_date, trade)
trade.open_order_id = None
if not (order.ft_order_side == trade.exit_side and order.safe_amount == trade.amount):
# trade is still open
trade.set_liquidation_price(self.exchange.get_liquidation_price(
pair=trade.pair,
open_rate=trade.open_rate,
is_short=trade.is_short,
amount=trade.amount,
stake_amount=trade.stake_amount,
leverage=trade.leverage,
wallet_balance=trade.stake_amount,
))
self._call_adjust_stop(current_date, trade, order.ft_price)
# pass
return True
return False

def _get_exit_for_signal(
self, trade: LocalTrade, row: Tuple, exit_: ExitCheckTuple,
current_time: datetime,
amount: Optional[float] = None) -> Optional[LocalTrade]:

exit_candle_time: datetime = row[DATE_IDX].to_pydatetime()
if exit_.exit_flag:
trade.close_date = exit_candle_time
trade.close_date = current_time
exit_reason = exit_.exit_reason
amount_ = amount if amount is not None else trade.amount
trade_dur = int((trade.close_date_utc - trade.open_date_utc).total_seconds() // 60)
@@ -626,10 +647,10 @@ class Backtesting:
default_retval=close_rate)(
pair=trade.pair,
trade=trade, # type: ignore[arg-type]
current_time=exit_candle_time,
current_time=current_time,
proposed_rate=close_rate, current_profit=current_profit,
exit_tag=exit_reason)
if rate != close_rate:
if rate is not None and rate != close_rate:
close_rate = price_to_precision(rate, trade.price_precision,
self.precision_mode)
# We can't place orders lower than current low.
@@ -652,7 +673,7 @@ class Backtesting:
time_in_force=time_in_force,
sell_reason=exit_reason, # deprecated
exit_reason=exit_reason,
current_time=exit_candle_time)):
current_time=current_time)):
return None

trade.exit_reason = exit_reason
@@ -693,21 +714,24 @@ class Backtesting:
trade.orders.append(order)
return trade

def _check_trade_exit(self, trade: LocalTrade, row: Tuple) -> Optional[LocalTrade]:
exit_candle_time: datetime = row[DATE_IDX].to_pydatetime()
def _check_trade_exit(
self, trade: LocalTrade, row: Tuple, current_time: datetime
) -> Optional[LocalTrade]:

if self.trading_mode == TradingMode.FUTURES:
trade.funding_fees = self.exchange.calculate_funding_fees(
self.futures_data[trade.pair],
amount=trade.amount,
is_short=trade.is_short,
open_date=trade.date_last_filled_utc,
close_date=exit_candle_time,
trade.set_funding_fees(
self.exchange.calculate_funding_fees(
self.futures_data[trade.pair],
amount=trade.amount,
is_short=trade.is_short,
open_date=trade.date_last_filled_utc,
close_date=current_time
)
)

# Check if we need to adjust our current positions
if self.strategy.position_adjustment_enable:
trade = self._get_adjust_trade_entry_for_candle(trade, row)
trade = self._get_adjust_trade_entry_for_candle(trade, row, current_time)

enter = row[SHORT_IDX] if trade.is_short else row[LONG_IDX]
exit_sig = row[ESHORT_IDX] if trade.is_short else row[ELONG_IDX]
@@ -717,7 +741,7 @@ class Backtesting:
low=row[LOW_IDX], high=row[HIGH_IDX]
)
for exit_ in exits:
t = self._get_exit_for_signal(trade, row, exit_)
t = self._get_exit_for_signal(trade, row, exit_, current_time)
if t:
return t
return None
@@ -731,13 +755,15 @@ class Backtesting:
if order_type == 'limit':
new_rate = strategy_safe_wrapper(self.strategy.custom_entry_price,
default_retval=propose_rate)(
pair=pair, current_time=current_time,
pair=pair,
trade=trade, # type: ignore[arg-type]
current_time=current_time,
proposed_rate=propose_rate, entry_tag=entry_tag,
side=direction,
) # default value is the open rate
# We can't place orders higher than current high (otherwise it'd be a stop limit entry)
# which freqtrade does not support in live.
if new_rate != propose_rate:
if new_rate is not None and new_rate != propose_rate:
propose_rate = price_to_precision(new_rate, price_precision,
self.precision_mode)
if direction == "short":
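Both price-callback hunks above add a None guard before comparing the user-returned rate with the proposed rate. A hedged sketch of that guard pattern, passing the price_to_precision helper in explicitly so the snippet stays self-contained (the function name and argument order here are illustrative):

def resolve_entry_rate(proposed_rate, custom_rate, price_precision, precision_mode,
                       price_to_precision):
    """Use the strategy-provided rate only when it is a real number that differs
    from the proposal (sketch of the guard added in this changeset)."""
    if custom_rate is not None and custom_rate != proposed_rate:
        return price_to_precision(custom_rate, price_precision, precision_mode)
    return proposed_rate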
@@ -854,7 +880,6 @@ class Backtesting:
self.trade_id_counter += 1
trade = LocalTrade(
id=self.trade_id_counter,
open_order_id=self.order_id_counter,
pair=pair,
base_currency=base_currency,
stake_currency=self.config['stake_currency'],
@@ -882,16 +907,6 @@ class Backtesting:

trade.adjust_stop_loss(trade.open_rate, self.strategy.stoploss, initial=True)

trade.set_liquidation_price(self.exchange.get_liquidation_price(
pair=pair,
open_rate=propose_rate,
amount=amount,
stake_amount=trade.stake_amount,
leverage=trade.leverage,
wallet_balance=trade.stake_amount,
is_short=is_short,
))

order = Order(
id=self.order_id_counter,
ft_trade_id=trade.id,
@@ -916,8 +931,7 @@ class Backtesting:
)
order._trade_bt = trade
trade.orders.append(order)
if not self._try_close_open_order(order, trade, current_time, row):
trade.open_order_id = str(self.order_id_counter)
self._try_close_open_order(order, trade, current_time, row)
trade.recalc_trade_from_orders()

return trade
@@ -929,7 +943,7 @@ class Backtesting:
"""
for pair in open_trades.keys():
for trade in list(open_trades[pair]):
if trade.open_order_id and trade.nr_of_successful_entries == 0:
if trade.has_open_orders and trade.nr_of_successful_entries == 0:
# Ignore trade if entry-order did not fill yet
continue
exit_row = data[pair][-1]
@@ -1006,13 +1020,11 @@ class Backtesting:
else:
# Close additional entry order
del trade.orders[trade.orders.index(order)]
trade.open_order_id = None
return False
if order.side == trade.exit_side:
self.timedout_exit_orders += 1
# Close exit order and retry exiting on next signal.
del trade.orders[trade.orders.index(order)]
trade.open_order_id = None
return False
return None

@@ -1040,7 +1052,6 @@ class Backtesting:
return False
else:
del trade.orders[trade.orders.index(order)]
trade.open_order_id = None
self.canceled_entry_orders += 1

# place new order if result was not None
@@ -1051,7 +1062,7 @@ class Backtesting:
order.safe_remaining * order.ft_price / trade.leverage),
direction='short' if trade.is_short else 'long')
# Delete trade if no successful entries happened (if placing the new order failed)
if trade.open_order_id is None and trade.nr_of_successful_entries == 0:
if not trade.has_open_orders and trade.nr_of_successful_entries == 0:
return True
self.replaced_entry_orders += 1
else:
@@ -1136,8 +1147,8 @@ class Backtesting:
self.wallets.update()

# 4. Create exit orders (if any)
if not trade.open_order_id:
self._check_trade_exit(trade, row) # Place exit order if necessary
if not trade.has_open_orders:
self._check_trade_exit(trade, row, current_time) # Place exit order if necessary

# 5. Process exit orders.
order = trade.select_order(trade.exit_side, is_open=True)
66  freqtrade/optimize/base_analysis.py  (new file)
@@ -0,0 +1,66 @@
import logging
from copy import deepcopy
from datetime import datetime, timezone
from typing import Any, Dict, Optional

from pandas import DataFrame

from freqtrade.configuration import TimeRange

logger = logging.getLogger(__name__)

class VarHolder:
timerange: TimeRange
data: DataFrame
indicators: Dict[str, DataFrame]
result: DataFrame
compared: DataFrame
from_dt: datetime
to_dt: datetime
compared_dt: datetime
timeframe: str
startup_candle: int

class BaseAnalysis:

def __init__(self, config: Dict[str, Any], strategy_obj: Dict):
self.failed_bias_check = True
self.full_varHolder = VarHolder()
self.exchange: Optional[Any] = None
self._fee = None

# pull variables the scope of the lookahead_analysis-instance
self.local_config = deepcopy(config)
self.local_config['strategy'] = strategy_obj['name']
self.strategy_obj = strategy_obj

@staticmethod
def dt_to_timestamp(dt: datetime):
timestamp = int(dt.replace(tzinfo=timezone.utc).timestamp())
return timestamp

def fill_full_varholder(self):
self.full_varHolder = VarHolder()

# define datetime in human-readable format
parsed_timerange = TimeRange.parse_timerange(self.local_config['timerange'])

if parsed_timerange.startdt is None:
self.full_varHolder.from_dt = datetime.fromtimestamp(0, tz=timezone.utc)
else:
self.full_varHolder.from_dt = parsed_timerange.startdt

if parsed_timerange.stopdt is None:
self.full_varHolder.to_dt = datetime.utcnow()
else:
self.full_varHolder.to_dt = parsed_timerange.stopdt

self.prepare_data(self.full_varHolder, self.local_config['pairs'])

def start(self) -> None:

# first make a single backtest
self.fill_full_varholder()
@@ -21,7 +21,7 @@ logger = logging.getLogger(__name__)

def _format_exception_message(space: str, ignore_missing_space: bool) -> None:
msg = (f"The '{space}' space is included into the hyperoptimization "
f"but no parameter for this space was not found in your Strategy. "
f"but no parameter for this space was found in your Strategy. "
)
if ignore_missing_space:
logger.warning(msg + "This space will be ignored.")

@@ -52,7 +52,7 @@ class SortinoHyperOptLossDaily(IHyperOptLoss):
total_profit = sum_daily["profit_ratio_after_slippage"] - minimum_acceptable_return
expected_returns_mean = total_profit.mean()

sum_daily['downside_returns'] = 0
sum_daily['downside_returns'] = 0.0
sum_daily.loc[total_profit < 0, 'downside_returns'] = total_profit
total_downside = sum_daily['downside_returns']
# Here total_downside contains min(0, P - MAR) values,

@@ -429,14 +429,18 @@ class HyperoptTools:
trials = trials.drop(columns=['Total profit'])

if print_colorized:
trials2 = trials.astype(str)
for i in range(len(trials)):
if trials.loc[i]['is_profit']:
for j in range(len(trials.loc[i]) - 3):
trials.iat[i, j] = f"{Fore.GREEN}{str(trials.loc[i][j])}{Fore.RESET}"
trials2.iat[i, j] = f"{Fore.GREEN}{str(trials.iloc[i, j])}{Fore.RESET}"
if trials.loc[i]['is_best'] and highlight_best:
for j in range(len(trials.loc[i]) - 3):
trials.iat[i, j] = f"{Style.BRIGHT}{str(trials.loc[i][j])}{Style.RESET_ALL}"

trials2.iat[i, j] = (
f"{Style.BRIGHT}{str(trials.iloc[i, j])}{Style.RESET_ALL}"
)
trials = trials2
del trials2
trials = trials.drop(columns=['is_initial_point', 'is_best', 'is_profit', 'is_random'])
if remove_header > 0:
table = tabulate.tabulate(

@@ -219,8 +219,10 @@ def _get_resample_from_period(period: str) -> str:
raise ValueError(f"Period {period} is not supported.")

def generate_periodic_breakdown_stats(trade_list: List, period: str) -> List[Dict[str, Any]]:
results = DataFrame.from_records(trade_list)
def generate_periodic_breakdown_stats(
trade_list: Union[List, DataFrame], period: str) -> List[Dict[str, Any]]:

results = trade_list if not isinstance(trade_list, list) else DataFrame.from_records(trade_list)
if len(results) == 0:
return []
results['close_date'] = to_datetime(results['close_date'], utc=True)
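After this change the breakdown helper accepts either a list of trade dicts or an already-built DataFrame. A small sketch of the dispatch it performs (trade fields and the helper name are illustrative):

from typing import Any, Dict, List, Union
from pandas import DataFrame, to_datetime

def _as_results_frame(trade_list: Union[List[Dict[str, Any]], DataFrame]) -> DataFrame:
    # Lists of dicts are converted; DataFrames are used as-is.
    results = trade_list if not isinstance(trade_list, list) else DataFrame.from_records(trade_list)
    if len(results) > 0:
        results['close_date'] = to_datetime(results['close_date'], utc=True)
    return results

trades = [{'close_date': '2023-10-01 12:00:00', 'profit_abs': 1.5}]
print(_as_results_frame(trades).dtypes)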
@@ -88,6 +88,9 @@ def migrate_trades_and_orders_table(
stop_loss_pct = get_column_def(cols, 'stop_loss_pct', 'null')
initial_stop_loss = get_column_def(cols, 'initial_stop_loss', '0.0')
initial_stop_loss_pct = get_column_def(cols, 'initial_stop_loss_pct', 'null')
is_stop_loss_trailing = get_column_def(
cols, 'is_stop_loss_trailing',
f'coalesce({stop_loss_pct}, 0.0) <> coalesce({initial_stop_loss_pct}, 0.0)')
stoploss_order_id = get_column_def(cols, 'stoploss_order_id', 'null')
stoploss_last_update = get_column_def(cols, 'stoploss_last_update', 'null')
max_rate = get_column_def(cols, 'max_rate', '0.0')
@@ -112,6 +115,7 @@ def migrate_trades_and_orders_table(
# Futures Properties
interest_rate = get_column_def(cols, 'interest_rate', '0.0')
funding_fees = get_column_def(cols, 'funding_fees', '0.0')
funding_fee_running = get_column_def(cols, 'funding_fee_running', 'null')
max_stake_amount = get_column_def(cols, 'max_stake_amount', 'stake_amount')

# If ticker-interval existed use that, else null.
@@ -154,13 +158,13 @@ def migrate_trades_and_orders_table(
fee_open, fee_open_cost, fee_open_currency,
fee_close, fee_close_cost, fee_close_currency, open_rate,
open_rate_requested, close_rate, close_rate_requested, close_profit,
stake_amount, amount, amount_requested, open_date, close_date, open_order_id,
stake_amount, amount, amount_requested, open_date, close_date,
stop_loss, stop_loss_pct, initial_stop_loss, initial_stop_loss_pct,
stoploss_order_id, stoploss_last_update,
is_stop_loss_trailing, stoploss_order_id, stoploss_last_update,
max_rate, min_rate, exit_reason, exit_order_status, strategy, enter_tag,
timeframe, open_trade_value, close_profit_abs,
trading_mode, leverage, liquidation_price, is_short,
interest_rate, funding_fees, realized_profit,
interest_rate, funding_fees, funding_fee_running, realized_profit,
amount_precision, price_precision, precision_mode, contract_size,
max_stake_amount
)
@@ -171,10 +175,11 @@ def migrate_trades_and_orders_table(
{fee_close_cost} fee_close_cost, {fee_close_currency} fee_close_currency,
open_rate, {open_rate_requested} open_rate_requested, close_rate,
{close_rate_requested} close_rate_requested, close_profit,
stake_amount, amount, {amount_requested}, open_date, close_date, open_order_id,
stake_amount, amount, {amount_requested}, open_date, close_date,
{stop_loss} stop_loss, {stop_loss_pct} stop_loss_pct,
{initial_stop_loss} initial_stop_loss,
{initial_stop_loss_pct} initial_stop_loss_pct,
{is_stop_loss_trailing} is_stop_loss_trailing,
{stoploss_order_id} stoploss_order_id, {stoploss_last_update} stoploss_last_update,
{max_rate} max_rate, {min_rate} min_rate,
case when {exit_reason} = 'sell_signal' then 'exit_signal'
@@ -188,7 +193,8 @@ def migrate_trades_and_orders_table(
{open_trade_value} open_trade_value, {close_profit_abs} close_profit_abs,
{trading_mode} trading_mode, {leverage} leverage, {liquidation_price} liquidation_price,
{is_short} is_short, {interest_rate} interest_rate,
{funding_fees} funding_fees, {realized_profit} realized_profit,
{funding_fees} funding_fees, {funding_fee_running} funding_fee_running,
{realized_profit} realized_profit,
{amount_precision} amount_precision, {price_precision} price_precision,
{precision_mode} precision_mode, {contract_size} contract_size,
{max_stake_amount} max_stake_amount
@@ -216,6 +222,7 @@ def migrate_orders_table(engine, table_back_name: str, cols_order: List):
funding_fee = get_column_def(cols_order, 'funding_fee', '0.0')
ft_amount = get_column_def(cols_order, 'ft_amount', 'coalesce(amount, 0.0)')
ft_price = get_column_def(cols_order, 'ft_price', 'coalesce(price, 0.0)')
ft_cancel_reason = get_column_def(cols_order, 'ft_cancel_reason', 'null')

# sqlite does not support literals for booleans
with engine.begin() as connection:
@@ -223,13 +230,13 @@ def migrate_orders_table(engine, table_back_name: str, cols_order: List):
insert into orders (id, ft_trade_id, ft_order_side, ft_pair, ft_is_open, order_id,
status, symbol, order_type, side, price, amount, filled, average, remaining, cost,
stop_price, order_date, order_filled_date, order_update_date, ft_fee_base, funding_fee,
ft_amount, ft_price
ft_amount, ft_price, ft_cancel_reason
)
select id, ft_trade_id, ft_order_side, ft_pair, ft_is_open, order_id,
status, symbol, order_type, side, price, amount, filled, {average} average, remaining,
cost, {stop_price} stop_price, order_date, order_filled_date,
order_update_date, {ft_fee_base} ft_fee_base, {funding_fee} funding_fee,
{ft_amount} ft_amount, {ft_price} ft_price
{ft_amount} ft_amount, {ft_price} ft_price, {ft_cancel_reason} ft_cancel_reason
from {table_back_name}
"""))

@@ -268,6 +275,13 @@ def set_sqlite_to_wal(engine):

def fix_old_dry_orders(engine):
with engine.begin() as connection:

# Update current dry-run Orders where
# - current Order is open
# - current Trade is closed
# - current Order trade_id not equal to current Trade.id
# - current Order not stoploss

stmt = update(Order).where(
Order.ft_is_open.is_(True),
tuple_(Order.ft_trade_id, Order.order_id).not_in(
@@ -281,12 +295,13 @@ def fix_old_dry_orders(engine):
).values(ft_is_open=False)
connection.execute(stmt)

# Close dry-run orders for closed trades.
stmt = update(Order).where(
Order.ft_is_open.is_(True),
tuple_(Order.ft_trade_id, Order.order_id).not_in(
Order.ft_trade_id.not_in(
select(
Trade.id, Trade.open_order_id
).where(Trade.open_order_id.is_not(None))
Trade.id
).where(Trade.is_open.is_(True))
),
Order.ft_order_side != 'stoploss',
Order.order_id.like('dry%')
@@ -316,8 +331,8 @@ def check_migrate(engine, decl_base, previous_tables) -> None:
# if ('orders' not in previous_tables
# or not has_column(cols_orders, 'funding_fee')):
migrating = False
# if not has_column(cols_trades, 'max_stake_amount'):
if not has_column(cols_orders, 'ft_price'):
# if not has_column(cols_orders, 'ft_cancel_reason'):
if not has_column(cols_trades, 'funding_fee_running'):
migrating = True
logger.info(f"Running database migration for trades - "
f"backup: {table_back_name}, {order_table_bak_name}")
@@ -3,6 +3,7 @@ This module contains the class to persist trades into SQLite
"""
import logging
from collections import defaultdict
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from math import isclose
from typing import Any, ClassVar, Dict, List, Optional, Sequence, cast
@@ -12,20 +13,30 @@ from sqlalchemy import (Enum, Float, ForeignKey, Integer, ScalarResult, Select,
from sqlalchemy.orm import Mapped, lazyload, mapped_column, relationship, validates
from typing_extensions import Self

from freqtrade.constants import (CUSTOM_TAG_MAX_LENGTH, DATETIME_PRINT_FORMAT, MATH_CLOSE_PREC,
NON_OPEN_EXCHANGE_STATES, BuySell, LongShort)
from freqtrade.constants import (CANCELED_EXCHANGE_STATES, CUSTOM_TAG_MAX_LENGTH,
DATETIME_PRINT_FORMAT, MATH_CLOSE_PREC, NON_OPEN_EXCHANGE_STATES,
BuySell, LongShort)
from freqtrade.enums import ExitType, TradingMode
from freqtrade.exceptions import DependencyException, OperationalException
from freqtrade.exchange import (ROUND_DOWN, ROUND_UP, amount_to_contract_precision,
price_to_precision)
from freqtrade.leverage import interest
from freqtrade.misc import safe_value_fallback
from freqtrade.persistence.base import ModelBase, SessionType
from freqtrade.util import FtPrecise, dt_now
from freqtrade.util import FtPrecise, dt_from_ts, dt_now, dt_ts

logger = logging.getLogger(__name__)

@dataclass
class ProfitStruct:
profit_abs: float
profit_ratio: float
total_profit: float
total_profit_ratio: float

class Order(ModelBase):
"""
Order database model
@@ -57,6 +68,7 @@ class Order(ModelBase):
ft_is_open: Mapped[bool] = mapped_column(nullable=False, default=True, index=True)
ft_amount: Mapped[float] = mapped_column(Float(), nullable=False)
ft_price: Mapped[float] = mapped_column(Float(), nullable=False)
ft_cancel_reason: Mapped[str] = mapped_column(String(CUSTOM_TAG_MAX_LENGTH), nullable=True)

order_id: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
status: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
@@ -162,12 +174,10 @@ class Order(ModelBase):
self.ft_is_open = True
if self.status in NON_OPEN_EXCHANGE_STATES:
self.ft_is_open = False
if self.trade:
# Assign funding fee up to this point
# (represents the funding fee since the last order)
self.funding_fee = self.trade.funding_fees
if (order.get('filled', 0.0) or 0.0) > 0 and not self.order_filled_date:
self.order_filled_date = datetime.now(timezone.utc)
self.order_filled_date = dt_from_ts(
safe_value_fallback(order, 'lastTradeTimestamp', default_value=dt_ts())
)
self.order_update_date = datetime.now(timezone.utc)

def to_ccxt_object(self, stopPriceName: str = 'stopPrice') -> Dict[str, Any]:
@@ -224,6 +234,7 @@ class Order(ModelBase):
'price': self.price,
'remaining': self.remaining,
'ft_fee_base': self.ft_fee_base,
'funding_fee': self.funding_fee,
})
return resp

@@ -235,12 +246,16 @@ class Order(ModelBase):
self.ft_is_open = False
# Assign funding fees to Order.
# Assumes backtesting will use date_last_filled_utc to calculate future funding fees.
self.funding_fee = trade.funding_fees
self.funding_fee = trade.funding_fee_running
trade.funding_fee_running = 0.0

if (self.ft_order_side == trade.entry_side and self.price):
trade.open_rate = self.price
trade.recalc_trade_from_orders()
trade.adjust_stop_loss(trade.open_rate, trade.stop_loss_pct, refresh=True)
if trade.nr_of_successful_entries == 1:
trade.initial_stop_loss_pct = None
trade.is_stop_loss_trailing = False
trade.adjust_stop_loss(trade.open_rate, trade.stop_loss_pct)

@staticmethod
def update_orders(orders: List['Order'], order: Dict[str, Any]):
@@ -340,7 +355,6 @@ class LocalTrade:
amount_requested: Optional[float] = None
open_date: datetime
close_date: Optional[datetime] = None
open_order_id: Optional[str] = None
# absolute value of the stop loss
stop_loss: float = 0.0
# percentage value of the stop loss
@@ -349,6 +363,7 @@ class LocalTrade:
initial_stop_loss: Optional[float] = 0.0
# percentage value of the initial stop loss
initial_stop_loss_pct: Optional[float] = None
is_stop_loss_trailing: bool = False
# stoploss order id which is on exchange
stoploss_order_id: Optional[str] = None
# last update time of the stoploss order on exchange
@@ -379,6 +394,9 @@ class LocalTrade:

# Futures properties
funding_fees: Optional[float] = None
# Used to keep running funding fees - between the last filled order and now
# Shall not be used for calculations!
funding_fee_running: Optional[float] = None

@property
def stoploss_or_liquidation(self) -> float:
@@ -418,13 +436,20 @@ class LocalTrade:
return self.amount

@property
def date_last_filled_utc(self) -> datetime:
def _date_last_filled_utc(self) -> Optional[datetime]:
""" Date of the last filled order"""
orders = self.select_filled_orders()
if not orders:
if orders:
return max(o.order_filled_utc for o in orders if o.order_filled_utc)
return None

@property
def date_last_filled_utc(self) -> datetime:
""" Date of the last filled order - or open_date if no orders are filled"""
dt_last_filled = self._date_last_filled_utc
if not dt_last_filled:
return self.open_date_utc
return max([self.open_date_utc,
max(o.order_filled_utc for o in orders if o.order_filled_utc)])
return max([self.open_date_utc, dt_last_filled])

@property
def open_date_utc(self):
@@ -481,10 +506,37 @@ class LocalTrade:
except IndexError:
return ''

@property
def open_orders(self) -> List[Order]:
"""
All open orders for this trade excluding stoploss orders
"""
return [o for o in self.orders if o.ft_is_open and o.ft_order_side != 'stoploss']

@property
def has_open_orders(self) -> int:
"""
True if there are open orders for this trade excluding stoploss orders
"""
open_orders_wo_sl = [
o for o in self.orders
if o.ft_order_side not in ['stoploss'] and o.ft_is_open
]
return len(open_orders_wo_sl) > 0

@property
def open_orders_ids(self) -> List[str]:
open_orders_ids_wo_sl = [
oo.order_id for oo in self.open_orders
if oo.ft_order_side not in ['stoploss']
]
return open_orders_ids_wo_sl
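These properties replace the former single open_order_id field, and the rest of this changeset rewrites its call sites against them. A short, hedged usage sketch of the kind of check that changes (the helper is illustrative, not freqtrade API):

# Before this changeset:
#     if trade.open_order_id and not stoploss_order: ...
# After, the same intent is expressed through the new properties:
def should_send_exit_fill(trade, order, stoploss_order: bool) -> bool:
    return (not stoploss_order
            and order.order_id not in trade.open_orders_ids)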
def __init__(self, **kwargs):
|
||||
for key in kwargs:
|
||||
setattr(self, key, kwargs[key])
|
||||
self.recalc_open_trade_value()
|
||||
self.orders = []
|
||||
if self.trading_mode == TradingMode.MARGIN and self.interest_rate is None:
|
||||
raise OperationalException(
|
||||
f"{self.trading_mode.value} trading requires param interest_rate on trades")
|
||||
@@ -499,8 +551,8 @@ class LocalTrade:
|
||||
)
|
||||
|
||||
def to_json(self, minified: bool = False) -> Dict[str, Any]:
|
||||
filled_orders = self.select_filled_or_open_orders()
|
||||
orders = [order.to_json(self.entry_side, minified) for order in filled_orders]
|
||||
filled_or_open_orders = self.select_filled_or_open_orders()
|
||||
orders_json = [order.to_json(self.entry_side, minified) for order in filled_or_open_orders]
|
||||
|
||||
return {
|
||||
'trade_id': self.id,
|
||||
@@ -576,11 +628,12 @@ class LocalTrade:
|
||||
'is_short': self.is_short,
|
||||
'trading_mode': self.trading_mode,
|
||||
'funding_fees': self.funding_fees,
|
||||
'open_order_id': self.open_order_id,
|
||||
'amount_precision': self.amount_precision,
|
||||
'price_precision': self.price_precision,
|
||||
'precision_mode': self.precision_mode,
|
||||
'orders': orders,
|
||||
'contract_size': self.contract_size,
|
||||
'has_open_orders': self.has_open_orders,
|
||||
'orders': orders_json,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
@@ -610,6 +663,16 @@ class LocalTrade:
|
||||
return
|
||||
self.liquidation_price = liquidation_price
|
||||
|
||||
def set_funding_fees(self, funding_fee: float) -> None:
|
||||
"""
|
||||
Assign funding fees to Trade.
|
||||
"""
|
||||
if funding_fee is None:
|
||||
return
|
||||
self.funding_fee_running = funding_fee
|
||||
prior_funding_fees = sum([o.funding_fee for o in self.orders if o.funding_fee])
|
||||
self.funding_fees = prior_funding_fees + funding_fee
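The running funding fee is later attached to the next regular order, while funding_fees always reflects fees already booked on orders plus the running amount. A standalone sketch of that bookkeeping (OrderFee is an illustrative stub, not the freqtrade Order class):

from dataclasses import dataclass
from typing import List, Optional

@dataclass
class OrderFee:
    funding_fee: Optional[float] = None

def total_funding_fees(orders: List[OrderFee], running_fee: float) -> float:
    # Fees already attached to filled orders plus the currently running fee.
    prior = sum(o.funding_fee for o in orders if o.funding_fee)
    return prior + running_fee

orders = [OrderFee(-0.02), OrderFee(None), OrderFee(0.01)]
assert abs(total_funding_fees(orders, -0.005) - (-0.015)) < 1e-9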

def __set_stop_loss(self, stop_loss: float, percent: float):
"""
Method used internally to set self.stop_loss.
@@ -621,18 +684,18 @@ class LocalTrade:
self.stop_loss_pct = -1 * abs(percent)

def adjust_stop_loss(self, current_price: float, stoploss: Optional[float],
initial: bool = False, refresh: bool = False) -> None:
initial: bool = False, allow_refresh: bool = False) -> None:
"""
This adjusts the stop loss to its most recently observed setting
:param current_price: Current rate the asset is traded
:param stoploss: Stoploss as factor (sample -0.05 -> -5% below current price).
:param initial: Called to initiate stop_loss.
Skips everything if self.stop_loss is already set.
:param refresh: Called to refresh stop_loss, allows adjustment in both directions
"""
if stoploss is None or (initial and not (self.stop_loss is None or self.stop_loss == 0)):
# Don't modify if called with initial and nothing to do
return
refresh = True if refresh and self.nr_of_successful_entries == 1 else False

leverage = self.leverage or 1.0
if self.is_short:
@@ -643,7 +706,7 @@ class LocalTrade:
stop_loss_norm = price_to_precision(new_loss, self.price_precision, self.precision_mode,
rounding_mode=ROUND_DOWN if self.is_short else ROUND_UP)
# no stop loss assigned yet
if self.initial_stop_loss_pct is None or refresh:
if self.initial_stop_loss_pct is None:
self.__set_stop_loss(stop_loss_norm, stoploss)
self.initial_stop_loss = price_to_precision(
stop_loss_norm, self.price_precision, self.precision_mode,
@@ -658,8 +721,14 @@ class LocalTrade:
# stop losses only walk up, never down!,
# ? But adding more to a leveraged trade would create a lower liquidation price,
# ? decreasing the minimum stoploss
if (higher_stop and not self.is_short) or (lower_stop and self.is_short):
if (
allow_refresh
or (higher_stop and not self.is_short)
or (lower_stop and self.is_short)
):
logger.debug(f"{self.pair} - Adjusting stoploss...")
if not allow_refresh:
self.is_stop_loss_trailing = True
self.__set_stop_loss(stop_loss_norm, stoploss)
else:
logger.debug(f"{self.pair} - Keeping current stoploss...")
@@ -672,7 +741,7 @@ class LocalTrade:
f"Trailing stoploss saved us: "
f"{float(self.stop_loss) - float(self.initial_stop_loss or 0.0):.8f}.")

def update_trade(self, order: Order) -> None:
def update_trade(self, order: Order, recalculating: bool = False) -> None:
"""
Updates this entity with amount and actual open/close rates.
:param order: order retrieved by exchange.fetch_order()
@@ -684,6 +753,10 @@ class LocalTrade:
return

logger.info(f'Updating trade (id={self.id}) ...')
if order.ft_order_side != 'stoploss':
order.funding_fee = self.funding_fee_running
# Reset running funding fees
self.funding_fee_running = 0.0

if order.ft_order_side == self.entry_side:
# Update open rate and actual amount
@@ -692,24 +765,13 @@ class LocalTrade:
if self.is_open:
payment = "SELL" if self.is_short else "BUY"
logger.info(f'{order.order_type.upper()}_{payment} has been fulfilled for {self}.')
# condition to avoid reset value when updating fees
if self.open_order_id == order.order_id:
self.open_order_id = None
else:
logger.warning(
f'Got different open_order_id {self.open_order_id} != {order.order_id}')

self.recalc_trade_from_orders()
elif order.ft_order_side == self.exit_side:
if self.is_open:
payment = "BUY" if self.is_short else "SELL"
# * On margin shorts, you buy a little bit more than the amount (amount + interest)
logger.info(f'{order.order_type.upper()}_{payment} has been fulfilled for {self}.')
# condition to avoid reset value when updating fees
if self.open_order_id == order.order_id:
self.open_order_id = None
else:
logger.warning(
f'Got different open_order_id {self.open_order_id} != {order.order_id}')

elif order.ft_order_side == 'stoploss' and order.status not in ('open', ):
self.stoploss_order_id = None
@@ -725,8 +787,9 @@ class LocalTrade:
self.precision_mode, self.contract_size)
if (
isclose(order.safe_amount_after_fee, amount_tr, abs_tol=MATH_CLOSE_PREC)
or order.safe_amount_after_fee > amount_tr
or (not recalculating and order.safe_amount_after_fee > amount_tr)
):
# When recalculating a trade, only coming out to 0 can force a close
self.close(order.safe_price)
else:
self.recalc_trade_from_orders()
@@ -739,10 +802,9 @@ class LocalTrade:
and marks trade as closed
"""
self.close_rate = rate
self.close_date = self.close_date or datetime.utcnow()
self.close_date = self.close_date or self._date_last_filled_utc or dt_now()
self.is_open = False
self.exit_order_status = 'closed'
self.open_order_id = None
self.recalc_trade_from_orders(is_closing=True)
if show_msg:
logger.info(f"Marking {self} as closed as the trade is fulfilled "
@@ -780,12 +842,13 @@ class LocalTrade:
def update_order(self, order: Dict) -> None:
Order.update_orders(self.orders, order)

def get_exit_order_count(self) -> int:
def get_canceled_exit_order_count(self) -> int:
"""
Get amount of failed exiting orders
assumes full exits.
"""
return len([o for o in self.orders if o.ft_order_side == self.exit_side])
return len([o for o in self.orders if o.ft_order_side == self.exit_side
and o.status in CANCELED_EXCHANGE_STATES])
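Sketch of the renamed counter: only exit orders in a canceled-like state are counted. CANCELED_STATES below is an assumed stand-in for freqtrade's CANCELED_EXCHANGE_STATES constant, and ExitOrder is an illustrative stub:

from dataclasses import dataclass
from typing import List

CANCELED_STATES = ('canceled', 'cancelled', 'expired', 'rejected')  # assumed values

@dataclass
class ExitOrder:
    ft_order_side: str
    status: str

def canceled_exit_order_count(orders: List[ExitOrder], exit_side: str = 'sell') -> int:
    return len([o for o in orders
                if o.ft_order_side == exit_side and o.status in CANCELED_STATES])

orders = [ExitOrder('sell', 'canceled'), ExitOrder('sell', 'closed'), ExitOrder('buy', 'canceled')]
assert canceled_exit_order_count(orders) == 1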

def _calc_open_trade_value(self, amount: float, open_rate: float) -> float:
"""
@@ -878,11 +941,26 @@ class LocalTrade:
open_rate: Optional[float] = None) -> float:
"""
Calculate the absolute profit in stake currency between Close and Open trade
Deprecated - only available for backwards compatibility
:param rate: close rate to compare with.
:param amount: Amount to use for the calculation. Falls back to trade.amount if not set.
:param open_rate: open_rate to use. Defaults to self.open_rate if not provided.
:return: profit in stake currency as float
"""
prof = self.calculate_profit(rate, amount, open_rate)
return prof.profit_abs

def calculate_profit(self, rate: float, amount: Optional[float] = None,
open_rate: Optional[float] = None) -> ProfitStruct:
"""
Calculate profit metrics (absolute, ratio, total, total ratio).
All calculations include fees.
:param rate: close rate to compare with.
:param amount: Amount to use for the calculation. Falls back to trade.amount if not set.
:param open_rate: open_rate to use. Defaults to self.open_rate if not provided.
:return: Profit structure, containing absolute and relative profits.
"""

close_trade_value = self.calc_close_trade_value(rate, amount)
if amount is None or open_rate is None:
open_trade_value = self.open_trade_value
@@ -890,10 +968,33 @@ class LocalTrade:
open_trade_value = self._calc_open_trade_value(amount, open_rate)

if self.is_short:
profit = open_trade_value - close_trade_value
profit_abs = open_trade_value - close_trade_value
else:
profit = close_trade_value - open_trade_value
return float(f"{profit:.8f}")
profit_abs = close_trade_value - open_trade_value

try:
if self.is_short:
profit_ratio = (1 - (close_trade_value / open_trade_value)) * self.leverage
else:
profit_ratio = ((close_trade_value / open_trade_value) - 1) * self.leverage
profit_ratio = float(f"{profit_ratio:.8f}")
except ZeroDivisionError:
profit_ratio = 0.0

total_profit_abs = profit_abs + self.realized_profit
total_profit_ratio = (
(total_profit_abs / self.max_stake_amount) * self.leverage
if self.max_stake_amount else 0.0
)
total_profit_ratio = float(f"{total_profit_ratio:.8f}")
profit_abs = float(f"{profit_abs:.8f}")

return ProfitStruct(
profit_abs=profit_abs,
profit_ratio=profit_ratio,
total_profit=profit_abs + self.realized_profit,
total_profit_ratio=total_profit_ratio,
)
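A standalone re-implementation of the profit math above, for illustration only (not the freqtrade API): absolute profit by trade side, leveraged ratio guarded against division by zero, and running totals against max_stake_amount. ProfitSketch and calc_profit are illustrative names.

from dataclasses import dataclass

@dataclass
class ProfitSketch:
    profit_abs: float
    profit_ratio: float
    total_profit: float
    total_profit_ratio: float

def calc_profit(open_value: float, close_value: float, *, is_short: bool,
                leverage: float, realized_profit: float, max_stake_amount: float) -> ProfitSketch:
    # Absolute profit depends on the trade direction.
    profit_abs = (open_value - close_value) if is_short else (close_value - open_value)
    try:
        ratio = (1 - close_value / open_value) if is_short else (close_value / open_value - 1)
        profit_ratio = ratio * leverage
    except ZeroDivisionError:
        profit_ratio = 0.0
    total_profit_abs = profit_abs + realized_profit
    total_profit_ratio = (total_profit_abs / max_stake_amount) * leverage if max_stake_amount else 0.0
    return ProfitSketch(round(profit_abs, 8), round(profit_ratio, 8),
                        total_profit_abs, round(total_profit_ratio, 8))

p = calc_profit(1000.0, 1050.0, is_short=False, leverage=1.0,
                realized_profit=0.0, max_stake_amount=1000.0)
assert p.profit_abs == 50.0 and p.profit_ratio == 0.05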

def calc_profit_ratio(
self, rate: float, amount: Optional[float] = None,
@@ -914,15 +1015,14 @@ class LocalTrade:

short_close_zero = (self.is_short and close_trade_value == 0.0)
long_close_zero = (not self.is_short and open_trade_value == 0.0)
leverage = self.leverage or 1.0

if (short_close_zero or long_close_zero):
return 0.0
else:
if self.is_short:
profit_ratio = (1 - (close_trade_value / open_trade_value)) * leverage
profit_ratio = (1 - (close_trade_value / open_trade_value)) * self.leverage
else:
profit_ratio = ((close_trade_value / open_trade_value) - 1) * leverage
profit_ratio = ((close_trade_value / open_trade_value) - 1) * self.leverage

return float(f"{profit_ratio:.8f}")

@@ -935,7 +1035,6 @@ class LocalTrade:
avg_price = FtPrecise(0.0)
close_profit = 0.0
close_profit_abs = 0.0
profit = None
# Reset funding fees
self.funding_fees = 0.0
funding_fees = 0.0
@@ -954,7 +1053,7 @@ class LocalTrade:
price = avg_price if is_exit else tmp_price
current_stake += price * tmp_amount * side

if current_amount > ZERO:
if current_amount > ZERO and not is_exit:
avg_price = current_stake / current_amount

if is_exit:
@@ -965,11 +1064,12 @@ class LocalTrade:

exit_rate = o.safe_price
exit_amount = o.safe_amount_after_fee
profit = self.calc_profit(rate=exit_rate, amount=exit_amount,
open_rate=float(avg_price))
close_profit_abs += profit
close_profit = self.calc_profit_ratio(
exit_rate, amount=exit_amount, open_rate=avg_price)
prof = self.calculate_profit(exit_rate, exit_amount, float(avg_price))
close_profit_abs += prof.profit_abs
if total_stake > 0:
# This needs to be calculated based on the last occurring exit to be aligned
# with realized_profit.
close_profit = (close_profit_abs / total_stake) * self.leverage
else:
total_stake = total_stake + self._calc_open_trade_value(tmp_amount, price)
max_stake_amount += (tmp_amount * price)
@@ -979,7 +1079,7 @@ class LocalTrade:
if close_profit:
self.close_profit = close_profit
self.realized_profit = close_profit_abs
self.close_profit_abs = profit
self.close_profit_abs = prof.profit_abs

current_amount_tr = amount_to_contract_precision(
float(current_amount), self.amount_precision, self.precision_mode, self.contract_size)
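The realized ratio is now derived from the summed absolute exit profits over the total stake rather than from the last partial exit alone. A plain-float sketch of that aggregation (illustrative helper, not the freqtrade API):

def aggregate_close_profit(exit_profits_abs, total_stake, leverage=1.0):
    # Sum realized absolute profits, then express them relative to the total stake.
    close_profit_abs = sum(exit_profits_abs)
    if total_stake > 0:
        return (close_profit_abs / total_stake) * leverage
    return 0.0

# Two partial exits on a 2000-stake position: 30 + 20 profit -> 2.5% realized.
assert aggregate_close_profit([30.0, 20.0], 2000.0) == 0.025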

@@ -1194,7 +1294,7 @@ class LocalTrade:
logger.info(f"Found open trade: {trade}")

# skip case if trailing-stop changed the stoploss already.
if (trade.stop_loss == trade.initial_stop_loss
if (not trade.is_stop_loss_trailing
and trade.initial_stop_loss_pct != desired_stoploss):
# Stoploss value got changed

@@ -1205,6 +1305,99 @@ class LocalTrade:
trade.adjust_stop_loss(trade.open_rate, desired_stoploss)
logger.info(f"New stoploss: {trade.stop_loss}.")

@classmethod
def from_json(cls, json_str: str) -> Self:
"""
Create a Trade instance from a json string.

Used for debugging purposes - please keep.
:param json_str: json string to parse
:return: Trade instance
"""
import rapidjson
data = rapidjson.loads(json_str)
trade = cls(
__FROM_JSON=True,
id=data["trade_id"],
pair=data["pair"],
base_currency=data["base_currency"],
stake_currency=data["quote_currency"],
is_open=data["is_open"],
exchange=data["exchange"],
amount=data["amount"],
amount_requested=data["amount_requested"],
stake_amount=data["stake_amount"],
strategy=data["strategy"],
enter_tag=data["enter_tag"],
timeframe=data["timeframe"],
fee_open=data["fee_open"],
fee_open_cost=data["fee_open_cost"],
fee_open_currency=data["fee_open_currency"],
fee_close=data["fee_close"],
fee_close_cost=data["fee_close_cost"],
fee_close_currency=data["fee_close_currency"],
open_date=datetime.fromtimestamp(data["open_timestamp"] // 1000, tz=timezone.utc),
open_rate=data["open_rate"],
open_rate_requested=data["open_rate_requested"],
open_trade_value=data["open_trade_value"],
close_date=(datetime.fromtimestamp(data["close_timestamp"] // 1000, tz=timezone.utc)
if data["close_timestamp"] else None),
realized_profit=data["realized_profit"],
close_rate=data["close_rate"],
close_rate_requested=data["close_rate_requested"],
close_profit=data["close_profit"],
close_profit_abs=data["close_profit_abs"],
exit_reason=data["exit_reason"],
exit_order_status=data["exit_order_status"],
stop_loss=data["stop_loss_abs"],
stop_loss_pct=data["stop_loss_ratio"],
stoploss_order_id=data["stoploss_order_id"],
stoploss_last_update=(
datetime.fromtimestamp(data["stoploss_last_update_timestamp"] // 1000,
tz=timezone.utc)
if data["stoploss_last_update_timestamp"] else None),
initial_stop_loss=data["initial_stop_loss_abs"],
initial_stop_loss_pct=data["initial_stop_loss_ratio"],
min_rate=data["min_rate"],
max_rate=data["max_rate"],
leverage=data["leverage"],
interest_rate=data["interest_rate"],
liquidation_price=data["liquidation_price"],
is_short=data["is_short"],
trading_mode=data["trading_mode"],
funding_fees=data["funding_fees"],
amount_precision=data.get('amount_precision', None),
price_precision=data.get('price_precision', None),
precision_mode=data.get('precision_mode', None),
contract_size=data.get('contract_size', None),
)
for order in data["orders"]:

order_obj = Order(
amount=order["amount"],
ft_amount=order["amount"],
ft_order_side=order["ft_order_side"],
ft_pair=order["pair"],
ft_is_open=order["is_open"],
order_id=order["order_id"],
status=order["status"],
average=order["average"],
cost=order["cost"],
filled=order["filled"],
order_date=datetime.strptime(order["order_date"], DATETIME_PRINT_FORMAT),
order_filled_date=(datetime.fromtimestamp(
order["order_filled_timestamp"] // 1000, tz=timezone.utc)
if order["order_filled_timestamp"] else None),
order_type=order["order_type"],
price=order["price"],
ft_price=order["price"],
remaining=order["remaining"],
funding_fee=order.get("funding_fee", None),
)
trade.orders.append(order_obj)

return trade
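from_json stores timezone-aware datetimes parsed from the epoch-millisecond timestamps carried in the JSON payload. A small sketch of that conversion, with ts_ms_to_dt as an illustrative helper name:

from datetime import datetime, timezone
from typing import Optional

def ts_ms_to_dt(ts_ms: Optional[int]) -> Optional[datetime]:
    # JSON carries epoch milliseconds; None/0 means "no timestamp".
    if not ts_ms:
        return None
    return datetime.fromtimestamp(ts_ms // 1000, tz=timezone.utc)

assert ts_ms_to_dt(None) is None
assert ts_ms_to_dt(1700000000000) == datetime(2023, 11, 14, 22, 13, 20, tzinfo=timezone.utc)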


class Trade(ModelBase, LocalTrade):
"""
@@ -1256,7 +1449,6 @@ class Trade(ModelBase, LocalTrade):
open_date: Mapped[datetime] = mapped_column(
nullable=False, default=datetime.utcnow) # type: ignore
close_date: Mapped[Optional[datetime]] = mapped_column() # type: ignore
open_order_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) # type: ignore
# absolute value of the stop loss
stop_loss: Mapped[float] = mapped_column(Float(), nullable=True, default=0.0) # type: ignore
# percentage value of the stop loss
@@ -1267,6 +1459,8 @@ class Trade(ModelBase, LocalTrade):
# percentage value of the initial stop loss
initial_stop_loss_pct: Mapped[Optional[float]] = mapped_column(
Float(), nullable=True) # type: ignore
is_stop_loss_trailing: Mapped[bool] = mapped_column(
nullable=False, default=False) # type: ignore
# stoploss order id which is on exchange
stoploss_order_id: Mapped[Optional[str]] = mapped_column(
String(255), nullable=True, index=True) # type: ignore
@@ -1307,6 +1501,8 @@ class Trade(ModelBase, LocalTrade):
# Futures properties
funding_fees: Mapped[Optional[float]] = mapped_column(
Float(), nullable=True, default=None) # type: ignore
funding_fee_running: Mapped[Optional[float]] = mapped_column(
Float(), nullable=True, default=None) # type: ignore

def __init__(self, **kwargs):
from_json = kwargs.pop('__FROM_JSON', None)
@@ -1411,14 +1607,6 @@ class Trade(ModelBase, LocalTrade):
# raise an exception.
return Trade.session.scalars(query)

@staticmethod
def get_open_order_trades() -> List['Trade']:
"""
Returns all open trades
NOTE: Not supported in Backtesting.
"""
return cast(List[Trade], Trade.get_trades(Trade.open_order_id.isnot(None)).all())

@staticmethod
def get_open_trades_without_assigned_fees():
"""
@@ -1595,7 +1783,7 @@ class Trade(ModelBase, LocalTrade):
.order_by(desc('profit_sum_abs'))
).all()

return_list: List[Dict] = []
resp: List[Dict] = []
for id, enter_tag, exit_reason, profit, profit_abs, count in mix_tag_perf:
enter_tag = enter_tag if enter_tag is not None else "Other"
exit_reason = exit_reason if exit_reason is not None else "Other"
@@ -1603,24 +1791,25 @@ class Trade(ModelBase, LocalTrade):
if (exit_reason is not None and enter_tag is not None):
mix_tag = enter_tag + " " + exit_reason
i = 0
if not any(item["mix_tag"] == mix_tag for item in return_list):
return_list.append({'mix_tag': mix_tag,
'profit': profit,
'profit_pct': round(profit * 100, 2),
'profit_abs': profit_abs,
'count': count})
if not any(item["mix_tag"] == mix_tag for item in resp):
resp.append({'mix_tag': mix_tag,
'profit_ratio': profit,
'profit_pct': round(profit * 100, 2),
'profit_abs': profit_abs,
'count': count})
else:
while i < len(return_list):
if return_list[i]["mix_tag"] == mix_tag:
return_list[i] = {
while i < len(resp):
if resp[i]["mix_tag"] == mix_tag:
resp[i] = {
'mix_tag': mix_tag,
'profit': profit + return_list[i]["profit"],
'profit_pct': round(profit + return_list[i]["profit"] * 100, 2),
'profit_abs': profit_abs + return_list[i]["profit_abs"],
'count': 1 + return_list[i]["count"]}
'profit_ratio': profit + resp[i]["profit_ratio"],
'profit_pct': round(profit + resp[i]["profit_ratio"] * 100, 2),
'profit_abs': profit_abs + resp[i]["profit_abs"],
'count': 1 + resp[i]["count"]
}
i += 1

return return_list
return resp
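A compact sketch of what the reworked loop computes: rows sharing the same "enter_tag exit_reason" key are merged by summing ratio, absolute profit and count (simplified, without the profit_pct column; aggregate_mix_tags is an illustrative name):

from typing import Dict, List, Tuple

def aggregate_mix_tags(rows: List[Tuple[str, str, float, float, int]]) -> List[Dict]:
    resp: List[Dict] = []
    for enter_tag, exit_reason, profit, profit_abs, count in rows:
        mix_tag = f"{enter_tag or 'Other'} {exit_reason or 'Other'}"
        existing = next((r for r in resp if r['mix_tag'] == mix_tag), None)
        if existing is None:
            resp.append({'mix_tag': mix_tag, 'profit_ratio': profit,
                         'profit_abs': profit_abs, 'count': count})
        else:
            existing['profit_ratio'] += profit
            existing['profit_abs'] += profit_abs
            existing['count'] += count
    return resp

rows = [('tag_a', 'roi', 0.02, 5.0, 3), ('tag_a', 'roi', 0.01, 2.0, 1)]
assert aggregate_mix_tags(rows)[0]['count'] == 4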

@staticmethod
def get_best_pair(start_date: datetime = datetime.fromtimestamp(0)):
@@ -1655,92 +1844,3 @@ class Trade(ModelBase, LocalTrade):
Order.status == 'closed'
)).scalar_one()
return trading_volume

@classmethod
def from_json(cls, json_str: str) -> Self:
"""
Create a Trade instance from a json string.

Used for debugging purposes - please keep.
:param json_str: json string to parse
:return: Trade instance
"""
import rapidjson
data = rapidjson.loads(json_str)
trade = cls(
__FROM_JSON=True,
id=data["trade_id"],
pair=data["pair"],
base_currency=data["base_currency"],
stake_currency=data["quote_currency"],
is_open=data["is_open"],
exchange=data["exchange"],
amount=data["amount"],
amount_requested=data["amount_requested"],
stake_amount=data["stake_amount"],
strategy=data["strategy"],
enter_tag=data["enter_tag"],
timeframe=data["timeframe"],
fee_open=data["fee_open"],
fee_open_cost=data["fee_open_cost"],
fee_open_currency=data["fee_open_currency"],
fee_close=data["fee_close"],
fee_close_cost=data["fee_close_cost"],
fee_close_currency=data["fee_close_currency"],
open_date=datetime.fromtimestamp(data["open_timestamp"] // 1000, tz=timezone.utc),
open_rate=data["open_rate"],
open_rate_requested=data["open_rate_requested"],
open_trade_value=data["open_trade_value"],
close_date=(datetime.fromtimestamp(data["close_timestamp"] // 1000, tz=timezone.utc)
if data["close_timestamp"] else None),
realized_profit=data["realized_profit"],
close_rate=data["close_rate"],
close_rate_requested=data["close_rate_requested"],
close_profit=data["close_profit"],
close_profit_abs=data["close_profit_abs"],
exit_reason=data["exit_reason"],
exit_order_status=data["exit_order_status"],
stop_loss=data["stop_loss_abs"],
stop_loss_pct=data["stop_loss_ratio"],
stoploss_order_id=data["stoploss_order_id"],
stoploss_last_update=(
datetime.fromtimestamp(data["stoploss_last_update_timestamp"] // 1000,
tz=timezone.utc)
if data["stoploss_last_update_timestamp"] else None),
initial_stop_loss=data["initial_stop_loss_abs"],
initial_stop_loss_pct=data["initial_stop_loss_ratio"],
min_rate=data["min_rate"],
max_rate=data["max_rate"],
leverage=data["leverage"],
interest_rate=data["interest_rate"],
liquidation_price=data["liquidation_price"],
is_short=data["is_short"],
trading_mode=data["trading_mode"],
funding_fees=data["funding_fees"],
open_order_id=data["open_order_id"],
)
for order in data["orders"]:

order_obj = Order(
amount=order["amount"],
ft_amount=order["amount"],
ft_order_side=order["ft_order_side"],
ft_pair=order["pair"],
ft_is_open=order["is_open"],
order_id=order["order_id"],
status=order["status"],
average=order["average"],
cost=order["cost"],
filled=order["filled"],
order_date=datetime.strptime(order["order_date"], DATETIME_PRINT_FORMAT),
order_filled_date=(datetime.fromtimestamp(
order["order_filled_timestamp"] // 1000, tz=timezone.utc)
if order["order_filled_timestamp"] else None),
order_type=order["order_type"],
price=order["price"],
ft_price=order["price"],
remaining=order["remaining"],
)
trade.orders.append(order_obj)

return trade
@@ -21,6 +21,7 @@ from freqtrade.misc import pair_to_filename
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
from freqtrade.strategy import IStrategy
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper


logger = logging.getLogger(__name__)
@@ -636,7 +637,7 @@ def load_and_plot_trades(config: Config):
exchange = ExchangeResolver.load_exchange(config)
IStrategy.dp = DataProvider(config, exchange)
strategy.ft_bot_start()
strategy.bot_loop_start(datetime.now(timezone.utc))
strategy_safe_wrapper(strategy.bot_loop_start)(current_time=datetime.now(timezone.utc))
plot_elements = init_plotscript(config, list(exchange.markets), strategy.startup_candle_count)
timerange = plot_elements['timerange']
trades = plot_elements['trades']
freqtrade/plugins/pairlist/FullTradesFilter.py (new file, 57 lines)
@@ -0,0 +1,57 @@
"""
|
||||
Full trade slots pair list filter
|
||||
"""
|
||||
import logging
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from freqtrade.constants import Config
|
||||
from freqtrade.exchange.types import Tickers
|
||||
from freqtrade.persistence import Trade
|
||||
from freqtrade.plugins.pairlist.IPairList import IPairList
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FullTradesFilter(IPairList):
|
||||
|
||||
def __init__(self, exchange, pairlistmanager,
|
||||
config: Config, pairlistconfig: Dict[str, Any],
|
||||
pairlist_pos: int) -> None:
|
||||
super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)
|
||||
|
||||
@property
|
||||
def needstickers(self) -> bool:
|
||||
"""
|
||||
Boolean property defining if tickers are necessary.
|
||||
If no Pairlist requires tickers, an empty List is passed
|
||||
as tickers argument to filter_pairlist
|
||||
"""
|
||||
return False
|
||||
|
||||
def short_desc(self) -> str:
|
||||
"""
|
||||
Short allowlist method description - used for startup-messages
|
||||
"""
|
||||
return f"{self.name} - Shrink whitelist when trade slots are full."
|
||||
|
||||
@staticmethod
|
||||
def description() -> str:
|
||||
return "Shrink whitelist when trade slots are full."
|
||||
|
||||
def filter_pairlist(self, pairlist: List[str], tickers: Tickers) -> List[str]:
|
||||
"""
|
||||
Filters and sorts pairlist and returns the allowlist again.
|
||||
Called on each bot iteration - please use internal caching if necessary
|
||||
:param pairlist: pairlist to filter or sort
|
||||
:param tickers: Tickers (from exchange.get_tickers). May be cached.
|
||||
:return: new allowlist
|
||||
"""
|
||||
# Get the number of open trades and max open trades config
|
||||
num_open = Trade.get_open_trade_count()
|
||||
max_trades = self._config['max_open_trades']
|
||||
|
||||
if (num_open >= max_trades) and (max_trades > 0):
|
||||
return []
|
||||
|
||||
return pairlist
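Behaviourally, the filter empties the whitelist once all configured trade slots are occupied, so no new positions are opened while existing ones are still managed. A minimal sketch of that decision as a plain function (not the IPairList API):

from typing import List

def full_trades_filter(pairlist: List[str], num_open_trades: int, max_open_trades: int) -> List[str]:
    # Shrink the whitelist to nothing when all trade slots are taken.
    if max_open_trades > 0 and num_open_trades >= max_open_trades:
        return []
    return pairlist

assert full_trades_filter(['BTC/USDT', 'ETH/USDT'], 3, 3) == []
assert full_trades_filter(['BTC/USDT', 'ETH/USDT'], 2, 3) == ['BTC/USDT', 'ETH/USDT']

In a live config it would presumably be appended to the pairlists section like other filters, e.g. {"method": "FullTradesFilter"}.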

@@ -260,6 +260,7 @@ class VolumePairList(IPairList):
quoteVolume = (pair_candles['quoteVolume']
.rolling(self._lookback_period)
.sum()
.fillna(0)
.iloc[-1])

# replace quoteVolume with range quoteVolume sum calculated above
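The added .fillna(0) matters when fewer candles than _lookback_period are available: the rolling sum would otherwise be NaN. A small pandas sketch of that edge case (illustrative data, not the VolumePairList code path):

import pandas as pd

pair_candles = pd.DataFrame({'quoteVolume': [100.0, 150.0]})
lookback_period = 3  # longer than the available history

quote_volume = (pair_candles['quoteVolume']
                .rolling(lookback_period)
                .sum()
                .fillna(0)
                .iloc[-1])
assert quote_volume == 0.0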
Some files were not shown because too many files have changed in this diff.