Merge pull request #9112 from freqtrade/new_release

New release 2023.8
This commit is contained in:
Matthias
2023-08-28 18:44:59 +02:00
committed by GitHub
182 changed files with 3562 additions and 2294 deletions

View File

@@ -10,7 +10,7 @@ updates:
directory: "/" directory: "/"
schedule: schedule:
interval: weekly interval: weekly
open-pull-requests-limit: 10 open-pull-requests-limit: 15
target-branch: develop target-branch: develop
- package-ecosystem: "github-actions" - package-ecosystem: "github-actions"

View File

@@ -461,7 +461,7 @@ jobs:
python setup.py sdist bdist_wheel python setup.py sdist bdist_wheel
- name: Publish to PyPI (Test) - name: Publish to PyPI (Test)
uses: pypa/gh-action-pypi-publish@v1.8.8 uses: pypa/gh-action-pypi-publish@v1.8.10
if: (github.event_name == 'release') if: (github.event_name == 'release')
with: with:
user: __token__ user: __token__
@@ -469,7 +469,7 @@ jobs:
repository_url: https://test.pypi.org/legacy/ repository_url: https://test.pypi.org/legacy/
- name: Publish to PyPI - name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@v1.8.8 uses: pypa/gh-action-pypi-publish@v1.8.10
if: (github.event_name == 'release') if: (github.event_name == 'release')
with: with:
user: __token__ user: __token__

View File

@@ -8,7 +8,7 @@ repos:
# stages: [push] # stages: [push]
- repo: https://github.com/pre-commit/mirrors-mypy - repo: https://github.com/pre-commit/mirrors-mypy
rev: "v1.3.0" rev: "v1.5.0"
hooks: hooks:
- id: mypy - id: mypy
exclude: build_helpers exclude: build_helpers
@@ -18,7 +18,7 @@ repos:
- types-requests==2.31.0.2 - types-requests==2.31.0.2
- types-tabulate==0.9.0.3 - types-tabulate==0.9.0.3
- types-python-dateutil==2.8.19.14 - types-python-dateutil==2.8.19.14
- SQLAlchemy==2.0.19 - SQLAlchemy==2.0.20
# stages: [push] # stages: [push]
- repo: https://github.com/pycqa/isort - repo: https://github.com/pycqa/isort

View File

@@ -1,8 +1,14 @@
# .readthedocs.yml # .readthedocs.yml
version: 2
build: build:
image: latest os: "ubuntu-22.04"
tools:
python: "3.11"
python: python:
version: 3.8 install:
setup_py_install: false - requirements: docs/requirements-docs.txt
mkdocs:
configuration: mkdocs.yml

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -8,8 +8,9 @@ if [ -n "$2" ] || [ ! -f "${INSTALL_LOC}/lib/libta_lib.a" ]; then
tar zxvf ta-lib-0.4.0-src.tar.gz tar zxvf ta-lib-0.4.0-src.tar.gz
cd ta-lib \ cd ta-lib \
&& sed -i.bak "s|0.00000001|0.000000000000000001 |g" src/ta_func/ta_utility.h \ && sed -i.bak "s|0.00000001|0.000000000000000001 |g" src/ta_func/ta_utility.h \
&& curl 'https://raw.githubusercontent.com/gcc-mirror/gcc/master/config.guess' -o config.guess \ && echo "Downloading gcc config.guess and config.sub" \
&& curl 'https://raw.githubusercontent.com/gcc-mirror/gcc/master/config.sub' -o config.sub \ && curl -s 'https://raw.githubusercontent.com/gcc-mirror/gcc/master/config.guess' -o config.guess \
&& curl -s 'https://raw.githubusercontent.com/gcc-mirror/gcc/master/config.sub' -o config.sub \
&& ./configure --prefix=${INSTALL_LOC}/ \ && ./configure --prefix=${INSTALL_LOC}/ \
&& make && make
if [ $? -ne 0 ]; then if [ $? -ne 0 ]; then

View File

@@ -5,7 +5,7 @@ python -m pip install --upgrade pip wheel
$pyv = python -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')" $pyv = python -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')"
pip install --find-links=build_helpers\ TA-Lib pip install --find-links=build_helpers\ --prefer-binary TA-Lib
pip install -r requirements-dev.txt pip install -r requirements-dev.txt
pip install -e . pip install -e .

View File

@@ -89,7 +89,6 @@
], ],
"exchange": { "exchange": {
"name": "binance", "name": "binance",
"sandbox": false,
"key": "your_exchange_key", "key": "your_exchange_key",
"secret": "your_exchange_secret", "secret": "your_exchange_secret",
"password": "", "password": "",
@@ -206,6 +205,6 @@
"recursive_strategy_search": false, "recursive_strategy_search": false,
"add_config_files": [], "add_config_files": [],
"reduce_df_footprint": false, "reduce_df_footprint": false,
"dataformat_ohlcv": "json", "dataformat_ohlcv": "feather",
"dataformat_trades": "jsongz" "dataformat_trades": "feather"
} }

Binary file not shown.

After

Width:  |  Height:  |  Size: 48 KiB

View File

@@ -7,7 +7,7 @@ This page provides you some basic concepts on how Freqtrade works and operates.
* **Strategy**: Your trading strategy, telling the bot what to do. * **Strategy**: Your trading strategy, telling the bot what to do.
* **Trade**: Open position. * **Trade**: Open position.
* **Open Order**: Order which is currently placed on the exchange, and is not yet complete. * **Open Order**: Order which is currently placed on the exchange, and is not yet complete.
* **Pair**: Tradable pair, usually in the format of Base/Quote (e.g. XRP/USDT). * **Pair**: Tradable pair, usually in the format of Base/Quote (e.g. `XRP/USDT` for spot, `XRP/USDT:USDT` for futures).
* **Timeframe**: Candle length to use (e.g. `"5m"`, `"1h"`, ...). * **Timeframe**: Candle length to use (e.g. `"5m"`, `"1h"`, ...).
* **Indicators**: Technical indicators (SMA, EMA, RSI, ...). * **Indicators**: Technical indicators (SMA, EMA, RSI, ...).
* **Limit order**: Limit orders which execute at the defined limit price or better. * **Limit order**: Limit orders which execute at the defined limit price or better.
@@ -20,6 +20,20 @@ This page provides you some basic concepts on how Freqtrade works and operates.
All profit calculations of Freqtrade include fees. For Backtesting / Hyperopt / Dry-run modes, the exchange default fee is used (lowest tier on the exchange). For live operations, fees are used as applied by the exchange (this includes BNB rebates etc.). All profit calculations of Freqtrade include fees. For Backtesting / Hyperopt / Dry-run modes, the exchange default fee is used (lowest tier on the exchange). For live operations, fees are used as applied by the exchange (this includes BNB rebates etc.).
## Pair naming
Freqtrade follows the [ccxt naming convention](https://docs.ccxt.com/#/README?id=consistency-of-base-and-quote-currencies) for currencies.
Using the wrong naming convention in the wrong market will usually result in the bot not recognizing the pair, usually resulting in errors like "this pair is not available".
### Spot pair naming
For spot pairs, naming will be `base/quote` (e.g. `ETH/USDT`).
### Futures pair naming
For futures pairs, naming will be `base/quote:settle` (e.g. `ETH/USDT:USDT`).
## Bot execution logic ## Bot execution logic
Starting freqtrade in dry-run or live mode (using `freqtrade trade`) will start the bot and start the bot iteration loop. Starting freqtrade in dry-run or live mode (using `freqtrade trade`) will start the bot and start the bot iteration loop.

View File

@@ -3,7 +3,7 @@
This page explains the different parameters of the bot and how to run it. This page explains the different parameters of the bot and how to run it.
!!! Note !!! Note
If you've used `setup.sh`, don't forget to activate your virtual environment (`source .env/bin/activate`) before running freqtrade commands. If you've used `setup.sh`, don't forget to activate your virtual environment (`source .venv/bin/activate`) before running freqtrade commands.
!!! Warning "Up-to-date clock" !!! Warning "Up-to-date clock"
The clock on the system running the bot must be accurate, synchronized to a NTP server frequently enough to avoid problems with communication to the exchanges. The clock on the system running the bot must be accurate, synchronized to a NTP server frequently enough to avoid problems with communication to the exchanges.

View File

@@ -188,7 +188,6 @@ Mandatory parameters are marked as **Required**, which means that they are requi
| `max_entry_position_adjustment` | Maximum additional order(s) for each open trade on top of the first entry Order. Set it to `-1` for unlimited additional orders. [More information here](strategy-callbacks.md#adjust-trade-position). <br> [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `-1`.*<br> **Datatype:** Positive Integer or -1 | `max_entry_position_adjustment` | Maximum additional order(s) for each open trade on top of the first entry Order. Set it to `-1` for unlimited additional orders. [More information here](strategy-callbacks.md#adjust-trade-position). <br> [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `-1`.*<br> **Datatype:** Positive Integer or -1
| | **Exchange** | | **Exchange**
| `exchange.name` | **Required.** Name of the exchange class to use. [List below](#user-content-what-values-for-exchangename). <br> **Datatype:** String | `exchange.name` | **Required.** Name of the exchange class to use. [List below](#user-content-what-values-for-exchangename). <br> **Datatype:** String
| `exchange.sandbox` | Use the 'sandbox' version of the exchange, where the exchange provides a sandbox for risk-free integration. See [here](sandbox-testing.md) in more details.<br> **Datatype:** Boolean
| `exchange.key` | API key to use for the exchange. Only required when you are in production mode.<br>**Keep it in secret, do not disclose publicly.** <br> **Datatype:** String | `exchange.key` | API key to use for the exchange. Only required when you are in production mode.<br>**Keep it in secret, do not disclose publicly.** <br> **Datatype:** String
| `exchange.secret` | API secret to use for the exchange. Only required when you are in production mode.<br>**Keep it in secret, do not disclose publicly.** <br> **Datatype:** String | `exchange.secret` | API secret to use for the exchange. Only required when you are in production mode.<br>**Keep it in secret, do not disclose publicly.** <br> **Datatype:** String
| `exchange.password` | API password to use for the exchange. Only required when you are in production mode and for exchanges that use password for API requests.<br>**Keep it in secret, do not disclose publicly.** <br> **Datatype:** String | `exchange.password` | API password to use for the exchange. Only required when you are in production mode and for exchanges that use password for API requests.<br>**Keep it in secret, do not disclose publicly.** <br> **Datatype:** String
@@ -251,8 +250,8 @@ Mandatory parameters are marked as **Required**, which means that they are requi
| `db_url` | Declares database URL to use. NOTE: This defaults to `sqlite:///tradesv3.dryrun.sqlite` if `dry_run` is `true`, and to `sqlite:///tradesv3.sqlite` for production instances. <br> **Datatype:** String, SQLAlchemy connect string | `db_url` | Declares database URL to use. NOTE: This defaults to `sqlite:///tradesv3.dryrun.sqlite` if `dry_run` is `true`, and to `sqlite:///tradesv3.sqlite` for production instances. <br> **Datatype:** String, SQLAlchemy connect string
| `logfile` | Specifies logfile name. Uses a rolling strategy for log file rotation for 10 files with the 1MB limit per file. <br> **Datatype:** String | `logfile` | Specifies logfile name. Uses a rolling strategy for log file rotation for 10 files with the 1MB limit per file. <br> **Datatype:** String
| `add_config_files` | Additional config files. These files will be loaded and merged with the current config file. The files are resolved relative to the initial file.<br> *Defaults to `[]`*. <br> **Datatype:** List of strings | `add_config_files` | Additional config files. These files will be loaded and merged with the current config file. The files are resolved relative to the initial file.<br> *Defaults to `[]`*. <br> **Datatype:** List of strings
| `dataformat_ohlcv` | Data format to use to store historical candle (OHLCV) data. <br> *Defaults to `json`*. <br> **Datatype:** String | `dataformat_ohlcv` | Data format to use to store historical candle (OHLCV) data. <br> *Defaults to `feather`*. <br> **Datatype:** String
| `dataformat_trades` | Data format to use to store historical trades data. <br> *Defaults to `jsongz`*. <br> **Datatype:** String | `dataformat_trades` | Data format to use to store historical trades data. <br> *Defaults to `feather`*. <br> **Datatype:** String
| `reduce_df_footprint` | Recast all numeric columns to float32/int32, with the objective of reducing ram/disk usage (and decreasing train/inference timing in FreqAI). (Currently only affects FreqAI use-cases) <br> **Datatype:** Boolean. <br> Default: `False`. | `reduce_df_footprint` | Recast all numeric columns to float32/int32, with the objective of reducing ram/disk usage (and decreasing train/inference timing in FreqAI). (Currently only affects FreqAI use-cases) <br> **Datatype:** Boolean. <br> Default: `False`.
### Parameters in the strategy ### Parameters in the strategy

View File

@@ -27,7 +27,7 @@ For this to work, first activate your virtual environment and run the following
``` bash ``` bash
# Activate virtual environment # Activate virtual environment
source .env/bin/activate source .venv/bin/activate
pip install ipykernel pip install ipykernel
ipython kernel install --user --name=freqtrade ipython kernel install --user --name=freqtrade

View File

@@ -27,11 +27,11 @@ usage: freqtrade download-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[--exchange EXCHANGE] [--exchange EXCHANGE]
[-t TIMEFRAMES [TIMEFRAMES ...]] [--erase] [-t TIMEFRAMES [TIMEFRAMES ...]] [--erase]
[--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}] [--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}]
[--data-format-trades {json,jsongz,hdf5}] [--data-format-trades {json,jsongz,hdf5,feather}]
[--trading-mode {spot,margin,futures}] [--trading-mode {spot,margin,futures}]
[--prepend] [--prepend]
optional arguments: options:
-h, --help show this help message and exit -h, --help show this help message and exit
-p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...] -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
Limit command to these pairs. Pairs are space- Limit command to these pairs. Pairs are space-
@@ -48,8 +48,7 @@ optional arguments:
--dl-trades Download trades instead of OHLCV data. The bot will --dl-trades Download trades instead of OHLCV data. The bot will
resample trades to the desired timeframe as specified resample trades to the desired timeframe as specified
as --timeframes/-t. as --timeframes/-t.
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no --exchange EXCHANGE Exchange name. Only valid if no config is provided.
config is provided.
-t TIMEFRAMES [TIMEFRAMES ...], --timeframes TIMEFRAMES [TIMEFRAMES ...] -t TIMEFRAMES [TIMEFRAMES ...], --timeframes TIMEFRAMES [TIMEFRAMES ...]
Specify which tickers to download. Space-separated Specify which tickers to download. Space-separated
list. Default: `1m 5m`. list. Default: `1m 5m`.
@@ -57,17 +56,18 @@ optional arguments:
exchange/pairs/timeframes. exchange/pairs/timeframes.
--data-format-ohlcv {json,jsongz,hdf5,feather,parquet} --data-format-ohlcv {json,jsongz,hdf5,feather,parquet}
Storage format for downloaded candle (OHLCV) data. Storage format for downloaded candle (OHLCV) data.
(default: `json`). (default: `feather`).
--data-format-trades {json,jsongz,hdf5} --data-format-trades {json,jsongz,hdf5,feather}
Storage format for downloaded trades data. (default: Storage format for downloaded trades data. (default:
`jsongz`). `feather`).
--trading-mode {spot,margin,futures}, --tradingmode {spot,margin,futures} --trading-mode {spot,margin,futures}, --tradingmode {spot,margin,futures}
Select Trading mode Select Trading mode
--prepend Allow data prepending. (Data-appending is disabled) --prepend Allow data prepending. (Data-appending is disabled)
Common arguments: Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages). -v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
--logfile FILE Log to the file specified. Special values are: --logfile FILE, --log-file FILE
Log to the file specified. Special values are:
'syslog', 'journald'. See the documentation for more 'syslog', 'journald'. See the documentation for more
details. details.
-V, --version show program's version number and exit -V, --version show program's version number and exit
@@ -157,7 +157,7 @@ Freqtrade currently supports the following data-formats:
* `json` - plain "text" json files * `json` - plain "text" json files
* `jsongz` - a gzip-zipped version of json files * `jsongz` - a gzip-zipped version of json files
* `hdf5` - a high performance datastore * `hdf5` - a high performance datastore
* `feather` - a dataformat based on Apache Arrow (OHLCV only) * `feather` - a dataformat based on Apache Arrow
* `parquet` - columnar datastore (OHLCV only) * `parquet` - columnar datastore (OHLCV only)
By default, OHLCV data is stored as `json` data, while trades data is stored as `jsongz` data. By default, OHLCV data is stored as `json` data, while trades data is stored as `jsongz` data.
@@ -255,7 +255,7 @@ usage: freqtrade convert-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[--trading-mode {spot,margin,futures}] [--trading-mode {spot,margin,futures}]
[--candle-types {spot,futures,mark,index,premiumIndex,funding_rate} [{spot,futures,mark,index,premiumIndex,funding_rate} ...]] [--candle-types {spot,futures,mark,index,premiumIndex,funding_rate} [{spot,futures,mark,index,premiumIndex,funding_rate} ...]]
optional arguments: options:
-h, --help show this help message and exit -h, --help show this help message and exit
-p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...] -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
Limit command to these pairs. Pairs are space- Limit command to these pairs. Pairs are space-
@@ -266,19 +266,20 @@ optional arguments:
Destination format for data conversion. Destination format for data conversion.
--erase Clean all existing data for the selected --erase Clean all existing data for the selected
exchange/pairs/timeframes. exchange/pairs/timeframes.
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no --exchange EXCHANGE Exchange name. Only valid if no config is provided.
config is provided.
-t TIMEFRAMES [TIMEFRAMES ...], --timeframes TIMEFRAMES [TIMEFRAMES ...] -t TIMEFRAMES [TIMEFRAMES ...], --timeframes TIMEFRAMES [TIMEFRAMES ...]
Specify which tickers to download. Space-separated Specify which tickers to download. Space-separated
list. Default: `1m 5m`. list. Default: `1m 5m`.
--trading-mode {spot,margin,futures}, --tradingmode {spot,margin,futures} --trading-mode {spot,margin,futures}, --tradingmode {spot,margin,futures}
Select Trading mode Select Trading mode
--candle-types {spot,futures,mark,index,premiumIndex,funding_rate} [{spot,futures,mark,index,premiumIndex,funding_rate} ...] --candle-types {spot,futures,mark,index,premiumIndex,funding_rate} [{spot,futures,mark,index,premiumIndex,funding_rate} ...]
Select candle type to use Select candle type to convert. Defaults to all
available types.
Common arguments: Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages). -v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
--logfile FILE Log to the file specified. Special values are: --logfile FILE, --log-file FILE
Log to the file specified. Special values are:
'syslog', 'journald'. See the documentation for more 'syslog', 'journald'. See the documentation for more
details. details.
-V, --version show program's version number and exit -V, --version show program's version number and exit
@@ -291,7 +292,6 @@ Common arguments:
Path to directory with historical backtesting data. Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH --userdir PATH, --user-data-dir PATH
Path to userdata directory. Path to userdata directory.
``` ```
### Example converting data ### Example converting data
@@ -314,7 +314,7 @@ usage: freqtrade convert-trade-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
{json,jsongz,hdf5,feather,parquet} {json,jsongz,hdf5,feather,parquet}
[--erase] [--exchange EXCHANGE] [--erase] [--exchange EXCHANGE]
optional arguments: options:
-h, --help show this help message and exit -h, --help show this help message and exit
-p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...] -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
Limit command to these pairs. Pairs are space- Limit command to these pairs. Pairs are space-
@@ -325,12 +325,12 @@ optional arguments:
Destination format for data conversion. Destination format for data conversion.
--erase Clean all existing data for the selected --erase Clean all existing data for the selected
exchange/pairs/timeframes. exchange/pairs/timeframes.
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no --exchange EXCHANGE Exchange name. Only valid if no config is provided.
config is provided.
Common arguments: Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages). -v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
--logfile FILE Log to the file specified. Special values are: --logfile FILE, --log-file FILE
Log to the file specified. Special values are:
'syslog', 'journald'. See the documentation for more 'syslog', 'journald'. See the documentation for more
details. details.
-V, --version show program's version number and exit -V, --version show program's version number and exit
@@ -367,9 +367,9 @@ usage: freqtrade trades-to-ohlcv [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[-t TIMEFRAMES [TIMEFRAMES ...]] [-t TIMEFRAMES [TIMEFRAMES ...]]
[--exchange EXCHANGE] [--exchange EXCHANGE]
[--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}] [--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}]
[--data-format-trades {json,jsongz,hdf5}] [--data-format-trades {json,jsongz,hdf5,feather}]
optional arguments: options:
-h, --help show this help message and exit -h, --help show this help message and exit
-p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...] -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
Limit command to these pairs. Pairs are space- Limit command to these pairs. Pairs are space-
@@ -377,18 +377,18 @@ optional arguments:
-t TIMEFRAMES [TIMEFRAMES ...], --timeframes TIMEFRAMES [TIMEFRAMES ...] -t TIMEFRAMES [TIMEFRAMES ...], --timeframes TIMEFRAMES [TIMEFRAMES ...]
Specify which tickers to download. Space-separated Specify which tickers to download. Space-separated
list. Default: `1m 5m`. list. Default: `1m 5m`.
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no --exchange EXCHANGE Exchange name. Only valid if no config is provided.
config is provided.
--data-format-ohlcv {json,jsongz,hdf5,feather,parquet} --data-format-ohlcv {json,jsongz,hdf5,feather,parquet}
Storage format for downloaded candle (OHLCV) data. Storage format for downloaded candle (OHLCV) data.
(default: `json`). (default: `feather`).
--data-format-trades {json,jsongz,hdf5} --data-format-trades {json,jsongz,hdf5,feather}
Storage format for downloaded trades data. (default: Storage format for downloaded trades data. (default:
`jsongz`). `feather`).
Common arguments: Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages). -v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
--logfile FILE Log to the file specified. Special values are: --logfile FILE, --log-file FILE
Log to the file specified. Special values are:
'syslog', 'journald'. See the documentation for more 'syslog', 'journald'. See the documentation for more
details. details.
-V, --version show program's version number and exit -V, --version show program's version number and exit
@@ -422,13 +422,12 @@ usage: freqtrade list-data [-h] [-v] [--logfile FILE] [-V] [-c PATH] [-d PATH]
[--trading-mode {spot,margin,futures}] [--trading-mode {spot,margin,futures}]
[--show-timerange] [--show-timerange]
optional arguments: options:
-h, --help show this help message and exit -h, --help show this help message and exit
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no --exchange EXCHANGE Exchange name. Only valid if no config is provided.
config is provided.
--data-format-ohlcv {json,jsongz,hdf5,feather,parquet} --data-format-ohlcv {json,jsongz,hdf5,feather,parquet}
Storage format for downloaded candle (OHLCV) data. Storage format for downloaded candle (OHLCV) data.
(default: `json`). (default: `feather`).
-p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...] -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
Limit command to these pairs. Pairs are space- Limit command to these pairs. Pairs are space-
separated. separated.
@@ -439,7 +438,8 @@ optional arguments:
Common arguments: Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages). -v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
--logfile FILE Log to the file specified. Special values are: --logfile FILE, --log-file FILE
Log to the file specified. Special values are:
'syslog', 'journald'. See the documentation for more 'syslog', 'journald'. See the documentation for more
details. details.
-V, --version show program's version number and exit -V, --version show program's version number and exit
@@ -474,7 +474,7 @@ ETH/USDT 5m, 15m, 30m, 1h, 2h, 4h
By default, `download-data` sub-command downloads Candles (OHLCV) data. Some exchanges also provide historic trade-data via their API. By default, `download-data` sub-command downloads Candles (OHLCV) data. Some exchanges also provide historic trade-data via their API.
This data can be useful if you need many different timeframes, since it is only downloaded once, and then resampled locally to the desired timeframes. This data can be useful if you need many different timeframes, since it is only downloaded once, and then resampled locally to the desired timeframes.
Since this data is large by default, the files use gzip by default. They are stored in your data-directory with the naming convention of `<pair>-trades.json.gz` (`ETH_BTC-trades.json.gz`). Incremental mode is also supported, as for historic OHLCV data, so downloading the data once per week with `--days 8` will create an incremental data-repository. Since this data is large by default, the files use the feather fileformat by default. They are stored in your data-directory with the naming convention of `<pair>-trades.feather` (`ETH_BTC-trades.feather`). Incremental mode is also supported, as for historic OHLCV data, so downloading the data once per week with `--days 8` will create an incremental data-repository.
To use this mode, simply add `--dl-trades` to your call. This will swap the download method to download trades, and resamples the data locally. To use this mode, simply add `--dl-trades` to your call. This will swap the download method to download trades, and resamples the data locally.

View File

@@ -77,7 +77,7 @@ def test_method_to_test(caplog):
### Debug configuration ### Debug configuration
To debug freqtrade, we recommend VSCode with the following launch configuration (located in `.vscode/launch.json`). To debug freqtrade, we recommend VSCode (with the Python extension) with the following launch configuration (located in `.vscode/launch.json`).
Details will obviously vary between setups - but this should work to get you started. Details will obviously vary between setups - but this should work to get you started.
``` json ``` json
@@ -102,6 +102,19 @@ This method can also be used to debug a strategy, by setting the breakpoints wit
A similar setup can also be taken for Pycharm - using `freqtrade` as module name, and setting the command line arguments as "parameters". A similar setup can also be taken for Pycharm - using `freqtrade` as module name, and setting the command line arguments as "parameters".
??? Tip "Correct venv usage"
When using a virtual environment (which you should), make sure that your Editor is using the correct virtual environment to avoid problems or "unknown import" errors.
#### Vscode
You can select the correct environment in VSCode with the command "Python: Select Interpreter" - which will show you environments the extension detected.
If your environment has not been detected, you can also pick a path manually.
#### Pycharm
In pycharm, you can select the appropriate Environment in the "Run/Debug Configurations" window.
![Pycharm debug configuration](assets/pycharm_debug.png)
!!! Note "Startup directory" !!! Note "Startup directory"
This assumes that you have the repository checked out, and the editor is started at the repository root level (so setup.py is at the top level of your repository). This assumes that you have the repository checked out, and the editor is started at the repository root level (so setup.py is at the top level of your repository).

View File

@@ -14,6 +14,9 @@ Start by downloading and installing Docker / Docker Desktop for your platform:
Freqtrade documentation assumes the use of Docker desktop (or the docker compose plugin). Freqtrade documentation assumes the use of Docker desktop (or the docker compose plugin).
While the docker-compose standalone installation still works, it will require changing all `docker compose` commands from `docker compose` to `docker-compose` to work (e.g. `docker compose up -d` will become `docker-compose up -d`). While the docker-compose standalone installation still works, it will require changing all `docker compose` commands from `docker compose` to `docker-compose` to work (e.g. `docker compose up -d` will become `docker-compose up -d`).
??? Warning "Docker on windows"
If you just installed docker on a windows system, make sure to reboot your system, otherwise you might encounter unexplainable Problems related to network connectivity to docker containers.
## Freqtrade with docker ## Freqtrade with docker
Freqtrade provides an official Docker image on [Dockerhub](https://hub.docker.com/r/freqtradeorg/freqtrade/), as well as a [docker compose file](https://github.com/freqtrade/freqtrade/blob/stable/docker-compose.yml) ready for usage. Freqtrade provides an official Docker image on [Dockerhub](https://hub.docker.com/r/freqtradeorg/freqtrade/), as well as a [docker compose file](https://github.com/freqtrade/freqtrade/blob/stable/docker-compose.yml) ready for usage.
@@ -78,7 +81,7 @@ If you've selected to enable FreqUI in the `new-config` step, you will have freq
You can now access the UI by typing localhost:8080 in your browser. You can now access the UI by typing localhost:8080 in your browser.
??? Note "UI Access on a remote servers" ??? Note "UI Access on a remote server"
If you're running on a VPS, you should consider using either a ssh tunnel, or setup a VPN (openVPN, wireguard) to connect to your bot. If you're running on a VPS, you should consider using either a ssh tunnel, or setup a VPN (openVPN, wireguard) to connect to your bot.
This will ensure that freqUI is not directly exposed to the internet, which is not recommended for security reasons (freqUI does not support https out of the box). This will ensure that freqUI is not directly exposed to the internet, which is not recommended for security reasons (freqUI does not support https out of the box).
Setup of these tools is not part of this tutorial, however many good tutorials can be found on the internet. Setup of these tools is not part of this tutorial, however many good tutorials can be found on the internet.
@@ -128,7 +131,7 @@ All freqtrade arguments will be available by running `docker compose run --rm fr
!!! Note "`docker compose run --rm`" !!! Note "`docker compose run --rm`"
Including `--rm` will remove the container after completion, and is highly recommended for all modes except trading mode (running with `freqtrade trade` command). Including `--rm` will remove the container after completion, and is highly recommended for all modes except trading mode (running with `freqtrade trade` command).
??? Note "Using docker without docker" ??? Note "Using docker without docker compose"
"`docker compose run --rm`" will require a compose file to be provided. "`docker compose run --rm`" will require a compose file to be provided.
Some freqtrade commands that don't require authentication such as `list-pairs` can be run with "`docker run --rm`" instead. Some freqtrade commands that don't require authentication such as `list-pairs` can be run with "`docker run --rm`" instead.
For example `docker run --rm freqtradeorg/freqtrade:stable list-pairs --exchange binance --quote BTC --print-json`. For example `docker run --rm freqtradeorg/freqtrade:stable list-pairs --exchange binance --quote BTC --print-json`.
@@ -172,7 +175,7 @@ You can then run `docker compose build --pull` to build the docker image, and ru
### Plotting with docker ### Plotting with docker
Commands `freqtrade plot-profit` and `freqtrade plot-dataframe` ([Documentation](plotting.md)) are available by changing the image to `*_plot` in your docker-compose.yml file. Commands `freqtrade plot-profit` and `freqtrade plot-dataframe` ([Documentation](plotting.md)) are available by changing the image to `*_plot` in your `docker-compose.yml` file.
You can then use these commands as follows: You can then use these commands as follows:
``` bash ``` bash
@@ -203,16 +206,20 @@ docker compose -f docker/docker-compose-jupyter.yml build --no-cache
### Docker on Windows ### Docker on Windows
* Error: `"Timestamp for this request is outside of the recvWindow."` * Error: `"Timestamp for this request is outside of the recvWindow."`
* The market api requests require a synchronized clock but the time in the docker container shifts a bit over time into the past. The market api requests require a synchronized clock but the time in the docker container shifts a bit over time into the past.
To fix this issue temporarily you need to run `wsl --shutdown` and restart docker again (a popup on windows 10 will ask you to do so). To fix this issue temporarily you need to run `wsl --shutdown` and restart docker again (a popup on windows 10 will ask you to do so).
A permanent solution is either to host the docker container on a linux host or restart the wsl from time to time with the scheduler. A permanent solution is either to host the docker container on a linux host or restart the wsl from time to time with the scheduler.
``` bash ``` bash
taskkill /IM "Docker Desktop.exe" /F taskkill /IM "Docker Desktop.exe" /F
wsl --shutdown wsl --shutdown
start "" "C:\Program Files\Docker\Docker\Docker Desktop.exe" start "" "C:\Program Files\Docker\Docker\Docker Desktop.exe"
``` ```
* Cannot connect to the API (Windows)
If you're on Windows and just installed Docker (Desktop), make sure to reboot your system. Docker can have problems with network connectivity without a restart.
You should obviously also make sure to have your [settings](#accessing-the-ui) accordingly.
!!! Warning !!! Warning
Due to the above, we do not recommend the usage of docker on windows for production setups, but only for experimentation, datadownload and backtesting. Due to the above, we do not recommend the usage of docker on windows for production setups, but only for experimentation, datadownload and backtesting.

View File

@@ -20,7 +20,7 @@ Futures trading is supported for selected exchanges. Please refer to the [docume
* When you work with your strategy & hyperopt file you should use a proper code editor like VSCode or PyCharm. A good code editor will provide syntax highlighting as well as line numbers, making it easy to find syntax errors (most likely pointed out by Freqtrade during startup). * When you work with your strategy & hyperopt file you should use a proper code editor like VSCode or PyCharm. A good code editor will provide syntax highlighting as well as line numbers, making it easy to find syntax errors (most likely pointed out by Freqtrade during startup).
## Freqtrade common issues ## Freqtrade common questions
### Can freqtrade open multiple positions on the same pair in parallel? ### Can freqtrade open multiple positions on the same pair in parallel?
@@ -36,7 +36,7 @@ Running the bot with `freqtrade trade --config config.json` shows the output `fr
This could be caused by the following reasons: This could be caused by the following reasons:
* The virtual environment is not active. * The virtual environment is not active.
* Run `source .env/bin/activate` to activate the virtual environment. * Run `source .venv/bin/activate` to activate the virtual environment.
* The installation did not complete successfully. * The installation did not complete successfully.
* Please check the [Installation documentation](installation.md). * Please check the [Installation documentation](installation.md).
@@ -78,6 +78,14 @@ Where possible (e.g. on binance), the use of the exchange's dedicated fee curren
On binance, it's sufficient to have BNB in your account, and have "Pay fees in BNB" enabled in your profile. Your BNB balance will slowly decline (as it's used to pay fees) - but you'll no longer encounter dust (Freqtrade will include the fees in the profit calculations). On binance, it's sufficient to have BNB in your account, and have "Pay fees in BNB" enabled in your profile. Your BNB balance will slowly decline (as it's used to pay fees) - but you'll no longer encounter dust (Freqtrade will include the fees in the profit calculations).
Other exchanges don't offer such possibilities, where it's simply something you'll have to accept or move to a different exchange. Other exchanges don't offer such possibilities, where it's simply something you'll have to accept or move to a different exchange.
### I deposited more funds to the exchange, but my bot doesn't recognize this
Freqtrade will update the exchange balance when necessary (Before placing an order).
RPC calls (Telegram's `/balance`, API calls to `/balance`) can trigger an update at max. once per hour.
If `adjust_trade_position` is enabled (and the bot has open trades eligible for position adjustments) - then the wallets will be refreshed once per hour.
To force an immediate update, you can use `/reload_config` - which will restart the bot.
### I want to use incomplete candles ### I want to use incomplete candles
Freqtrade will not provide incomplete candles to strategies. Using incomplete candles will lead to repainting and consequently to strategies with "ghost" buys, which are impossible to both backtest, and verify after they happened. Freqtrade will not provide incomplete candles to strategies. Using incomplete candles will lead to repainting and consequently to strategies with "ghost" buys, which are impossible to both backtest, and verify after they happened.

View File

@@ -100,12 +100,12 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
#### trainer_kwargs #### trainer_kwargs
| Parameter | Description | | Parameter | Description |
|------------|-------------| |--------------|-------------|
| | **Model training parameters within the `freqai.model_training_parameters.model_kwargs` sub dictionary** | | **Model training parameters within the `freqai.model_training_parameters.model_kwargs` sub dictionary**
| `max_iters` | The number of training iterations to run. iteration here refers to the number of times we call self.optimizer.step(). used to calculate n_epochs. <br> **Datatype:** int. <br> Default: `100`. | `n_epochs` | The `n_epochs` parameter is a crucial setting in the PyTorch training loop that determines the number of times the entire training dataset will be used to update the model's parameters. An epoch represents one full pass through the entire training dataset. Overrides `n_steps`. Either `n_epochs` or `n_steps` must be set. <br><br> **Datatype:** int. optional. <br> Default: `10`.
| `batch_size` | The size of the batches to use during training.. <br> **Datatype:** int. <br> Default: `64`. | `n_steps` | An alternative way of setting `n_epochs` - the number of training iterations to run. Iterations here refer to the number of times we call `optimizer.step()`. Ignored if `n_epochs` is set. A simplified version of the function: <br><br> n_epochs = n_steps / (n_obs / batch_size) <br><br> The motivation here is that `n_steps` is easier to optimize and keep stable across different n_obs - the number of data points. <br> <br> **Datatype:** int. optional. <br> Default: `None`.
| `max_n_eval_batches` | The maximum number batches to use for evaluation.. <br> **Datatype:** int, optional. <br> Default: `None`. | `batch_size` | The size of the batches to use during training. <br><br> **Datatype:** int. <br> Default: `64`.
### Additional parameters ### Additional parameters

View File

@@ -20,7 +20,7 @@ With the current framework, we aim to expose the training environment via the co
We envision the majority of users focusing their effort on creative design of the `calculate_reward()` function [details here](#creating-a-custom-reward-function), while leaving the rest of the environment untouched. Other users may not touch the environment at all, and they will only play with the configuration settings and the powerful feature engineering that already exists in FreqAI. Meanwhile, we enable advanced users to create their own model classes entirely. We envision the majority of users focusing their effort on creative design of the `calculate_reward()` function [details here](#creating-a-custom-reward-function), while leaving the rest of the environment untouched. Other users may not touch the environment at all, and they will only play with the configuration settings and the powerful feature engineering that already exists in FreqAI. Meanwhile, we enable advanced users to create their own model classes entirely.
The framework is built on stable_baselines3 (torch) and OpenAI gym for the base environment class. But generally speaking, the model class is well isolated. Thus, the addition of competing libraries can be easily integrated into the existing framework. For the environment, it is inheriting from `gym.env` which means that it is necessary to write an entirely new environment in order to switch to a different library. The framework is built on stable_baselines3 (torch) and OpenAI gym for the base environment class. But generally speaking, the model class is well isolated. Thus, the addition of competing libraries can be easily integrated into the existing framework. For the environment, it is inheriting from `gym.Env` which means that it is necessary to write an entirely new environment in order to switch to a different library.
### Important considerations ### Important considerations
@@ -173,7 +173,7 @@ class MyCoolRLModel(ReinforcementLearner):
""" """
class MyRLEnv(Base5ActionRLEnv): class MyRLEnv(Base5ActionRLEnv):
""" """
User made custom environment. This class inherits from BaseEnvironment and gym.env. User made custom environment. This class inherits from BaseEnvironment and gym.Env.
Users can override any functions from those parent classes. Here is an example Users can override any functions from those parent classes. Here is an example
of a user customized `calculate_reward()` function. of a user customized `calculate_reward()` function.
@@ -254,7 +254,7 @@ FreqAI also provides a built in episodic summary logger called `self.tensorboard
```python ```python
class MyRLEnv(Base5ActionRLEnv): class MyRLEnv(Base5ActionRLEnv):
""" """
User made custom environment. This class inherits from BaseEnvironment and gym.env. User made custom environment. This class inherits from BaseEnvironment and gym.Env.
Users can override any functions from those parent classes. Here is an example Users can override any functions from those parent classes. Here is an example
of a user customized `calculate_reward()` function. of a user customized `calculate_reward()` function.
""" """

View File

@@ -31,7 +31,7 @@ The docker-image includes hyperopt dependencies, no further action needed.
### Easy installation script (setup.sh) / Manual installation ### Easy installation script (setup.sh) / Manual installation
```bash ```bash
source .env/bin/activate source .venv/bin/activate
pip install -r requirements-hyperopt.txt pip install -r requirements-hyperopt.txt
``` ```
@@ -433,9 +433,14 @@ While this strategy is most likely too simple to provide consistent profit, it s
`range` property may also be used with `DecimalParameter` and `CategoricalParameter`. `RealParameter` does not provide this property due to infinite search space. `range` property may also be used with `DecimalParameter` and `CategoricalParameter`. `RealParameter` does not provide this property due to infinite search space.
??? Hint "Performance tip" ??? Hint "Performance tip"
During normal hyperopting, indicators are calculated once and supplied to each epoch, linearly increasing RAM usage as a factor of increasing cores. As this also has performance implications, hyperopt provides `--analyze-per-epoch` which will move the execution of `populate_indicators()` to the epoch process, calculating a single value per parameter per epoch instead of using the `.range` functionality. In this case, `.range` functionality will only return the actually used value. This will reduce RAM usage, but increase CPU usage. However, your hyperopting run will be less likely to fail due to Out Of Memory (OOM) issues. During normal hyperopting, indicators are calculated once and supplied to each epoch, linearly increasing RAM usage as a factor of increasing cores. As this also has performance implications, there are two alternatives to reduce RAM usage
In either case, you should try to use space ranges as small as possible this will improve CPU/RAM usage in both scenarios. * Move `ema_short` and `ema_long` calculations from `populate_indicators()` to `populate_entry_trend()`. Since `populate_entry_trend()` is going to be calculated every epoch, you don't need to use `.range` functionality.
* hyperopt provides `--analyze-per-epoch` which will move the execution of `populate_indicators()` to the epoch process, calculating a single value per parameter per epoch instead of using the `.range` functionality. In this case, `.range` functionality will only return the actually used value.
These alternatives will reduce RAM usage, but increase CPU usage. However, your hyperopting run will be less likely to fail due to Out Of Memory (OOM) issues.
Whether you are using `.range` functionality or the alternatives above, you should try to use space ranges as small as possible since this will improve CPU/RAM usage.
## Optimizing protections ## Optimizing protections

View File

@@ -143,11 +143,11 @@ If you are on Debian, Ubuntu or MacOS, freqtrade provides the script to install
### Activate your virtual environment ### Activate your virtual environment
Each time you open a new terminal, you must run `source .env/bin/activate` to activate your virtual environment. Each time you open a new terminal, you must run `source .venv/bin/activate` to activate your virtual environment.
```bash ```bash
# then activate your .env # activate virtual environment
source ./.env/bin/activate source ./.venv/bin/activate
``` ```
### Congratulations ### Congratulations
@@ -172,7 +172,7 @@ With this option, the script will install the bot and most dependencies:
You will need to have git and python3.8+ installed beforehand for this to work. You will need to have git and python3.8+ installed beforehand for this to work.
* Mandatory software as: `ta-lib` * Mandatory software as: `ta-lib`
* Setup your virtualenv under `.env/` * Setup your virtualenv under `.venv/`
This option is a combination of installation tasks and `--reset` This option is a combination of installation tasks and `--reset`
@@ -225,11 +225,11 @@ rm -rf ./ta-lib*
You will run freqtrade in separated `virtual environment` You will run freqtrade in separated `virtual environment`
```bash ```bash
# create virtualenv in directory /freqtrade/.env # create virtualenv in directory /freqtrade/.venv
python3 -m venv .env python3 -m venv .venv
# run virtualenv # run virtualenv
source .env/bin/activate source .venv/bin/activate
``` ```
#### Install python dependencies #### Install python dependencies
@@ -286,7 +286,7 @@ cd freqtrade
#### Freqtrade install: Conda Environment #### Freqtrade install: Conda Environment
```bash ```bash
conda create --name freqtrade python=3.10 conda create --name freqtrade python=3.11
``` ```
!!! Note "Creating Conda Environment" !!! Note "Creating Conda Environment"
@@ -383,7 +383,7 @@ You've made it this far, so you have successfully installed freqtrade.
freqtrade create-userdir --userdir user_data freqtrade create-userdir --userdir user_data
# Step 2 - Create a new configuration file # Step 2 - Create a new configuration file
freqtrade new-config --config config.json freqtrade new-config --config user_data/config.json
``` ```
You are ready to run, read [Bot Configuration](configuration.md), remember to start with `dry_run: True` and verify that everything is working. You are ready to run, read [Bot Configuration](configuration.md), remember to start with `dry_run: True` and verify that everything is working.
@@ -393,7 +393,7 @@ To learn how to setup your configuration, please refer to the [Bot Configuration
### Start the Bot ### Start the Bot
```bash ```bash
freqtrade trade --config config.json --strategy SampleStrategy freqtrade trade --config user_data/config.json --strategy SampleStrategy
``` ```
!!! Warning !!! Warning
@@ -411,8 +411,8 @@ If you used (1)`Script` or (2)`Manual` installation, you need to run the bot in
# if: # if:
bash: freqtrade: command not found bash: freqtrade: command not found
# then activate your .env # then activate your virtual environment
source ./.env/bin/activate source ./.venv/bin/activate
``` ```
### MacOS installation error ### MacOS installation error

View File

@@ -64,7 +64,7 @@ You will also have to pick a "margin mode" (explanation below) - with freqtrade
##### Pair namings ##### Pair namings
Freqtrade follows the [ccxt naming conventions for futures](https://docs.ccxt.com/en/latest/manual.html?#perpetual-swap-perpetual-future). Freqtrade follows the [ccxt naming conventions for futures](https://docs.ccxt.com/#/README?id=perpetual-swap-perpetual-future).
A futures pair will therefore have the naming of `base/quote:settle` (e.g. `ETH/USDT:USDT`). A futures pair will therefore have the naming of `base/quote:settle` (e.g. `ETH/USDT:USDT`).
### Margin mode ### Margin mode

View File

@@ -21,7 +21,10 @@ It also supports the lookahead-analysis of freqai strategies.
- `--cache` is forced to "none". - `--cache` is forced to "none".
- `--max-open-trades` is forced to be at least equal to the number of pairs. - `--max-open-trades` is forced to be at least equal to the number of pairs.
- `--dry-run-wallet` is forced to be basically infinite. - `--dry-run-wallet` is forced to be basically infinite (1 billion).
- `--stake-amount` is forced to be a static 10000 (10k).
Those are set to avoid users accidentally generating false positives.
## Lookahead-analysis command reference ## Lookahead-analysis command reference

View File

@@ -1,6 +1,6 @@
markdown==3.3.7 markdown==3.4.4
mkdocs==1.4.3 mkdocs==1.5.2
mkdocs-material==9.1.19 mkdocs-material==9.2.1
mdx_truly_sane_lists==1.3 mdx_truly_sane_lists==1.3
pymdown-extensions==10.1 pymdown-extensions==10.1
jinja2==3.1.2 jinja2==3.1.2

View File

@@ -1,121 +0,0 @@
# Sandbox API testing
Some exchanges provide sandboxes or testbeds for risk-free testing, while running the bot against a real exchange.
With some configuration, freqtrade (in combination with ccxt) provides access to these.
This document is an overview to configure Freqtrade to be used with sandboxes.
This can be useful to developers and trader alike.
!!! Warning
Sandboxes usually have very low volume, and either a very wide spread, or no orders available at all.
Therefore, sandboxes will usually not do a good job of showing you how a strategy would work in real trading.
## Exchanges known to have a sandbox / testnet
* [binance](https://testnet.binance.vision/)
* [coinbasepro](https://public.sandbox.pro.coinbase.com)
* [gemini](https://exchange.sandbox.gemini.com/)
* [huobipro](https://www.testnet.huobi.pro/)
* [kucoin](https://sandbox.kucoin.com/)
* [phemex](https://testnet.phemex.com/)
!!! Note
We did not test correct functioning of all of the above testnets. Please report your experiences with each sandbox.
---
## Configure a Sandbox account
When testing your API connectivity, make sure to use the appropriate sandbox / testnet URL.
In general, you should follow these steps to enable an exchange's sandbox:
* Figure out if an exchange has a sandbox (most likely by using google or the exchange's support documents)
* Create a sandbox account (often the sandbox-account requires separate registration)
* [Add some test assets to account](#add-test-funds)
* Create API keys
### Add test funds
Usually, sandbox exchanges allow depositing funds directly via web-interface.
You should make sure to have a realistic amount of funds available to your test-account, so results are representable of your real account funds.
!!! Warning
Test exchanges will **NEVER** require your real credit card or banking details!
## Configure freqtrade to use a exchange's sandbox
### Sandbox URLs
Freqtrade makes use of CCXT which in turn provides a list of URLs to Freqtrade.
These include `['test']` and `['api']`.
* `[Test]` if available will point to an Exchanges sandbox.
* `[Api]` normally used, and resolves to live API target on the exchange.
To make use of sandbox / test add "sandbox": true, to your config.json
```json
"exchange": {
"name": "coinbasepro",
"sandbox": true,
"key": "5wowfxemogxeowo;heiohgmd",
"secret": "/ZMH1P62rCVmwefewrgcewX8nh4gob+lywxfwfxwwfxwfNsH1ySgvWCUR/w==",
"password": "1bkjfkhfhfu6sr",
"outdated_offset": 5
"pair_whitelist": [
"BTC/USD"
]
},
"datadir": "user_data/data/coinbasepro_sandbox"
```
Also the following information:
* api-key (created for the sandbox webpage)
* api-secret (noted earlier)
* password (the passphrase - noted earlier)
!!! Tip "Different data directory"
We also recommend to set `datadir` to something identifying downloaded data as sandbox data, to avoid having sandbox data mixed with data from the real exchange.
This can be done by adding the `"datadir"` key to the configuration.
Now, whenever you use this configuration, your data directory will be set to this directory.
---
## You should now be ready to test your sandbox
Ensure Freqtrade logs show the sandbox URL, and trades made are shown in sandbox. Also make sure to select a pair which shows at least some decent value (which very often is BTC/<somestablecoin>).
## Common problems with sandbox exchanges
Sandbox exchange instances often have very low volume, which can cause some problems which usually are not seen on a real exchange instance.
### Old Candles problem
Since Sandboxes often have low volume, candles can be quite old and show no volume.
To disable the error "Outdated history for pair ...", best increase the parameter `"outdated_offset"` to a number that seems realistic for the sandbox you're using.
### Unfilled orders
Sandboxes often have very low volumes - which means that many trades can go unfilled, or can go unfilled for a very long time.
To mitigate this, you can try to match the first order on the opposite orderbook side using the following configuration:
``` jsonc
"order_types": {
"entry": "limit",
"exit": "limit"
// ...
},
"entry_pricing": {
"price_side": "other",
// ...
},
"exit_pricing":{
"price_side": "other",
// ...
},
```
The configuration is similar to the suggested configuration for market orders - however by using limit-orders you can avoid moving the price too much, and you can set the worst price you might get.

View File

@@ -967,7 +967,7 @@ Print trades with id 2 and 3 as json
freqtrade show-trades --db-url sqlite:///tradesv3.sqlite --trade-ids 2 3 --print-json freqtrade show-trades --db-url sqlite:///tradesv3.sqlite --trade-ids 2 3 --print-json
``` ```
### Strategy-Updater ## Strategy-Updater
Updates listed strategies or all strategies within the strategies folder to be v3 compliant. Updates listed strategies or all strategies within the strategies folder to be v3 compliant.
If the command runs without --strategy-list then all strategies inside the strategies folder will be converted. If the command runs without --strategy-list then all strategies inside the strategies folder will be converted.

View File

@@ -31,8 +31,8 @@ Other versions must be downloaded from the above link.
``` powershell ``` powershell
cd \path\freqtrade cd \path\freqtrade
python -m venv .env python -m venv .venv
.env\Scripts\activate.ps1 .venv\Scripts\activate.ps1
# optionally install ta-lib from wheel # optionally install ta-lib from wheel
# Eventually adjust the below filename to match the downloaded wheel # Eventually adjust the below filename to match the downloaded wheel
pip install --find-links build_helpers\ TA-Lib -U pip install --find-links build_helpers\ TA-Lib -U

View File

@@ -1,5 +1,5 @@
""" Freqtrade bot """ """ Freqtrade bot """
__version__ = '2023.7' __version__ = '2023.8'
if 'dev' in __version__: if 'dev' in __version__:
from pathlib import Path from pathlib import Path

View File

@@ -10,7 +10,7 @@ from freqtrade.configuration.directory_operations import chown_user_directory
from freqtrade.constants import UNLIMITED_STAKE_AMOUNT from freqtrade.constants import UNLIMITED_STAKE_AMOUNT
from freqtrade.exceptions import OperationalException from freqtrade.exceptions import OperationalException
from freqtrade.exchange import MAP_EXCHANGE_CHILDCLASS, available_exchanges from freqtrade.exchange import MAP_EXCHANGE_CHILDCLASS, available_exchanges
from freqtrade.misc import render_template from freqtrade.util import render_template
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -105,7 +105,7 @@ def ask_user_config() -> Dict[str, Any]:
"type": "select", "type": "select",
"name": "exchange_name", "name": "exchange_name",
"message": "Select exchange", "message": "Select exchange",
"choices": lambda x: [ "choices": [
"binance", "binance",
"binanceus", "binanceus",
"bittrex", "bittrex",

View File

@@ -435,13 +435,13 @@ AVAILABLE_CLI_OPTIONS = {
), ),
"dataformat_ohlcv": Arg( "dataformat_ohlcv": Arg(
'--data-format-ohlcv', '--data-format-ohlcv',
help='Storage format for downloaded candle (OHLCV) data. (default: `json`).', help='Storage format for downloaded candle (OHLCV) data. (default: `feather`).',
choices=constants.AVAILABLE_DATAHANDLERS, choices=constants.AVAILABLE_DATAHANDLERS,
), ),
"dataformat_trades": Arg( "dataformat_trades": Arg(
'--data-format-trades', '--data-format-trades',
help='Storage format for downloaded trades data. (default: `jsongz`).', help='Storage format for downloaded trades data. (default: `feather`).',
choices=constants.AVAILABLE_DATAHANDLERS_TRADES, choices=constants.AVAILABLE_DATAHANDLERS,
), ),
"show_timerange": Arg( "show_timerange": Arg(
'--show-timerange', '--show-timerange',

View File

@@ -10,7 +10,7 @@ from freqtrade.configuration.directory_operations import copy_sample_files, crea
from freqtrade.constants import USERPATH_STRATEGIES from freqtrade.constants import USERPATH_STRATEGIES
from freqtrade.enums import RunMode from freqtrade.enums import RunMode
from freqtrade.exceptions import OperationalException from freqtrade.exceptions import OperationalException
from freqtrade.misc import render_template, render_template_with_fallback from freqtrade.util import render_template, render_template_with_fallback
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -35,6 +35,10 @@ def deploy_new_strategy(strategy_name: str, strategy_path: Path, subtemplate: st
Deploy new strategy from template to strategy_path Deploy new strategy from template to strategy_path
""" """
fallback = 'full' fallback = 'full'
attributes = render_template_with_fallback(
templatefile=f"strategy_subtemplates/strategy_attributes_{subtemplate}.j2",
templatefallbackfile=f"strategy_subtemplates/strategy_attributes_{fallback}.j2",
)
indicators = render_template_with_fallback( indicators = render_template_with_fallback(
templatefile=f"strategy_subtemplates/indicators_{subtemplate}.j2", templatefile=f"strategy_subtemplates/indicators_{subtemplate}.j2",
templatefallbackfile=f"strategy_subtemplates/indicators_{fallback}.j2", templatefallbackfile=f"strategy_subtemplates/indicators_{fallback}.j2",
@@ -58,6 +62,7 @@ def deploy_new_strategy(strategy_name: str, strategy_path: Path, subtemplate: st
strategy_text = render_template(templatefile='base_strategy.py.j2', strategy_text = render_template(templatefile='base_strategy.py.j2',
arguments={"strategy": strategy_name, arguments={"strategy": strategy_name,
"attributes": attributes,
"indicators": indicators, "indicators": indicators,
"buy_trend": buy_trend, "buy_trend": buy_trend,
"sell_trend": sell_trend, "sell_trend": sell_trend,

View File

@@ -7,9 +7,10 @@ def start_webserver(args: Dict[str, Any]) -> None:
""" """
Main entry point for webserver mode Main entry point for webserver mode
""" """
from freqtrade.configuration import Configuration from freqtrade.configuration import setup_utils_configuration
from freqtrade.rpc.api_server import ApiServer from freqtrade.rpc.api_server import ApiServer
# Initialize configuration # Initialize configuration
config = Configuration(args, RunMode.WEBSERVER).get_config()
config = setup_utils_configuration(args, RunMode.WEBSERVER)
ApiServer(config, standalone=True) ApiServer(config, standalone=True)

View File

@@ -3,4 +3,5 @@
from freqtrade.configuration.config_setup import setup_utils_configuration from freqtrade.configuration.config_setup import setup_utils_configuration
from freqtrade.configuration.config_validation import validate_config_consistency from freqtrade.configuration.config_validation import validate_config_consistency
from freqtrade.configuration.configuration import Configuration from freqtrade.configuration.configuration import Configuration
from freqtrade.configuration.detect_environment import running_in_docker
from freqtrade.configuration.timerange import TimeRange from freqtrade.configuration.timerange import TimeRange

View File

@@ -51,6 +51,8 @@ def validate_config_schema(conf: Dict[str, Any], preliminary: bool = False) -> D
conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED
else: else:
conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED_FINAL conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED_FINAL
elif conf.get('runmode', RunMode.OTHER) == RunMode.WEBSERVER:
conf_schema['required'] = constants.SCHEMA_MINIMAL_WEBSERVER
else: else:
conf_schema['required'] = constants.SCHEMA_MINIMAL_REQUIRED conf_schema['required'] = constants.SCHEMA_MINIMAL_REQUIRED
try: try:

View File

@@ -41,7 +41,7 @@ def flat_vars_to_nested_dict(env_dict: Dict[str, Any], prefix: str) -> Dict[str,
key = env_var.replace(prefix, '') key = env_var.replace(prefix, '')
for k in reversed(key.split('__')): for k in reversed(key.split('__')):
val = {k.lower(): get_var_typed(val) val = {k.lower(): get_var_typed(val)
if type(val) != dict and k not in no_convert else val} if not isinstance(val, dict) and k not in no_convert else val}
relevant_vars = deep_merge_dicts(val, relevant_vars) relevant_vars = deep_merge_dicts(val, relevant_vars)
return relevant_vars return relevant_vars

View File

@@ -38,8 +38,7 @@ AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList', 'ProducerPairList', '
'ShuffleFilter', 'SpreadFilter', 'VolatilityFilter'] 'ShuffleFilter', 'SpreadFilter', 'VolatilityFilter']
AVAILABLE_PROTECTIONS = ['CooldownPeriod', AVAILABLE_PROTECTIONS = ['CooldownPeriod',
'LowProfitPairs', 'MaxDrawdown', 'StoplossGuard'] 'LowProfitPairs', 'MaxDrawdown', 'StoplossGuard']
AVAILABLE_DATAHANDLERS_TRADES = ['json', 'jsongz', 'hdf5', 'feather'] AVAILABLE_DATAHANDLERS = ['json', 'jsongz', 'hdf5', 'feather', 'parquet']
AVAILABLE_DATAHANDLERS = AVAILABLE_DATAHANDLERS_TRADES + ['parquet']
BACKTEST_BREAKDOWNS = ['day', 'week', 'month'] BACKTEST_BREAKDOWNS = ['day', 'week', 'month']
BACKTEST_CACHE_AGE = ['none', 'day', 'week', 'month'] BACKTEST_CACHE_AGE = ['none', 'day', 'week', 'month']
BACKTEST_CACHE_DEFAULT = 'day' BACKTEST_CACHE_DEFAULT = 'day'
@@ -50,6 +49,15 @@ DEFAULT_DATAFRAME_COLUMNS = ['date', 'open', 'high', 'low', 'close', 'volume']
# Don't modify sequence of DEFAULT_TRADES_COLUMNS # Don't modify sequence of DEFAULT_TRADES_COLUMNS
# it has wide consequences for stored trades files # it has wide consequences for stored trades files
DEFAULT_TRADES_COLUMNS = ['timestamp', 'id', 'type', 'side', 'price', 'amount', 'cost'] DEFAULT_TRADES_COLUMNS = ['timestamp', 'id', 'type', 'side', 'price', 'amount', 'cost']
TRADES_DTYPES = {
'timestamp': 'int64',
'id': 'str',
'type': 'str',
'side': 'str',
'price': 'float64',
'amount': 'float64',
'cost': 'float64',
}
TRADING_MODES = ['spot', 'margin', 'futures'] TRADING_MODES = ['spot', 'margin', 'futures']
MARGIN_MODES = ['cross', 'isolated', ''] MARGIN_MODES = ['cross', 'isolated', '']
@@ -153,7 +161,7 @@ CONF_SCHEMA = {
}, },
}, },
'amount_reserve_percent': {'type': 'number', 'minimum': 0.0, 'maximum': 0.5}, 'amount_reserve_percent': {'type': 'number', 'minimum': 0.0, 'maximum': 0.5},
'stoploss': {'type': 'number', 'maximum': 0, 'exclusiveMaximum': True, 'minimum': -1}, 'stoploss': {'type': 'number', 'maximum': 0, 'exclusiveMaximum': True},
'trailing_stop': {'type': 'boolean'}, 'trailing_stop': {'type': 'boolean'},
'trailing_stop_positive': {'type': 'number', 'minimum': 0, 'maximum': 1}, 'trailing_stop_positive': {'type': 'number', 'minimum': 0, 'maximum': 1},
'trailing_stop_positive_offset': {'type': 'number', 'minimum': 0, 'maximum': 1}, 'trailing_stop_positive_offset': {'type': 'number', 'minimum': 0, 'maximum': 1},
@@ -446,12 +454,12 @@ CONF_SCHEMA = {
'dataformat_ohlcv': { 'dataformat_ohlcv': {
'type': 'string', 'type': 'string',
'enum': AVAILABLE_DATAHANDLERS, 'enum': AVAILABLE_DATAHANDLERS,
'default': 'json' 'default': 'feather'
}, },
'dataformat_trades': { 'dataformat_trades': {
'type': 'string', 'type': 'string',
'enum': AVAILABLE_DATAHANDLERS_TRADES, 'enum': AVAILABLE_DATAHANDLERS,
'default': 'jsongz' 'default': 'feather'
}, },
'position_adjustment_enable': {'type': 'boolean'}, 'position_adjustment_enable': {'type': 'boolean'},
'max_entry_position_adjustment': {'type': ['integer', 'number'], 'minimum': -1}, 'max_entry_position_adjustment': {'type': ['integer', 'number'], 'minimum': -1},
@@ -461,7 +469,6 @@ CONF_SCHEMA = {
'type': 'object', 'type': 'object',
'properties': { 'properties': {
'name': {'type': 'string'}, 'name': {'type': 'string'},
'sandbox': {'type': 'boolean', 'default': False},
'key': {'type': 'string', 'default': ''}, 'key': {'type': 'string', 'default': ''},
'secret': {'type': 'string', 'default': ''}, 'secret': {'type': 'string', 'default': ''},
'password': {'type': 'string', 'default': ''}, 'password': {'type': 'string', 'default': ''},
@@ -668,6 +675,9 @@ SCHEMA_MINIMAL_REQUIRED = [
'dataformat_ohlcv', 'dataformat_ohlcv',
'dataformat_trades', 'dataformat_trades',
] ]
SCHEMA_MINIMAL_WEBSERVER = SCHEMA_MINIMAL_REQUIRED + [
'api_server',
]
CANCEL_REASON = { CANCEL_REASON = {
"TIMEOUT": "cancelled due to timeout", "TIMEOUT": "cancelled due to timeout",

View File

@@ -5,16 +5,17 @@ import logging
from copy import copy from copy import copy
from datetime import datetime, timezone from datetime import datetime, timezone
from pathlib import Path from pathlib import Path
from typing import Any, Dict, List, Optional, Union from typing import Any, Dict, List, Literal, Optional, Union
import numpy as np import numpy as np
import pandas as pd import pandas as pd
from freqtrade.constants import LAST_BT_RESULT_FN, IntOrInf from freqtrade.constants import LAST_BT_RESULT_FN, IntOrInf
from freqtrade.exceptions import OperationalException from freqtrade.exceptions import OperationalException
from freqtrade.misc import json_load from freqtrade.misc import file_dump_json, json_load
from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename
from freqtrade.persistence import LocalTrade, Trade, init_db from freqtrade.persistence import LocalTrade, Trade, init_db
from freqtrade.types import BacktestHistoryEntryType, BacktestResultType
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -128,7 +129,7 @@ def load_backtest_metadata(filename: Union[Path, str]) -> Dict[str, Any]:
raise OperationalException('Unexpected error while loading backtest metadata.') from e raise OperationalException('Unexpected error while loading backtest metadata.') from e
def load_backtest_stats(filename: Union[Path, str]) -> Dict[str, Any]: def load_backtest_stats(filename: Union[Path, str]) -> BacktestResultType:
""" """
Load backtest statistics file. Load backtest statistics file.
:param filename: pathlib.Path object, or string pointing to the file. :param filename: pathlib.Path object, or string pointing to the file.
@@ -147,21 +148,21 @@ def load_backtest_stats(filename: Union[Path, str]) -> Dict[str, Any]:
# Legacy list format does not contain metadata. # Legacy list format does not contain metadata.
if isinstance(data, dict): if isinstance(data, dict):
data['metadata'] = load_backtest_metadata(filename) data['metadata'] = load_backtest_metadata(filename)
return data return data
def load_and_merge_backtest_result(strategy_name: str, filename: Path, results: Dict[str, Any]): def load_and_merge_backtest_result(strategy_name: str, filename: Path, results: Dict[str, Any]):
""" """
Load one strategy from multi-strategy result Load one strategy from multi-strategy result and merge it with results
and merge it with results
:param strategy_name: Name of the strategy contained in the result :param strategy_name: Name of the strategy contained in the result
:param filename: Backtest-result-filename to load :param filename: Backtest-result-filename to load
:param results: dict to merge the result to. :param results: dict to merge the result to.
""" """
bt_data = load_backtest_stats(filename) bt_data = load_backtest_stats(filename)
for k in ('metadata', 'strategy'): k: Literal['metadata', 'strategy']
for k in ('metadata', 'strategy'): # type: ignore
results[k][strategy_name] = bt_data[k][strategy_name] results[k][strategy_name] = bt_data[k][strategy_name]
results['metadata'][strategy_name]['filename'] = filename.stem
comparison = bt_data['strategy_comparison'] comparison = bt_data['strategy_comparison']
for i in range(len(comparison)): for i in range(len(comparison)):
if comparison[i]['key'] == strategy_name: if comparison[i]['key'] == strategy_name:
@@ -174,24 +175,37 @@ def _get_backtest_files(dirname: Path) -> List[Path]:
return list(reversed(sorted(dirname.glob('backtest-result-*-[0-9][0-9].json')))) return list(reversed(sorted(dirname.glob('backtest-result-*-[0-9][0-9].json'))))
def get_backtest_resultlist(dirname: Path): def get_backtest_result(filename: Path) -> List[BacktestHistoryEntryType]:
"""
Get backtest result read from metadata file
"""
return [
{
'filename': filename.stem,
'strategy': s,
'notes': v.get('notes', ''),
'run_id': v['run_id'],
'backtest_start_time': v['backtest_start_time'],
} for s, v in load_backtest_metadata(filename).items()
]
def get_backtest_resultlist(dirname: Path) -> List[BacktestHistoryEntryType]:
""" """
Get list of backtest results read from metadata files Get list of backtest results read from metadata files
""" """
results = [] return [
for filename in _get_backtest_files(dirname): {
metadata = load_backtest_metadata(filename) 'filename': filename.stem,
if not metadata: 'strategy': s,
continue 'run_id': v['run_id'],
for s, v in metadata.items(): 'notes': v.get('notes', ''),
results.append({ 'backtest_start_time': v['backtest_start_time'],
'filename': filename.stem, }
'strategy': s, for filename in _get_backtest_files(dirname)
'run_id': v['run_id'], for s, v in load_backtest_metadata(filename).items()
'backtest_start_time': v['backtest_start_time'], if v
]
})
return results
def delete_backtest_result(file_abs: Path): def delete_backtest_result(file_abs: Path):
@@ -205,6 +219,21 @@ def delete_backtest_result(file_abs: Path):
file_abs_meta.unlink() file_abs_meta.unlink()
def update_backtest_metadata(filename: Path, strategy: str, content: Dict[str, Any]):
"""
Updates backtest metadata file with new content.
:raises: ValueError if metadata file does not exist, or strategy is not in this file.
"""
metadata = load_backtest_metadata(filename)
if not metadata:
raise ValueError("File does not exist.")
if strategy not in metadata:
raise ValueError("Strategy not in metadata.")
metadata[strategy].update(content)
# Write data again.
file_dump_json(get_backtest_metadata_filename(filename), metadata)
def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, str], def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, str],
min_backtest_date: Optional[datetime] = None) -> Dict[str, Any]: min_backtest_date: Optional[datetime] = None) -> Dict[str, Any]:
""" """

View File

@@ -1,16 +1,15 @@
""" """
Functions to convert data from one format to another Functions to convert data from one format to another
""" """
import itertools
import logging import logging
from operator import itemgetter
from typing import Dict, List from typing import Dict, List
import numpy as np import numpy as np
import pandas as pd import pandas as pd
from pandas import DataFrame, to_datetime from pandas import DataFrame, to_datetime
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, Config, TradeList from freqtrade.constants import (DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TRADES_DTYPES,
Config, TradeList)
from freqtrade.enums import CandleType, TradingMode from freqtrade.enums import CandleType, TradingMode
@@ -195,15 +194,14 @@ def order_book_to_dataframe(bids: list, asks: list) -> DataFrame:
return frame return frame
def trades_remove_duplicates(trades: List[List]) -> List[List]: def trades_df_remove_duplicates(trades: pd.DataFrame) -> pd.DataFrame:
""" """
Removes duplicates from the trades list. Removes duplicates from the trades DataFrame.
Uses itertools.groupby to avoid converting to pandas. Uses pandas.DataFrame.drop_duplicates to remove duplicates based on the 'timestamp' column.
Tests show it as being pretty efficient on lists of 4M Lists. :param trades: DataFrame with the columns constants.DEFAULT_TRADES_COLUMNS
:param trades: List of Lists with constants.DEFAULT_TRADES_COLUMNS as columns :return: DataFrame with duplicates removed based on the 'timestamp' column
:return: same format as above, but with duplicates removed
""" """
return [i for i, _ in itertools.groupby(sorted(trades, key=itemgetter(0)))] return trades.drop_duplicates(subset=['timestamp', 'id'])
def trades_dict_to_list(trades: List[Dict]) -> TradeList: def trades_dict_to_list(trades: List[Dict]) -> TradeList:
@@ -215,7 +213,32 @@ def trades_dict_to_list(trades: List[Dict]) -> TradeList:
return [[t[col] for col in DEFAULT_TRADES_COLUMNS] for t in trades] return [[t[col] for col in DEFAULT_TRADES_COLUMNS] for t in trades]
def trades_to_ohlcv(trades: TradeList, timeframe: str) -> DataFrame: def trades_convert_types(trades: DataFrame) -> DataFrame:
"""
Convert Trades dtypes and add 'date' column
"""
trades = trades.astype(TRADES_DTYPES)
trades['date'] = to_datetime(trades['timestamp'], unit='ms', utc=True)
return trades
def trades_list_to_df(trades: TradeList, convert: bool = True):
"""
convert trades list to dataframe
:param trades: List of Lists with constants.DEFAULT_TRADES_COLUMNS as columns
"""
if not trades:
df = DataFrame(columns=DEFAULT_TRADES_COLUMNS)
else:
df = DataFrame(trades, columns=DEFAULT_TRADES_COLUMNS)
if convert:
df = trades_convert_types(df)
return df
def trades_to_ohlcv(trades: DataFrame, timeframe: str) -> DataFrame:
""" """
Converts trades list to OHLCV list Converts trades list to OHLCV list
:param trades: List of trades, as returned by ccxt.fetch_trades. :param trades: List of trades, as returned by ccxt.fetch_trades.
@@ -225,12 +248,9 @@ def trades_to_ohlcv(trades: TradeList, timeframe: str) -> DataFrame:
""" """
from freqtrade.exchange import timeframe_to_minutes from freqtrade.exchange import timeframe_to_minutes
timeframe_minutes = timeframe_to_minutes(timeframe) timeframe_minutes = timeframe_to_minutes(timeframe)
if not trades: if trades.empty:
raise ValueError('Trade-list empty.') raise ValueError('Trade-list empty.')
df = pd.DataFrame(trades, columns=DEFAULT_TRADES_COLUMNS) df = trades.set_index('date', drop=True)
df['timestamp'] = pd.to_datetime(df['timestamp'], unit='ms',
utc=True,)
df = df.set_index('timestamp')
df_new = df['price'].resample(f'{timeframe_minutes}min').ohlc() df_new = df['price'].resample(f'{timeframe_minutes}min').ohlc()
df_new['volume'] = df['amount'].resample(f'{timeframe_minutes}min').sum() df_new['volume'] = df['amount'].resample(f'{timeframe_minutes}min').sum()

View File

@@ -17,7 +17,7 @@ from freqtrade.constants import (FULL_DATAFRAME_THRESHOLD, Config, ListPairsWith
from freqtrade.data.history import load_pair_history from freqtrade.data.history import load_pair_history
from freqtrade.enums import CandleType, RPCMessageType, RunMode from freqtrade.enums import CandleType, RPCMessageType, RunMode
from freqtrade.exceptions import ExchangeError, OperationalException from freqtrade.exceptions import ExchangeError, OperationalException
from freqtrade.exchange import Exchange, timeframe_to_seconds from freqtrade.exchange import Exchange, timeframe_to_prev_date, timeframe_to_seconds
from freqtrade.exchange.types import OrderBook from freqtrade.exchange.types import OrderBook
from freqtrade.misc import append_candles_to_dataframe from freqtrade.misc import append_candles_to_dataframe
from freqtrade.rpc import RPCManager from freqtrade.rpc import RPCManager
@@ -46,6 +46,8 @@ class DataProvider:
self.__rpc = rpc self.__rpc = rpc
self.__cached_pairs: Dict[PairWithTimeframe, Tuple[DataFrame, datetime]] = {} self.__cached_pairs: Dict[PairWithTimeframe, Tuple[DataFrame, datetime]] = {}
self.__slice_index: Optional[int] = None self.__slice_index: Optional[int] = None
self.__slice_date: Optional[datetime] = None
self.__cached_pairs_backtesting: Dict[PairWithTimeframe, DataFrame] = {} self.__cached_pairs_backtesting: Dict[PairWithTimeframe, DataFrame] = {}
self.__producer_pairs_df: Dict[str, self.__producer_pairs_df: Dict[str,
Dict[PairWithTimeframe, Tuple[DataFrame, datetime]]] = {} Dict[PairWithTimeframe, Tuple[DataFrame, datetime]]] = {}
@@ -64,10 +66,19 @@ class DataProvider:
def _set_dataframe_max_index(self, limit_index: int): def _set_dataframe_max_index(self, limit_index: int):
""" """
Limit analyzed dataframe to max specified index. Limit analyzed dataframe to max specified index.
Only relevant in backtesting.
:param limit_index: dataframe index. :param limit_index: dataframe index.
""" """
self.__slice_index = limit_index self.__slice_index = limit_index
def _set_dataframe_max_date(self, limit_date: datetime):
"""
Limit infomrative dataframe to max specified index.
Only relevant in backtesting.
:param limit_date: "current date"
"""
self.__slice_date = limit_date
def _set_cached_df( def _set_cached_df(
self, self,
pair: str, pair: str,
@@ -284,7 +295,7 @@ class DataProvider:
def historic_ohlcv( def historic_ohlcv(
self, self,
pair: str, pair: str,
timeframe: Optional[str] = None, timeframe: str,
candle_type: str = '' candle_type: str = ''
) -> DataFrame: ) -> DataFrame:
""" """
@@ -307,10 +318,10 @@ class DataProvider:
timerange.subtract_start(tf_seconds * startup_candles) timerange.subtract_start(tf_seconds * startup_candles)
self.__cached_pairs_backtesting[saved_pair] = load_pair_history( self.__cached_pairs_backtesting[saved_pair] = load_pair_history(
pair=pair, pair=pair,
timeframe=timeframe or self._config['timeframe'], timeframe=timeframe,
datadir=self._config['datadir'], datadir=self._config['datadir'],
timerange=timerange, timerange=timerange,
data_format=self._config.get('dataformat_ohlcv', 'json'), data_format=self._config['dataformat_ohlcv'],
candle_type=_candle_type, candle_type=_candle_type,
) )
@@ -354,7 +365,13 @@ class DataProvider:
data = self.ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type) data = self.ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type)
else: else:
# Get historical OHLCV data (cached on disk). # Get historical OHLCV data (cached on disk).
timeframe = timeframe or self._config['timeframe']
data = self.historic_ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type) data = self.historic_ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type)
# Cut date to timeframe-specific date.
# This is necessary to prevent lookahead bias in callbacks through informative pairs.
if self.__slice_date:
cutoff_date = timeframe_to_prev_date(timeframe, self.__slice_date)
data = data.loc[data['date'] < cutoff_date]
if len(data) == 0: if len(data) == 0:
logger.warning(f"No data found for ({pair}, {timeframe}, {candle_type}).") logger.warning(f"No data found for ({pair}, {timeframe}, {candle_type}).")
return data return data

View File

@@ -4,7 +4,7 @@ from typing import Optional
from pandas import DataFrame, read_feather, to_datetime from pandas import DataFrame, read_feather, to_datetime
from freqtrade.configuration import TimeRange from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TradeList from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS
from freqtrade.enums import CandleType from freqtrade.enums import CandleType
from .idatahandler import IDataHandler from .idatahandler import IDataHandler
@@ -82,43 +82,41 @@ class FeatherDataHandler(IDataHandler):
""" """
raise NotImplementedError() raise NotImplementedError()
def trades_store(self, pair: str, data: TradeList) -> None: def _trades_store(self, pair: str, data: DataFrame) -> None:
""" """
Store trades data (list of Dicts) to file Store trades data (list of Dicts) to file
:param pair: Pair - used for filename :param pair: Pair - used for filename
:param data: List of Lists containing trade data, :param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS column sequence as in DEFAULT_TRADES_COLUMNS
""" """
filename = self._pair_trades_filename(self._datadir, pair) filename = self._pair_trades_filename(self._datadir, pair)
self.create_dir_if_needed(filename) self.create_dir_if_needed(filename)
data.reset_index(drop=True).to_feather(filename, compression_level=9, compression='lz4')
tradesdata = DataFrame(data, columns=DEFAULT_TRADES_COLUMNS) def trades_append(self, pair: str, data: DataFrame):
tradesdata.to_feather(filename, compression_level=9, compression='lz4')
def trades_append(self, pair: str, data: TradeList):
""" """
Append data to existing files Append data to existing files
:param pair: Pair - used for filename :param pair: Pair - used for filename
:param data: List of Lists containing trade data, :param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS column sequence as in DEFAULT_TRADES_COLUMNS
""" """
raise NotImplementedError() raise NotImplementedError()
def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList: def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> DataFrame:
""" """
Load a pair from file, either .json.gz or .json Load a pair from file, either .json.gz or .json
# TODO: respect timerange ... # TODO: respect timerange ...
:param pair: Load trades for this pair :param pair: Load trades for this pair
:param timerange: Timerange to load trades for - currently not implemented :param timerange: Timerange to load trades for - currently not implemented
:return: List of trades :return: Dataframe containing trades
""" """
filename = self._pair_trades_filename(self._datadir, pair) filename = self._pair_trades_filename(self._datadir, pair)
if not filename.exists(): if not filename.exists():
return [] return DataFrame(columns=DEFAULT_TRADES_COLUMNS)
tradesdata = read_feather(filename) tradesdata = read_feather(filename)
return tradesdata.values.tolist() return tradesdata
@classmethod @classmethod
def _get_file_extension(cls): def _get_file_extension(cls):

View File

@@ -5,7 +5,7 @@ import numpy as np
import pandas as pd import pandas as pd
from freqtrade.configuration import TimeRange from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TradeList from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS
from freqtrade.enums import CandleType from freqtrade.enums import CandleType
from .idatahandler import IDataHandler from .idatahandler import IDataHandler
@@ -100,42 +100,42 @@ class HDF5DataHandler(IDataHandler):
""" """
raise NotImplementedError() raise NotImplementedError()
def trades_store(self, pair: str, data: TradeList) -> None: def _trades_store(self, pair: str, data: pd.DataFrame) -> None:
""" """
Store trades data (list of Dicts) to file Store trades data (list of Dicts) to file
:param pair: Pair - used for filename :param pair: Pair - used for filename
:param data: List of Lists containing trade data, :param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS column sequence as in DEFAULT_TRADES_COLUMNS
""" """
key = self._pair_trades_key(pair) key = self._pair_trades_key(pair)
pd.DataFrame(data, columns=DEFAULT_TRADES_COLUMNS).to_hdf( data.to_hdf(
self._pair_trades_filename(self._datadir, pair), key, self._pair_trades_filename(self._datadir, pair), key,
mode='a', complevel=9, complib='blosc', mode='a', complevel=9, complib='blosc',
format='table', data_columns=['timestamp'] format='table', data_columns=['timestamp']
) )
def trades_append(self, pair: str, data: TradeList): def trades_append(self, pair: str, data: pd.DataFrame):
""" """
Append data to existing files Append data to existing files
:param pair: Pair - used for filename :param pair: Pair - used for filename
:param data: List of Lists containing trade data, :param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS column sequence as in DEFAULT_TRADES_COLUMNS
""" """
raise NotImplementedError() raise NotImplementedError()
def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList: def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> pd.DataFrame:
""" """
Load a pair from h5 file. Load a pair from h5 file.
:param pair: Load trades for this pair :param pair: Load trades for this pair
:param timerange: Timerange to load trades for - currently not implemented :param timerange: Timerange to load trades for - currently not implemented
:return: List of trades :return: Dataframe containing trades
""" """
key = self._pair_trades_key(pair) key = self._pair_trades_key(pair)
filename = self._pair_trades_filename(self._datadir, pair) filename = self._pair_trades_filename(self._datadir, pair)
if not filename.exists(): if not filename.exists():
return [] return pd.DataFrame(columns=DEFAULT_TRADES_COLUMNS)
where = [] where = []
if timerange: if timerange:
if timerange.starttype == 'date': if timerange.starttype == 'date':
@@ -145,7 +145,7 @@ class HDF5DataHandler(IDataHandler):
trades: pd.DataFrame = pd.read_hdf(filename, key=key, mode="r", where=where) trades: pd.DataFrame = pd.read_hdf(filename, key=key, mode="r", where=where)
trades[['id', 'type']] = trades[['id', 'type']].replace({np.nan: None}) trades[['id', 'type']] = trades[['id', 'type']].replace({np.nan: None})
return trades.values.tolist() return trades
@classmethod @classmethod
def _get_file_extension(cls): def _get_file_extension(cls):

View File

@@ -10,14 +10,16 @@ from freqtrade.configuration import TimeRange
from freqtrade.constants import (DATETIME_PRINT_FORMAT, DEFAULT_DATAFRAME_COLUMNS, from freqtrade.constants import (DATETIME_PRINT_FORMAT, DEFAULT_DATAFRAME_COLUMNS,
DL_DATA_TIMEFRAMES, Config) DL_DATA_TIMEFRAMES, Config)
from freqtrade.data.converter import (clean_ohlcv_dataframe, ohlcv_to_dataframe, from freqtrade.data.converter import (clean_ohlcv_dataframe, ohlcv_to_dataframe,
trades_remove_duplicates, trades_to_ohlcv) trades_df_remove_duplicates, trades_list_to_df,
trades_to_ohlcv)
from freqtrade.data.history.idatahandler import IDataHandler, get_datahandler from freqtrade.data.history.idatahandler import IDataHandler, get_datahandler
from freqtrade.enums import CandleType from freqtrade.enums import CandleType
from freqtrade.exceptions import OperationalException from freqtrade.exceptions import OperationalException
from freqtrade.exchange import Exchange from freqtrade.exchange import Exchange
from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist
from freqtrade.util import format_ms_time from freqtrade.util import dt_ts, format_ms_time
from freqtrade.util.binance_mig import migrate_binance_futures_data from freqtrade.util.binance_mig import migrate_binance_futures_data
from freqtrade.util.datetime_helpers import dt_now
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -69,7 +71,7 @@ def load_data(datadir: Path,
fill_up_missing: bool = True, fill_up_missing: bool = True,
startup_candles: int = 0, startup_candles: int = 0,
fail_without_data: bool = False, fail_without_data: bool = False,
data_format: str = 'json', data_format: str = 'feather',
candle_type: CandleType = CandleType.SPOT, candle_type: CandleType = CandleType.SPOT,
user_futures_funding_rate: Optional[int] = None, user_futures_funding_rate: Optional[int] = None,
) -> Dict[str, DataFrame]: ) -> Dict[str, DataFrame]:
@@ -349,24 +351,27 @@ def _download_trades_history(exchange: Exchange,
# DEFAULT_TRADES_COLUMNS: 0 -> timestamp # DEFAULT_TRADES_COLUMNS: 0 -> timestamp
# DEFAULT_TRADES_COLUMNS: 1 -> id # DEFAULT_TRADES_COLUMNS: 1 -> id
if trades and since < trades[0][0]: if not trades.empty and since > 0 and since < trades.iloc[0]['timestamp']:
# since is before the first trade # since is before the first trade
logger.info(f"Start earlier than available data. Redownloading trades for {pair}...") logger.info(f"Start ({trades.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}) earlier than "
trades = [] f"available data. Redownloading trades for {pair}...")
trades = trades_list_to_df([])
if not since: from_id = trades.iloc[-1]['id'] if not trades.empty else None
since = int((datetime.now() - timedelta(days=new_pairs_days)).timestamp()) * 1000 if not trades.empty and since < trades.iloc[-1]['timestamp']:
from_id = trades[-1][1] if trades else None
if trades and since < trades[-1][0]:
# Reset since to the last available point # Reset since to the last available point
# - 5 seconds (to ensure we're getting all trades) # - 5 seconds (to ensure we're getting all trades)
since = trades[-1][0] - (5 * 1000) since = trades.iloc[-1]['timestamp'] - (5 * 1000)
logger.info(f"Using last trade date -5s - Downloading trades for {pair} " logger.info(f"Using last trade date -5s - Downloading trades for {pair} "
f"since: {format_ms_time(since)}.") f"since: {format_ms_time(since)}.")
logger.debug(f"Current Start: {format_ms_time(trades[0][0]) if trades else 'None'}") if not since:
logger.debug(f"Current End: {format_ms_time(trades[-1][0]) if trades else 'None'}") since = dt_ts(dt_now() - timedelta(days=new_pairs_days))
logger.debug("Current Start: %s", 'None' if trades.empty else
f"{trades.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}")
logger.debug("Current End: %s", 'None' if trades.empty else
f"{trades.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}")
logger.info(f"Current Amount of trades: {len(trades)}") logger.info(f"Current Amount of trades: {len(trades)}")
# Default since_ms to 30 days if nothing is given # Default since_ms to 30 days if nothing is given
@@ -375,13 +380,16 @@ def _download_trades_history(exchange: Exchange,
until=until, until=until,
from_id=from_id, from_id=from_id,
) )
trades.extend(new_trades[1]) new_trades_df = trades_list_to_df(new_trades[1])
trades = concat([trades, new_trades_df], axis=0)
# Remove duplicates to make sure we're not storing data we don't need # Remove duplicates to make sure we're not storing data we don't need
trades = trades_remove_duplicates(trades) trades = trades_df_remove_duplicates(trades)
data_handler.trades_store(pair, data=trades) data_handler.trades_store(pair, data=trades)
logger.debug(f"New Start: {format_ms_time(trades[0][0])}") logger.debug("New Start: %s", 'None' if trades.empty else
logger.debug(f"New End: {format_ms_time(trades[-1][0])}") f"{trades.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}")
logger.debug("New End: %s", 'None' if trades.empty else
f"{trades.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}")
logger.info(f"New Amount of trades: {len(trades)}") logger.info(f"New Amount of trades: {len(trades)}")
return True return True
@@ -394,7 +402,7 @@ def _download_trades_history(exchange: Exchange,
def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir: Path, def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir: Path,
timerange: TimeRange, new_pairs_days: int = 30, timerange: TimeRange, new_pairs_days: int = 30,
erase: bool = False, data_format: str = 'jsongz') -> List[str]: erase: bool = False, data_format: str = 'feather') -> List[str]:
""" """
Refresh stored trades data for backtesting and hyperopt operations. Refresh stored trades data for backtesting and hyperopt operations.
Used by freqtrade download-data subcommand. Used by freqtrade download-data subcommand.
@@ -427,8 +435,8 @@ def convert_trades_to_ohlcv(
datadir: Path, datadir: Path,
timerange: TimeRange, timerange: TimeRange,
erase: bool = False, erase: bool = False,
data_format_ohlcv: str = 'json', data_format_ohlcv: str = 'feather',
data_format_trades: str = 'jsongz', data_format_trades: str = 'feather',
candle_type: CandleType = CandleType.SPOT candle_type: CandleType = CandleType.SPOT
) -> None: ) -> None:
""" """

View File

@@ -15,8 +15,9 @@ from pandas import DataFrame
from freqtrade import misc from freqtrade import misc
from freqtrade.configuration import TimeRange from freqtrade.configuration import TimeRange
from freqtrade.constants import ListPairsWithTimeframes, TradeList from freqtrade.constants import DEFAULT_TRADES_COLUMNS, ListPairsWithTimeframes
from freqtrade.data.converter import clean_ohlcv_dataframe, trades_remove_duplicates, trim_dataframe from freqtrade.data.converter import (clean_ohlcv_dataframe, trades_convert_types,
trades_df_remove_duplicates, trim_dataframe)
from freqtrade.enums import CandleType, TradingMode from freqtrade.enums import CandleType, TradingMode
from freqtrade.exchange import timeframe_to_seconds from freqtrade.exchange import timeframe_to_seconds
@@ -170,32 +171,42 @@ class IDataHandler(ABC):
return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match] return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match]
@abstractmethod @abstractmethod
def trades_store(self, pair: str, data: TradeList) -> None: def _trades_store(self, pair: str, data: DataFrame) -> None:
""" """
Store trades data (list of Dicts) to file Store trades data (list of Dicts) to file
:param pair: Pair - used for filename :param pair: Pair - used for filename
:param data: List of Lists containing trade data, :param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS column sequence as in DEFAULT_TRADES_COLUMNS
""" """
@abstractmethod @abstractmethod
def trades_append(self, pair: str, data: TradeList): def trades_append(self, pair: str, data: DataFrame):
""" """
Append data to existing files Append data to existing files
:param pair: Pair - used for filename :param pair: Pair - used for filename
:param data: List of Lists containing trade data, :param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS column sequence as in DEFAULT_TRADES_COLUMNS
""" """
@abstractmethod @abstractmethod
def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList: def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> DataFrame:
""" """
Load a pair from file, either .json.gz or .json Load a pair from file, either .json.gz or .json
:param pair: Load trades for this pair :param pair: Load trades for this pair
:param timerange: Timerange to load trades for - currently not implemented :param timerange: Timerange to load trades for - currently not implemented
:return: List of trades :return: Dataframe containing trades
""" """
def trades_store(self, pair: str, data: DataFrame) -> None:
"""
Store trades data (list of Dicts) to file
:param pair: Pair - used for filename
:param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS
"""
# Filter on expected columns (will remove the actual date column).
self._trades_store(pair, data[DEFAULT_TRADES_COLUMNS])
def trades_purge(self, pair: str) -> bool: def trades_purge(self, pair: str) -> bool:
""" """
Remove data for this pair Remove data for this pair
@@ -208,7 +219,7 @@ class IDataHandler(ABC):
return True return True
return False return False
def trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList: def trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> DataFrame:
""" """
Load a pair from file, either .json.gz or .json Load a pair from file, either .json.gz or .json
Removes duplicates in the process. Removes duplicates in the process.
@@ -216,7 +227,10 @@ class IDataHandler(ABC):
:param timerange: Timerange to load trades for - currently not implemented :param timerange: Timerange to load trades for - currently not implemented
:return: List of trades :return: List of trades
""" """
return trades_remove_duplicates(self._trades_load(pair, timerange=timerange)) trades = trades_df_remove_duplicates(self._trades_load(pair, timerange=timerange))
trades = trades_convert_types(trades)
return trades
@classmethod @classmethod
def create_dir_if_needed(cls, datadir: Path): def create_dir_if_needed(cls, datadir: Path):
@@ -427,6 +441,6 @@ def get_datahandler(datadir: Path, data_format: Optional[str] = None,
""" """
if not data_handler: if not data_handler:
HandlerClass = get_datahandlerclass(data_format or 'json') HandlerClass = get_datahandlerclass(data_format or 'feather')
data_handler = HandlerClass(datadir) data_handler = HandlerClass(datadir)
return data_handler return data_handler

View File

@@ -6,8 +6,8 @@ from pandas import DataFrame, read_json, to_datetime
from freqtrade import misc from freqtrade import misc
from freqtrade.configuration import TimeRange from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, TradeList from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS
from freqtrade.data.converter import trades_dict_to_list from freqtrade.data.converter import trades_dict_to_list, trades_list_to_df
from freqtrade.enums import CandleType from freqtrade.enums import CandleType
from .idatahandler import IDataHandler from .idatahandler import IDataHandler
@@ -94,45 +94,46 @@ class JsonDataHandler(IDataHandler):
""" """
raise NotImplementedError() raise NotImplementedError()
def trades_store(self, pair: str, data: TradeList) -> None: def _trades_store(self, pair: str, data: DataFrame) -> None:
""" """
Store trades data (list of Dicts) to file Store trades data (list of Dicts) to file
:param pair: Pair - used for filename :param pair: Pair - used for filename
:param data: List of Lists containing trade data, :param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS column sequence as in DEFAULT_TRADES_COLUMNS
""" """
filename = self._pair_trades_filename(self._datadir, pair) filename = self._pair_trades_filename(self._datadir, pair)
misc.file_dump_json(filename, data, is_zip=self._use_zip) trades = data.values.tolist()
misc.file_dump_json(filename, trades, is_zip=self._use_zip)
def trades_append(self, pair: str, data: TradeList): def trades_append(self, pair: str, data: DataFrame):
""" """
Append data to existing files Append data to existing files
:param pair: Pair - used for filename :param pair: Pair - used for filename
:param data: List of Lists containing trade data, :param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS column sequence as in DEFAULT_TRADES_COLUMNS
""" """
raise NotImplementedError() raise NotImplementedError()
def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList: def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> DataFrame:
""" """
Load a pair from file, either .json.gz or .json Load a pair from file, either .json.gz or .json
# TODO: respect timerange ... # TODO: respect timerange ...
:param pair: Load trades for this pair :param pair: Load trades for this pair
:param timerange: Timerange to load trades for - currently not implemented :param timerange: Timerange to load trades for - currently not implemented
:return: List of trades :return: Dataframe containing trades
""" """
filename = self._pair_trades_filename(self._datadir, pair) filename = self._pair_trades_filename(self._datadir, pair)
tradesdata = misc.file_load_json(filename) tradesdata = misc.file_load_json(filename)
if not tradesdata: if not tradesdata:
return [] return DataFrame(columns=DEFAULT_TRADES_COLUMNS)
if isinstance(tradesdata[0], dict): if isinstance(tradesdata[0], dict):
# Convert trades dict to list # Convert trades dict to list
logger.info("Old trades format detected - converting") logger.info("Old trades format detected - converting")
tradesdata = trades_dict_to_list(tradesdata) tradesdata = trades_dict_to_list(tradesdata)
pass pass
return tradesdata return trades_list_to_df(tradesdata, convert=False)
@classmethod @classmethod
def _get_file_extension(cls): def _get_file_extension(cls):

View File

@@ -4,7 +4,7 @@ from typing import Optional
from pandas import DataFrame, read_parquet, to_datetime from pandas import DataFrame, read_parquet, to_datetime
from freqtrade.configuration import TimeRange from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, TradeList from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TradeList
from freqtrade.enums import CandleType from freqtrade.enums import CandleType
from .idatahandler import IDataHandler from .idatahandler import IDataHandler
@@ -81,25 +81,22 @@ class ParquetDataHandler(IDataHandler):
""" """
raise NotImplementedError() raise NotImplementedError()
def trades_store(self, pair: str, data: TradeList) -> None: def _trades_store(self, pair: str, data: DataFrame) -> None:
""" """
Store trades data (list of Dicts) to file Store trades data (list of Dicts) to file
:param pair: Pair - used for filename :param pair: Pair - used for filename
:param data: List of Lists containing trade data, :param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS column sequence as in DEFAULT_TRADES_COLUMNS
""" """
# filename = self._pair_trades_filename(self._datadir, pair) filename = self._pair_trades_filename(self._datadir, pair)
self.create_dir_if_needed(filename)
data.reset_index(drop=True).to_parquet(filename)
raise NotImplementedError() def trades_append(self, pair: str, data: DataFrame):
# array = pa.array(data)
# array
# feather.write_feather(data, filename)
def trades_append(self, pair: str, data: TradeList):
""" """
Append data to existing files Append data to existing files
:param pair: Pair - used for filename :param pair: Pair - used for filename
:param data: List of Lists containing trade data, :param data: Dataframe containing trades
column sequence as in DEFAULT_TRADES_COLUMNS column sequence as in DEFAULT_TRADES_COLUMNS
""" """
raise NotImplementedError() raise NotImplementedError()
@@ -112,14 +109,13 @@ class ParquetDataHandler(IDataHandler):
:param timerange: Timerange to load trades for - currently not implemented :param timerange: Timerange to load trades for - currently not implemented
:return: List of trades :return: List of trades
""" """
raise NotImplementedError() filename = self._pair_trades_filename(self._datadir, pair)
# filename = self._pair_trades_filename(self._datadir, pair) if not filename.exists():
# tradesdata = misc.file_load_json(filename) return DataFrame(columns=DEFAULT_TRADES_COLUMNS)
# if not tradesdata: tradesdata = read_parquet(filename)
# return []
# return tradesdata return tradesdata
@classmethod @classmethod
def _get_file_extension(cls): def _get_file_extension(cls):

View File

@@ -115,7 +115,7 @@ class Edge:
exchange=self.exchange, exchange=self.exchange,
timeframe=self.strategy.timeframe, timeframe=self.strategy.timeframe,
timerange=timerange_startup, timerange=timerange_startup,
data_format=self.config.get('dataformat_ohlcv', 'json'), data_format=self.config['dataformat_ohlcv'],
candle_type=self.config.get('candle_type_def', CandleType.SPOT), candle_type=self.config.get('candle_type_def', CandleType.SPOT),
) )
# Download informative pairs too # Download informative pairs too
@@ -132,7 +132,7 @@ class Edge:
exchange=self.exchange, exchange=self.exchange,
timeframe=timeframe, timeframe=timeframe,
timerange=timerange_startup, timerange=timerange_startup,
data_format=self.config.get('dataformat_ohlcv', 'json'), data_format=self.config['dataformat_ohlcv'],
candle_type=self.config.get('candle_type_def', CandleType.SPOT), candle_type=self.config.get('candle_type_def', CandleType.SPOT),
) )
@@ -142,7 +142,7 @@ class Edge:
timeframe=self.strategy.timeframe, timeframe=self.strategy.timeframe,
timerange=self._timerange, timerange=self._timerange,
startup_candles=self.strategy.startup_candle_count, startup_candles=self.strategy.startup_candle_count,
data_format=self.config.get('dataformat_ohlcv', 'json'), data_format=self.config['dataformat_ohlcv'],
candle_type=self.config.get('candle_type_def', CandleType.SPOT), candle_type=self.config.get('candle_type_def', CandleType.SPOT),
) )

File diff suppressed because it is too large Load Diff

View File

@@ -7,10 +7,10 @@ import ccxt
from freqtrade.constants import BuySell from freqtrade.constants import BuySell
from freqtrade.enums import MarginMode, PriceType, TradingMode from freqtrade.enums import MarginMode, PriceType, TradingMode
from freqtrade.enums.candletype import CandleType
from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
from freqtrade.exchange import Exchange from freqtrade.exchange import Exchange
from freqtrade.exchange.common import retrier from freqtrade.exchange.common import retrier
from freqtrade.exchange.exchange_utils import timeframe_to_msecs
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -27,7 +27,7 @@ class Bybit(Exchange):
""" """
_ft_has: Dict = { _ft_has: Dict = {
"ohlcv_candle_limit": 200, "ohlcv_candle_limit": 1000,
"ohlcv_has_history": True, "ohlcv_has_history": True,
} }
_ft_has_futures: Dict = { _ft_has_futures: Dict = {
@@ -91,28 +91,13 @@ class Bybit(Exchange):
except ccxt.BaseError as e: except ccxt.BaseError as e:
raise OperationalException(e) from e raise OperationalException(e) from e
async def _fetch_funding_rate_history( def ohlcv_candle_limit(
self, self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None) -> int:
pair: str,
timeframe: str, if candle_type in (CandleType.FUNDING_RATE):
limit: int, return 200
since_ms: Optional[int] = None,
) -> List[List]: return super().ohlcv_candle_limit(timeframe, candle_type, since_ms)
"""
Fetch funding rate history
Necessary workaround until https://github.com/ccxt/ccxt/issues/15990 is fixed.
"""
params = {}
if since_ms:
until = since_ms + (timeframe_to_msecs(timeframe) * self._ft_has['ohlcv_candle_limit'])
params.update({'until': until})
# Funding rate
data = await self._api_async.fetch_funding_rate_history(
pair, since=since_ms,
params=params)
# Convert funding rate to candle pattern
data = [[x['timestamp'], x['fundingRate'], 0, 0, 0, 0] for x in data]
return data
def _lev_prep(self, pair: str, leverage: float, side: BuySell, accept_fail: bool = False): def _lev_prep(self, pair: str, leverage: float, side: BuySell, accept_fail: bool = False):
if self.trading_mode != TradingMode.SPOT: if self.trading_mode != TradingMode.SPOT:

View File

@@ -5,6 +5,7 @@ Cryptocurrency Exchanges support
import asyncio import asyncio
import inspect import inspect
import logging import logging
import signal
from copy import deepcopy from copy import deepcopy
from datetime import datetime, timedelta, timezone from datetime import datetime, timedelta, timezone
from math import floor from math import floor
@@ -263,8 +264,6 @@ class Exchange:
except ccxt.BaseError as e: except ccxt.BaseError as e:
raise OperationalException(f"Initialization of ccxt failed. Reason: {e}") from e raise OperationalException(f"Initialization of ccxt failed. Reason: {e}") from e
self.set_sandbox(api, exchange_config, name)
return api return api
@property @property
@@ -465,16 +464,6 @@ class Exchange:
return amount_to_contract_precision(amount, self.get_precision_amount(pair), return amount_to_contract_precision(amount, self.get_precision_amount(pair),
self.precisionMode, contract_size) self.precisionMode, contract_size)
def set_sandbox(self, api: ccxt.Exchange, exchange_config: dict, name: str) -> None:
if exchange_config.get('sandbox'):
if api.urls.get('test'):
api.urls['api'] = api.urls['test']
logger.info("Enabled Sandbox API on %s", name)
else:
logger.warning(
f"No Sandbox URL in CCXT for {name}, exiting. Please check your config.json")
raise OperationalException(f'Exchange {name} does not provide a sandbox api')
def _load_async_markets(self, reload: bool = False) -> None: def _load_async_markets(self, reload: bool = False) -> None:
try: try:
if self._api_async: if self._api_async:
@@ -580,7 +569,7 @@ class Exchange:
for pair in [f"{curr_1}/{curr_2}", f"{curr_2}/{curr_1}"]: for pair in [f"{curr_1}/{curr_2}", f"{curr_2}/{curr_1}"]:
if pair in self.markets and self.markets[pair].get('active'): if pair in self.markets and self.markets[pair].get('active'):
return pair return pair
raise ExchangeError(f"Could not combine {curr_1} and {curr_2} to get a valid pair.") raise ValueError(f"Could not combine {curr_1} and {curr_2} to get a valid pair.")
def validate_timeframes(self, timeframe: Optional[str]) -> None: def validate_timeframes(self, timeframe: Optional[str]) -> None:
""" """
@@ -1876,7 +1865,7 @@ class Exchange:
tick = self.fetch_ticker(comb) tick = self.fetch_ticker(comb)
fee_to_quote_rate = safe_value_fallback2(tick, tick, 'last', 'ask') fee_to_quote_rate = safe_value_fallback2(tick, tick, 'last', 'ask')
except ExchangeError: except (ValueError, ExchangeError):
fee_to_quote_rate = self._config['exchange'].get('unknown_fee_rate', None) fee_to_quote_rate = self._config['exchange'].get('unknown_fee_rate', None)
if not fee_to_quote_rate: if not fee_to_quote_rate:
return None return None
@@ -2163,7 +2152,7 @@ class Exchange:
except IndexError: except IndexError:
logger.exception("Error loading %s. Result was %s.", pair, data) logger.exception("Error loading %s. Result was %s.", pair, data)
return pair, timeframe, candle_type, [], self._ohlcv_partial_candle return pair, timeframe, candle_type, [], self._ohlcv_partial_candle
logger.debug("Done fetching pair %s, interval %s ...", pair, timeframe) logger.debug("Done fetching pair %s, %s interval %s...", pair, candle_type, timeframe)
return pair, timeframe, candle_type, data, self._ohlcv_partial_candle return pair, timeframe, candle_type, data, self._ohlcv_partial_candle
except ccxt.NotSupported as e: except ccxt.NotSupported as e:
@@ -2265,20 +2254,24 @@ class Exchange:
from_id = t[-1][1] from_id = t[-1][1]
trades.extend(t[:-1]) trades.extend(t[:-1])
while True: while True:
t = await self._async_fetch_trades(pair, try:
params={self._trades_pagination_arg: from_id}) t = await self._async_fetch_trades(pair,
if t: params={self._trades_pagination_arg: from_id})
# Skip last id since its the key for the next call if t:
trades.extend(t[:-1]) # Skip last id since its the key for the next call
if from_id == t[-1][1] or t[-1][0] > until: trades.extend(t[:-1])
logger.debug(f"Stopping because from_id did not change. " if from_id == t[-1][1] or t[-1][0] > until:
f"Reached {t[-1][0]} > {until}") logger.debug(f"Stopping because from_id did not change. "
# Reached the end of the defined-download period - add last trade as well. f"Reached {t[-1][0]} > {until}")
trades.extend(t[-1:]) # Reached the end of the defined-download period - add last trade as well.
break trades.extend(t[-1:])
break
from_id = t[-1][1] from_id = t[-1][1]
else: else:
break
except asyncio.CancelledError:
logger.debug("Async operation Interrupted, breaking trades DL loop.")
break break
return (pair, trades) return (pair, trades)
@@ -2298,16 +2291,20 @@ class Exchange:
# DEFAULT_TRADES_COLUMNS: 0 -> timestamp # DEFAULT_TRADES_COLUMNS: 0 -> timestamp
# DEFAULT_TRADES_COLUMNS: 1 -> id # DEFAULT_TRADES_COLUMNS: 1 -> id
while True: while True:
t = await self._async_fetch_trades(pair, since=since) try:
if t: t = await self._async_fetch_trades(pair, since=since)
since = t[-1][0] if t:
trades.extend(t) since = t[-1][0]
# Reached the end of the defined-download period trades.extend(t)
if until and t[-1][0] > until: # Reached the end of the defined-download period
logger.debug( if until and t[-1][0] > until:
f"Stopping because until was reached. {t[-1][0]} > {until}") logger.debug(
f"Stopping because until was reached. {t[-1][0]} > {until}")
break
else:
break break
else: except asyncio.CancelledError:
logger.debug("Async operation Interrupted, breaking trades DL loop.")
break break
return (pair, trades) return (pair, trades)
@@ -2356,9 +2353,16 @@ class Exchange:
raise OperationalException("This exchange does not support downloading Trades.") raise OperationalException("This exchange does not support downloading Trades.")
with self._loop_lock: with self._loop_lock:
return self.loop.run_until_complete( task = asyncio.ensure_future(self._async_get_trade_history(
self._async_get_trade_history(pair=pair, since=since, pair=pair, since=since, until=until, from_id=from_id))
until=until, from_id=from_id))
for sig in [signal.SIGINT, signal.SIGTERM]:
try:
self.loop.add_signal_handler(sig, task.cancel)
except NotImplementedError:
# Not all platforms implement signals (e.g. windows)
pass
return self.loop.run_until_complete(task)
@retrier @retrier
def _get_funding_fees_from_exchange(self, pair: str, since: Union[datetime, int]) -> float: def _get_funding_fees_from_exchange(self, pair: str, since: Union[datetime, int]) -> float:

View File

@@ -11,6 +11,8 @@ from gymnasium import spaces
from gymnasium.utils import seeding from gymnasium.utils import seeding
from pandas import DataFrame from pandas import DataFrame
from freqtrade.exceptions import OperationalException
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -80,8 +82,9 @@ class BaseEnvironment(gym.Env):
self.can_short: bool = can_short self.can_short: bool = can_short
self.live: bool = live self.live: bool = live
if not self.live and self.add_state_info: if not self.live and self.add_state_info:
self.add_state_info = False raise OperationalException("`add_state_info` is not available in backtesting. Change "
logger.warning("add_state_info is not available in backtesting. Deactivating.") "parameter to false in your rl_config. See `add_state_info` "
"docs for more info.")
self.seed(seed) self.seed(seed)
self.reset_env(df, prices, window_size, reward_kwargs, starting_point) self.reset_env(df, prices, window_size, reward_kwargs, starting_point)

View File

@@ -375,7 +375,7 @@ class FreqaiDataDrawer:
num_keep = self.freqai_info["purge_old_models"] num_keep = self.freqai_info["purge_old_models"]
if not num_keep: if not num_keep:
return return
elif type(num_keep) == bool: elif isinstance(num_keep, bool):
num_keep = 2 num_keep = 2
model_folders = [x for x in self.full_path.iterdir() if x.is_dir()] model_folders = [x for x in self.full_path.iterdir() if x.is_dir()]
@@ -635,7 +635,7 @@ class FreqaiDataDrawer:
timeframe=tf, timeframe=tf,
pair=pair, pair=pair,
timerange=timerange, timerange=timerange,
data_format=self.config.get("dataformat_ohlcv", "json"), data_format=self.config.get("dataformat_ohlcv", "feather"),
candle_type=self.config.get("candle_type_def", CandleType.SPOT), candle_type=self.config.get("candle_type_def", CandleType.SPOT),
) )

View File

@@ -26,9 +26,9 @@ class PyTorchMLPClassifier(BasePyTorchClassifier):
"model_training_parameters" : { "model_training_parameters" : {
"learning_rate": 3e-4, "learning_rate": 3e-4,
"trainer_kwargs": { "trainer_kwargs": {
"max_iters": 5000, "n_steps": 5000,
"batch_size": 64, "batch_size": 64,
"max_n_eval_batches": null, "n_epochs": null,
}, },
"model_kwargs": { "model_kwargs": {
"hidden_dim": 512, "hidden_dim": 512,

View File

@@ -27,9 +27,9 @@ class PyTorchMLPRegressor(BasePyTorchRegressor):
"model_training_parameters" : { "model_training_parameters" : {
"learning_rate": 3e-4, "learning_rate": 3e-4,
"trainer_kwargs": { "trainer_kwargs": {
"max_iters": 5000, "n_steps": 5000,
"batch_size": 64, "batch_size": 64,
"max_n_eval_batches": null, "n_epochs": null,
}, },
"model_kwargs": { "model_kwargs": {
"hidden_dim": 512, "hidden_dim": 512,

View File

@@ -30,9 +30,9 @@ class PyTorchTransformerRegressor(BasePyTorchRegressor):
"model_training_parameters" : { "model_training_parameters" : {
"learning_rate": 3e-4, "learning_rate": 3e-4,
"trainer_kwargs": { "trainer_kwargs": {
"max_iters": 5000, "n_steps": 5000,
"batch_size": 64, "batch_size": 64,
"max_n_eval_batches": null "n_epochs": null
}, },
"model_kwargs": { "model_kwargs": {
"hidden_dim": 512, "hidden_dim": 512,

View File

@@ -1,5 +1,4 @@
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from typing import Optional
import pandas as pd import pandas as pd
import torch import torch
@@ -12,14 +11,14 @@ class PyTorchDataConvertor(ABC):
""" """
@abstractmethod @abstractmethod
def convert_x(self, df: pd.DataFrame, device: Optional[str] = None) -> torch.Tensor: def convert_x(self, df: pd.DataFrame, device: str) -> torch.Tensor:
""" """
:param df: "*_features" dataframe. :param df: "*_features" dataframe.
:param device: The device to use for training (e.g. 'cpu', 'cuda'). :param device: The device to use for training (e.g. 'cpu', 'cuda').
""" """
@abstractmethod @abstractmethod
def convert_y(self, df: pd.DataFrame, device: Optional[str] = None) -> torch.Tensor: def convert_y(self, df: pd.DataFrame, device: str) -> torch.Tensor:
""" """
:param df: "*_labels" dataframe. :param df: "*_labels" dataframe.
:param device: The device to use for training (e.g. 'cpu', 'cuda'). :param device: The device to use for training (e.g. 'cpu', 'cuda').
@@ -33,8 +32,8 @@ class DefaultPyTorchDataConvertor(PyTorchDataConvertor):
def __init__( def __init__(
self, self,
target_tensor_type: Optional[torch.dtype] = None, target_tensor_type: torch.dtype = torch.float32,
squeeze_target_tensor: bool = False squeeze_target_tensor: bool = False,
): ):
""" """
:param target_tensor_type: type of target tensor, for classification use :param target_tensor_type: type of target tensor, for classification use
@@ -45,23 +44,14 @@ class DefaultPyTorchDataConvertor(PyTorchDataConvertor):
self._target_tensor_type = target_tensor_type self._target_tensor_type = target_tensor_type
self._squeeze_target_tensor = squeeze_target_tensor self._squeeze_target_tensor = squeeze_target_tensor
def convert_x(self, df: pd.DataFrame, device: Optional[str] = None) -> torch.Tensor: def convert_x(self, df: pd.DataFrame, device: str) -> torch.Tensor:
x = torch.from_numpy(df.values).float() numpy_arrays = df.values
if device: x = torch.tensor(numpy_arrays, device=device, dtype=torch.float32)
x = x.to(device)
return x return x
def convert_y(self, df: pd.DataFrame, device: Optional[str] = None) -> torch.Tensor: def convert_y(self, df: pd.DataFrame, device: str) -> torch.Tensor:
y = torch.from_numpy(df.values) numpy_arrays = df.values
y = torch.tensor(numpy_arrays, device=device, dtype=self._target_tensor_type)
if self._target_tensor_type:
y = y.to(self._target_tensor_type)
if self._squeeze_target_tensor: if self._squeeze_target_tensor:
y = y.squeeze() y = y.squeeze()
if device:
y = y.to(device)
return y return y

View File

@@ -1,5 +1,4 @@
import logging import logging
import math
from pathlib import Path from pathlib import Path
from typing import Any, Dict, List, Optional from typing import Any, Dict, List, Optional
@@ -40,23 +39,27 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
state_dict and model_meta_data saved by self.save() method. state_dict and model_meta_data saved by self.save() method.
:param model_meta_data: Additional metadata about the model (optional). :param model_meta_data: Additional metadata about the model (optional).
:param data_convertor: convertor from pd.DataFrame to torch.tensor. :param data_convertor: convertor from pd.DataFrame to torch.tensor.
:param max_iters: The number of training iterations to run. :param n_steps: used to calculate n_epochs. The number of training iterations to run.
iteration here refers to the number of times we call iteration here refers to the number of times optimizer.step() is called.
self.optimizer.step(). used to calculate n_epochs. ignored if n_epochs is set.
:param n_epochs: The maximum number batches to use for evaluation.
:param batch_size: The size of the batches to use during training. :param batch_size: The size of the batches to use during training.
:param max_n_eval_batches: The maximum number batches to use for evaluation.
""" """
self.model = model self.model = model
self.optimizer = optimizer self.optimizer = optimizer
self.criterion = criterion self.criterion = criterion
self.model_meta_data = model_meta_data self.model_meta_data = model_meta_data
self.device = device self.device = device
self.max_iters: int = kwargs.get("max_iters", 100) self.n_epochs: Optional[int] = kwargs.get("n_epochs", 10)
self.n_steps: Optional[int] = kwargs.get("n_steps", None)
if self.n_steps is None and not self.n_epochs:
raise Exception("Either `n_steps` or `n_epochs` should be set.")
self.batch_size: int = kwargs.get("batch_size", 64) self.batch_size: int = kwargs.get("batch_size", 64)
self.max_n_eval_batches: Optional[int] = kwargs.get("max_n_eval_batches", None)
self.data_convertor = data_convertor self.data_convertor = data_convertor
self.window_size: int = window_size self.window_size: int = window_size
self.tb_logger = tb_logger self.tb_logger = tb_logger
self.test_batch_counter = 0
def fit(self, data_dictionary: Dict[str, pd.DataFrame], splits: List[str]): def fit(self, data_dictionary: Dict[str, pd.DataFrame], splits: List[str]):
""" """
@@ -72,55 +75,46 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
backpropagation. backpropagation.
- Updates the model's parameters using an optimizer. - Updates the model's parameters using an optimizer.
""" """
data_loaders_dictionary = self.create_data_loaders_dictionary(data_dictionary, splits)
epochs = self.calc_n_epochs(
n_obs=len(data_dictionary["train_features"]),
batch_size=self.batch_size,
n_iters=self.max_iters
)
self.model.train() self.model.train()
for epoch in range(1, epochs + 1):
for i, batch_data in enumerate(data_loaders_dictionary["train"]):
data_loaders_dictionary = self.create_data_loaders_dictionary(data_dictionary, splits)
n_obs = len(data_dictionary["train_features"])
n_epochs = self.n_epochs or self.calc_n_epochs(n_obs=n_obs)
batch_counter = 0
for _ in range(n_epochs):
for _, batch_data in enumerate(data_loaders_dictionary["train"]):
xb, yb = batch_data xb, yb = batch_data
xb.to(self.device) xb = xb.to(self.device)
yb.to(self.device) yb = yb.to(self.device)
yb_pred = self.model(xb) yb_pred = self.model(xb)
loss = self.criterion(yb_pred, yb) loss = self.criterion(yb_pred, yb)
self.optimizer.zero_grad(set_to_none=True) self.optimizer.zero_grad(set_to_none=True)
loss.backward() loss.backward()
self.optimizer.step() self.optimizer.step()
self.tb_logger.log_scalar("train_loss", loss.item(), i) self.tb_logger.log_scalar("train_loss", loss.item(), batch_counter)
batch_counter += 1
# evaluation # evaluation
if "test" in splits: if "test" in splits:
self.estimate_loss( self.estimate_loss(data_loaders_dictionary, "test")
data_loaders_dictionary,
self.max_n_eval_batches,
"test"
)
@torch.no_grad() @torch.no_grad()
def estimate_loss( def estimate_loss(
self, self,
data_loader_dictionary: Dict[str, DataLoader], data_loader_dictionary: Dict[str, DataLoader],
max_n_eval_batches: Optional[int],
split: str, split: str,
) -> None: ) -> None:
self.model.eval() self.model.eval()
n_batches = 0 for _, batch_data in enumerate(data_loader_dictionary[split]):
for i, batch_data in enumerate(data_loader_dictionary[split]):
if max_n_eval_batches and i > max_n_eval_batches:
n_batches += 1
break
xb, yb = batch_data xb, yb = batch_data
xb.to(self.device) xb = xb.to(self.device)
yb.to(self.device) yb = yb.to(self.device)
yb_pred = self.model(xb) yb_pred = self.model(xb)
loss = self.criterion(yb_pred, yb) loss = self.criterion(yb_pred, yb)
self.tb_logger.log_scalar(f"{split}_loss", loss.item(), i) self.tb_logger.log_scalar(f"{split}_loss", loss.item(), self.test_batch_counter)
self.test_batch_counter += 1
self.model.train() self.model.train()
@@ -148,31 +142,30 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
return data_loader_dictionary return data_loader_dictionary
@staticmethod def calc_n_epochs(self, n_obs: int) -> int:
def calc_n_epochs(n_obs: int, batch_size: int, n_iters: int) -> int:
""" """
Calculates the number of epochs required to reach the maximum number Calculates the number of epochs required to reach the maximum number
of iterations specified in the model training parameters. of iterations specified in the model training parameters.
the motivation here is that `max_iters` is easier to optimize and keep stable, the motivation here is that `n_steps` is easier to optimize and keep stable,
across different n_obs - the number of data points. across different n_obs - the number of data points.
""" """
assert isinstance(self.n_steps, int), "Either `n_steps` or `n_epochs` should be set."
n_batches = n_obs // self.batch_size
n_epochs = min(self.n_steps // n_batches, 1)
if n_epochs <= 10:
logger.warning(
f"Setting low n_epochs: {n_epochs}. "
f"Please consider increasing `n_steps` hyper-parameter."
)
n_batches = math.ceil(n_obs // batch_size) return n_epochs
epochs = math.ceil(n_iters // n_batches)
if epochs <= 10:
logger.warning("User set `max_iters` in such a way that the trainer will only perform "
f" {epochs} epochs. Please consider increasing this value accordingly")
if epochs <= 1:
logger.warning("Epochs set to 1. Please review your `max_iters` value")
epochs = 1
return epochs
def save(self, path: Path): def save(self, path: Path):
""" """
- Saving any nn.Module state_dict - Saving any nn.Module state_dict
- Saving model_meta_data, this dict should contain any additional data that the - Saving model_meta_data, this dict should contain any additional data that the
user needs to store. e.g class_names for classification models. user needs to store. e.g. class_names for classification models.
""" """
torch.save({ torch.save({

View File

@@ -50,7 +50,7 @@ def download_all_data_for_training(dp: DataProvider, config: Config) -> None:
timerange=timerange, timerange=timerange,
new_pairs_days=new_pairs_days, new_pairs_days=new_pairs_days,
erase=False, erase=False,
data_format=config.get("dataformat_ohlcv", "json"), data_format=config.get("dataformat_ohlcv", "feather"),
trading_mode=config.get("trading_mode", "spot"), trading_mode=config.get("trading_mode", "spot"),
prepend=config.get("prepend_data", False), prepend=config.get("prepend_data", False),
) )

View File

@@ -613,6 +613,8 @@ class FreqtradeBot(LoggingMixin):
for trade in Trade.get_open_trades(): for trade in Trade.get_open_trades():
# If there is any open orders, wait for them to finish. # If there is any open orders, wait for them to finish.
if trade.open_order_id is None: if trade.open_order_id is None:
# Do a wallets update (will be ratelimited to once per hour)
self.wallets.update(False)
try: try:
self.check_and_call_adjust_trade_position(trade) self.check_and_call_adjust_trade_position(trade)
except DependencyException as exception: except DependencyException as exception:

View File

@@ -192,30 +192,6 @@ def plural(num: float, singular: str, plural: Optional[str] = None) -> str:
return singular if (num == 1 or num == -1) else plural or singular + 's' return singular if (num == 1 or num == -1) else plural or singular + 's'
def render_template(templatefile: str, arguments: dict = {}) -> str:
from jinja2 import Environment, PackageLoader, select_autoescape
env = Environment(
loader=PackageLoader('freqtrade', 'templates'),
autoescape=select_autoescape(['html', 'xml'])
)
template = env.get_template(templatefile)
return template.render(**arguments)
def render_template_with_fallback(templatefile: str, templatefallbackfile: str,
arguments: dict = {}) -> str:
"""
Use templatefile if possible, otherwise fall back to templatefallbackfile
"""
from jinja2.exceptions import TemplateNotFound
try:
return render_template(templatefile, arguments)
except TemplateNotFound:
return render_template(templatefallbackfile, arguments)
def chunks(lst: List[Any], n: int) -> Iterator[List[Any]]: def chunks(lst: List[Any], n: int) -> Iterator[List[Any]]:
""" """
Split lst into chunks of the size n. Split lst into chunks of the size n.

View File

@@ -39,6 +39,7 @@ from freqtrade.plugins.protectionmanager import ProtectionManager
from freqtrade.resolvers import ExchangeResolver, StrategyResolver from freqtrade.resolvers import ExchangeResolver, StrategyResolver
from freqtrade.strategy.interface import IStrategy from freqtrade.strategy.interface import IStrategy
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
from freqtrade.types import BacktestResultType, get_BacktestResultType_default
from freqtrade.util.binance_mig import migrate_binance_futures_data from freqtrade.util.binance_mig import migrate_binance_futures_data
from freqtrade.wallets import Wallets from freqtrade.wallets import Wallets
@@ -77,7 +78,7 @@ class Backtesting:
LoggingMixin.show_output = False LoggingMixin.show_output = False
self.config = config self.config = config
self.results: Dict[str, Any] = {} self.results: BacktestResultType = get_BacktestResultType_default()
self.trade_id_counter: int = 0 self.trade_id_counter: int = 0
self.order_id_counter: int = 0 self.order_id_counter: int = 0
@@ -239,7 +240,7 @@ class Backtesting:
timerange=self.timerange, timerange=self.timerange,
startup_candles=self.config['startup_candle_count'], startup_candles=self.config['startup_candle_count'],
fail_without_data=True, fail_without_data=True,
data_format=self.config.get('dataformat_ohlcv', 'json'), data_format=self.config['dataformat_ohlcv'],
candle_type=self.config.get('candle_type_def', CandleType.SPOT) candle_type=self.config.get('candle_type_def', CandleType.SPOT)
) )
@@ -268,7 +269,7 @@ class Backtesting:
timerange=self.timerange, timerange=self.timerange,
startup_candles=0, startup_candles=0,
fail_without_data=True, fail_without_data=True,
data_format=self.config.get('dataformat_ohlcv', 'json'), data_format=self.config['dataformat_ohlcv'],
candle_type=self.config.get('candle_type_def', CandleType.SPOT) candle_type=self.config.get('candle_type_def', CandleType.SPOT)
) )
else: else:
@@ -282,7 +283,7 @@ class Backtesting:
timerange=self.timerange, timerange=self.timerange,
startup_candles=0, startup_candles=0,
fail_without_data=True, fail_without_data=True,
data_format=self.config.get('dataformat_ohlcv', 'json'), data_format=self.config['dataformat_ohlcv'],
candle_type=CandleType.FUNDING_RATE candle_type=CandleType.FUNDING_RATE
) )
@@ -294,7 +295,7 @@ class Backtesting:
timerange=self.timerange, timerange=self.timerange,
startup_candles=0, startup_candles=0,
fail_without_data=True, fail_without_data=True,
data_format=self.config.get('dataformat_ohlcv', 'json'), data_format=self.config['dataformat_ohlcv'],
candle_type=CandleType.from_string(self.exchange.get_option("mark_ohlcv_price")) candle_type=CandleType.from_string(self.exchange.get_option("mark_ohlcv_price"))
) )
# Combine data to avoid combining the data per trade. # Combine data to avoid combining the data per trade.
@@ -368,13 +369,14 @@ class Backtesting:
# Cleanup from prior runs # Cleanup from prior runs
pair_data.drop(HEADERS[5:] + ['buy', 'sell'], axis=1, errors='ignore') pair_data.drop(HEADERS[5:] + ['buy', 'sell'], axis=1, errors='ignore')
df_analyzed = self.strategy.ft_advise_signals(pair_data, {'pair': pair}) df_analyzed = self.strategy.ft_advise_signals(pair_data, {'pair': pair})
# Trim startup period from analyzed dataframe
df_analyzed = processed[pair] = pair_data = trim_dataframe(
df_analyzed, self.timerange, startup_candles=self.required_startup)
# Update dataprovider cache # Update dataprovider cache
self.dataprovider._set_cached_df( self.dataprovider._set_cached_df(
pair, self.timeframe, df_analyzed, self.config['candle_type_def']) pair, self.timeframe, df_analyzed, self.config['candle_type_def'])
# Trim startup period from analyzed dataframe
df_analyzed = processed[pair] = pair_data = trim_dataframe(
df_analyzed, self.timerange, startup_candles=self.required_startup)
# Create a copy of the dataframe before shifting, that way the entry signal/tag # Create a copy of the dataframe before shifting, that way the entry signal/tag
# remains on the correct candle for callbacks. # remains on the correct candle for callbacks.
df_analyzed = df_analyzed.copy() df_analyzed = df_analyzed.copy()
@@ -566,8 +568,7 @@ class Backtesting:
pos_trade = self._get_exit_for_signal(trade, row, exit_, amount) pos_trade = self._get_exit_for_signal(trade, row, exit_, amount)
if pos_trade is not None: if pos_trade is not None:
order = pos_trade.orders[-1] order = pos_trade.orders[-1]
if self._get_order_filled(order.ft_price, row): if self._try_close_open_order(order, trade, current_date, row):
order.close_bt_order(current_date, trade)
trade.recalc_trade_from_orders() trade.recalc_trade_from_orders()
self.wallets.update() self.wallets.update()
return pos_trade return pos_trade
@@ -578,6 +579,19 @@ class Backtesting:
""" Rate is within candle, therefore filled""" """ Rate is within candle, therefore filled"""
return row[LOW_IDX] <= rate <= row[HIGH_IDX] return row[LOW_IDX] <= rate <= row[HIGH_IDX]
def _try_close_open_order(
self, order: Optional[Order], trade: LocalTrade, current_date: datetime,
row: Tuple) -> bool:
"""
Check if an order is open and if it should've filled.
:return: True if the order filled.
"""
if order and self._get_order_filled(order.ft_price, row):
order.close_bt_order(current_date, trade)
trade.open_order_id = None
return True
return False
def _get_exit_for_signal( def _get_exit_for_signal(
self, trade: LocalTrade, row: Tuple, exit_: ExitCheckTuple, self, trade: LocalTrade, row: Tuple, exit_: ExitCheckTuple,
amount: Optional[float] = None) -> Optional[LocalTrade]: amount: Optional[float] = None) -> Optional[LocalTrade]:
@@ -902,9 +916,7 @@ class Backtesting:
) )
order._trade_bt = trade order._trade_bt = trade
trade.orders.append(order) trade.orders.append(order)
if pos_adjust and self._get_order_filled(order.ft_price, row): if not self._try_close_open_order(order, trade, current_time, row):
order.close_bt_order(current_time, trade)
else:
trade.open_order_id = str(self.order_id_counter) trade.open_order_id = str(self.order_id_counter)
trade.recalc_trade_from_orders() trade.recalc_trade_from_orders()
@@ -1120,23 +1132,18 @@ class Backtesting:
for trade in list(LocalTrade.bt_trades_open_pp[pair]): for trade in list(LocalTrade.bt_trades_open_pp[pair]):
# 3. Process entry orders. # 3. Process entry orders.
order = trade.select_order(trade.entry_side, is_open=True) order = trade.select_order(trade.entry_side, is_open=True)
if order and self._get_order_filled(order.ft_price, row): if self._try_close_open_order(order, trade, current_time, row):
order.close_bt_order(current_time, trade)
trade.open_order_id = None
self.wallets.update() self.wallets.update()
# 4. Create exit orders (if any) # 4. Create exit orders (if any)
if not trade.open_order_id: if not trade.open_order_id:
self._check_trade_exit(trade, row) # Place exit order if necessary self._check_trade_exit(trade, row) # Place exit order if necessary
# 5. Process exit orders. # 5. Process exit orders.
order = trade.select_order(trade.exit_side, is_open=True) order = trade.select_order(trade.exit_side, is_open=True)
if order and self._get_order_filled(order.ft_price, row): if order and self._try_close_open_order(order, trade, current_time, row):
order.close_bt_order(current_time, trade)
trade.open_order_id = None
sub_trade = order.safe_amount_after_fee != trade.amount sub_trade = order.safe_amount_after_fee != trade.amount
if sub_trade: if sub_trade:
order.close_bt_order(current_time, trade)
trade.recalc_trade_from_orders() trade.recalc_trade_from_orders()
else: else:
trade.close_date = current_time trade.close_date = current_time
@@ -1190,7 +1197,8 @@ class Backtesting:
row_index += 1 row_index += 1
indexes[pair] = row_index indexes[pair] = row_index
self.dataprovider._set_dataframe_max_index(row_index) self.dataprovider._set_dataframe_max_index(self.required_startup + row_index)
self.dataprovider._set_dataframe_max_date(current_time)
current_detail_time: datetime = row[DATE_IDX].to_pydatetime() current_detail_time: datetime = row[DATE_IDX].to_pydatetime()
trade_dir: Optional[LongShort] = self.check_for_trade_entry(row) trade_dir: Optional[LongShort] = self.check_for_trade_entry(row)
@@ -1223,12 +1231,14 @@ class Backtesting:
is_first = True is_first = True
current_time_det = current_time current_time_det = current_time
for det_row in detail_data[HEADERS].values.tolist(): for det_row in detail_data[HEADERS].values.tolist():
self.dataprovider._set_dataframe_max_date(current_time_det)
open_trade_count_start = self.backtest_loop( open_trade_count_start = self.backtest_loop(
det_row, pair, current_time_det, end_date, det_row, pair, current_time_det, end_date,
open_trade_count_start, trade_dir, is_first) open_trade_count_start, trade_dir, is_first)
current_time_det += timedelta(minutes=self.timeframe_detail_min) current_time_det += timedelta(minutes=self.timeframe_detail_min)
is_first = False is_first = False
else: else:
self.dataprovider._set_dataframe_max_date(current_time)
open_trade_count_start = self.backtest_loop( open_trade_count_start = self.backtest_loop(
row, pair, current_time, end_date, row, pair, current_time, end_date,
open_trade_count_start, trade_dir) open_trade_count_start, trade_dir)

View File

@@ -48,6 +48,7 @@ class LookaheadAnalysis:
self.entry_varHolders: List[VarHolder] = [] self.entry_varHolders: List[VarHolder] = []
self.exit_varHolders: List[VarHolder] = [] self.exit_varHolders: List[VarHolder] = []
self.exchange: Optional[Any] = None self.exchange: Optional[Any] = None
self._fee = None
# pull variables the scope of the lookahead_analysis-instance # pull variables the scope of the lookahead_analysis-instance
self.local_config = deepcopy(config) self.local_config = deepcopy(config)
@@ -145,8 +146,13 @@ class LookaheadAnalysis:
str(self.dt_to_timestamp(varholder.to_dt))) str(self.dt_to_timestamp(varholder.to_dt)))
prepare_data_config['exchange']['pair_whitelist'] = pairs_to_load prepare_data_config['exchange']['pair_whitelist'] = pairs_to_load
if self._fee is not None:
# Don't re-calculate fee per pair, as fee might differ per pair.
prepare_data_config['fee'] = self._fee
backtesting = Backtesting(prepare_data_config, self.exchange) backtesting = Backtesting(prepare_data_config, self.exchange)
self.exchange = backtesting.exchange self.exchange = backtesting.exchange
self._fee = backtesting.fee
backtesting._set_strategy(backtesting.strategylist[0]) backtesting._set_strategy(backtesting.strategylist[0])
varholder.data, varholder.timerange = backtesting.load_bt_data() varholder.data, varholder.timerange = backtesting.load_bt_data()
@@ -198,7 +204,7 @@ class LookaheadAnalysis:
self.prepare_data(exit_varHolder, [result_row['pair']]) self.prepare_data(exit_varHolder, [result_row['pair']])
# now we analyze a full trade of full_varholder and look for analyze its bias # now we analyze a full trade of full_varholder and look for analyze its bias
def analyze_row(self, idx, result_row): def analyze_row(self, idx: int, result_row):
# if force-sold, ignore this signal since here it will unconditionally exit. # if force-sold, ignore this signal since here it will unconditionally exit.
if result_row.close_date == self.dt_to_timestamp(self.full_varHolder.to_dt): if result_row.close_date == self.dt_to_timestamp(self.full_varHolder.to_dt):
return return
@@ -209,12 +215,16 @@ class LookaheadAnalysis:
# fill entry_varHolder and exit_varHolder # fill entry_varHolder and exit_varHolder
self.fill_entry_and_exit_varHolders(result_row) self.fill_entry_and_exit_varHolders(result_row)
# this will trigger a logger-message
buy_or_sell_biased: bool = False
# register if buy signal is broken # register if buy signal is broken
if not self.report_signal( if not self.report_signal(
self.entry_varHolders[idx].result, self.entry_varHolders[idx].result,
"open_date", "open_date",
self.entry_varHolders[idx].compared_dt): self.entry_varHolders[idx].compared_dt):
self.current_analysis.false_entry_signals += 1 self.current_analysis.false_entry_signals += 1
buy_or_sell_biased = True
# register if buy or sell signal is broken # register if buy or sell signal is broken
if not self.report_signal( if not self.report_signal(
@@ -222,6 +232,13 @@ class LookaheadAnalysis:
"close_date", "close_date",
self.exit_varHolders[idx].compared_dt): self.exit_varHolders[idx].compared_dt):
self.current_analysis.false_exit_signals += 1 self.current_analysis.false_exit_signals += 1
buy_or_sell_biased = True
if buy_or_sell_biased:
logger.info(f"found lookahead-bias in trade "
f"pair: {result_row['pair']}, "
f"timerange:{result_row['open_date']} - {result_row['close_date']}, "
f"idx: {idx}")
# check if the indicators themselves contain biased data # check if the indicators themselves contain biased data
self.analyze_indicators(self.full_varHolder, self.entry_varHolders[idx], result_row['pair']) self.analyze_indicators(self.full_varHolder, self.entry_varHolders[idx], result_row['pair'])
@@ -251,9 +268,33 @@ class LookaheadAnalysis:
# starting from the same datetime to avoid miss-reports of bias # starting from the same datetime to avoid miss-reports of bias
for idx, result_row in self.full_varHolder.result['results'].iterrows(): for idx, result_row in self.full_varHolder.result['results'].iterrows():
if self.current_analysis.total_signals == self.targeted_trade_amount: if self.current_analysis.total_signals == self.targeted_trade_amount:
logger.info(f"Found targeted trade amount = {self.targeted_trade_amount} signals.")
break break
if found_signals < self.minimum_trade_amount:
logger.info(f"only found {found_signals} "
f"which is smaller than "
f"minimum trade amount = {self.minimum_trade_amount}. "
f"Exiting this lookahead-analysis")
return None
if "force_exit" in result_row['exit_reason']:
logger.info("found force-exit in pair: {result_row['pair']}, "
f"timerange:{result_row['open_date']}-{result_row['close_date']}, "
f"idx: {idx}, skipping this one to avoid a false-positive.")
# just to keep the IDs of both full, entry and exit varholders the same
# to achieve a better debugging experience
self.entry_varHolders.append(VarHolder())
self.exit_varHolders.append(VarHolder())
continue
self.analyze_row(idx, result_row) self.analyze_row(idx, result_row)
if len(self.entry_varHolders) < self.minimum_trade_amount:
logger.info(f"only found {found_signals} after skipping forced exits "
f"which is smaller than "
f"minimum trade amount = {self.minimum_trade_amount}. "
f"Exiting this lookahead-analysis")
# Restore verbosity, so it's not too quiet for the next strategy # Restore verbosity, so it's not too quiet for the next strategy
restore_verbosity_for_bias_tester() restore_verbosity_for_bias_tester()
# check and report signals # check and report signals

View File

@@ -137,6 +137,19 @@ class LookaheadAnalysisSubFunctions:
'just to avoid false positives') 'just to avoid false positives')
config['dry_run_wallet'] = min_dry_run_wallet config['dry_run_wallet'] = min_dry_run_wallet
if 'timerange' not in config:
# setting a timerange is enforced here
raise OperationalException(
"Please set a timerange. "
"Usually a few months are enough depending on your needs and strategy."
)
# fix stake_amount to 10k.
# in a combination with a wallet size of 1 billion it should always be able to trade
# no matter if they use custom_stake_amount as a small percentage of wallet size
# or fixate custom_stake_amount to a certain value.
logger.info('fixing stake_amount to 10k')
config['stake_amount'] = 10000
# enforce cache to be 'none', shift it to 'none' if not already # enforce cache to be 'none', shift it to 'none' if not already
# (since the default value is 'day') # (since the default value is 'day')
if config.get('backtest_cache') is None: if config.get('backtest_cache') is None:

View File

@@ -6,6 +6,7 @@ from tabulate import tabulate
from freqtrade.constants import UNLIMITED_STAKE_AMOUNT, Config from freqtrade.constants import UNLIMITED_STAKE_AMOUNT, Config
from freqtrade.misc import decimals_per_coin, round_coin_value from freqtrade.misc import decimals_per_coin, round_coin_value
from freqtrade.optimize.optimize_reports.optimize_reports import generate_periodic_breakdown_stats from freqtrade.optimize.optimize_reports.optimize_reports import generate_periodic_breakdown_stats
from freqtrade.types import BacktestResultType
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -363,7 +364,7 @@ def show_backtest_result(strategy: str, results: Dict[str, Any], stake_currency:
print() print()
def show_backtest_results(config: Config, backtest_stats: Dict): def show_backtest_results(config: Config, backtest_stats: BacktestResultType):
stake_currency = config['stake_currency'] stake_currency = config['stake_currency']
for strategy, results in backtest_stats['strategy'].items(): for strategy, results in backtest_stats['strategy'].items():
@@ -383,7 +384,7 @@ def show_backtest_results(config: Config, backtest_stats: Dict):
print('\nFor more details, please look at the detail tables above') print('\nFor more details, please look at the detail tables above')
def show_sorted_pairlist(config: Config, backtest_stats: Dict): def show_sorted_pairlist(config: Config, backtest_stats: BacktestResultType):
if config.get('backtest_show_pair_list', False): if config.get('backtest_show_pair_list', False):
for strategy, results in backtest_stats['strategy'].items(): for strategy, results in backtest_stats['strategy'].items():
print(f"Pairs for Strategy {strategy}: \n[") print(f"Pairs for Strategy {strategy}: \n[")

View File

@@ -2,18 +2,17 @@ import logging
from pathlib import Path from pathlib import Path
from typing import Dict from typing import Dict
from pandas import DataFrame
from freqtrade.constants import LAST_BT_RESULT_FN from freqtrade.constants import LAST_BT_RESULT_FN
from freqtrade.misc import file_dump_joblib, file_dump_json from freqtrade.misc import file_dump_joblib, file_dump_json
from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename
from freqtrade.types import BacktestResultType
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def store_backtest_stats( def store_backtest_stats(
recordfilename: Path, stats: Dict[str, DataFrame], dtappendix: str) -> None: recordfilename: Path, stats: BacktestResultType, dtappendix: str) -> Path:
""" """
Stores backtest results Stores backtest results
:param recordfilename: Path object, which can either be a filename or a directory. :param recordfilename: Path object, which can either be a filename or a directory.
@@ -31,13 +30,19 @@ def store_backtest_stats(
# Store metadata separately. # Store metadata separately.
file_dump_json(get_backtest_metadata_filename(filename), stats['metadata']) file_dump_json(get_backtest_metadata_filename(filename), stats['metadata'])
del stats['metadata'] # Don't mutate the original stats dict.
stats_copy = {
'strategy': stats['strategy'],
'strategy_comparison': stats['strategy_comparison'],
}
file_dump_json(filename, stats) file_dump_json(filename, stats_copy)
latest_filename = Path.joinpath(filename.parent, LAST_BT_RESULT_FN) latest_filename = Path.joinpath(filename.parent, LAST_BT_RESULT_FN)
file_dump_json(latest_filename, {'latest_backtest': str(filename.name)}) file_dump_json(latest_filename, {'latest_backtest': str(filename.name)})
return filename
def _store_backtest_analysis_data( def _store_backtest_analysis_data(
recordfilename: Path, data: Dict[str, Dict], recordfilename: Path, data: Dict[str, Dict],

View File

@@ -11,6 +11,7 @@ from freqtrade.data.metrics import (calculate_cagr, calculate_calmar, calculate_
calculate_expectancy, calculate_market_change, calculate_expectancy, calculate_market_change,
calculate_max_drawdown, calculate_sharpe, calculate_sortino) calculate_max_drawdown, calculate_sharpe, calculate_sortino)
from freqtrade.misc import decimals_per_coin, round_coin_value from freqtrade.misc import decimals_per_coin, round_coin_value
from freqtrade.types import BacktestResultType
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -535,7 +536,7 @@ def generate_strategy_stats(pairlist: List[str],
def generate_backtest_stats(btdata: Dict[str, DataFrame], def generate_backtest_stats(btdata: Dict[str, DataFrame],
all_results: Dict[str, Dict[str, Union[DataFrame, Dict]]], all_results: Dict[str, Dict[str, Union[DataFrame, Dict]]],
min_date: datetime, max_date: datetime min_date: datetime, max_date: datetime
) -> Dict[str, Any]: ) -> BacktestResultType:
""" """
:param btdata: Backtest data :param btdata: Backtest data
:param all_results: backtest result - dictionary in the form: :param all_results: backtest result - dictionary in the form:
@@ -544,7 +545,7 @@ def generate_backtest_stats(btdata: Dict[str, DataFrame],
:param max_date: Backtest end date :param max_date: Backtest end date
:return: Dictionary containing results per strategy and a strategy summary. :return: Dictionary containing results per strategy and a strategy summary.
""" """
result: Dict[str, Any] = { result: BacktestResultType = {
'metadata': {}, 'metadata': {},
'strategy': {}, 'strategy': {},
'strategy_comparison': [], 'strategy_comparison': [],

View File

@@ -48,7 +48,7 @@ class Order(ModelBase):
id: Mapped[int] = mapped_column(Integer, primary_key=True) id: Mapped[int] = mapped_column(Integer, primary_key=True)
ft_trade_id: Mapped[int] = mapped_column(Integer, ForeignKey('trades.id'), index=True) ft_trade_id: Mapped[int] = mapped_column(Integer, ForeignKey('trades.id'), index=True)
_trade_live: Mapped["Trade"] = relationship("Trade", back_populates="orders") _trade_live: Mapped["Trade"] = relationship("Trade", back_populates="orders", lazy="immediate")
_trade_bt: "LocalTrade" = None # type: ignore _trade_bt: "LocalTrade" = None # type: ignore
# order_side can only be 'buy', 'sell' or 'stoploss' # order_side can only be 'buy', 'sell' or 'stoploss'
@@ -614,11 +614,9 @@ class LocalTrade:
""" """
Method used internally to set self.stop_loss. Method used internally to set self.stop_loss.
""" """
stop_loss_norm = price_to_precision(stop_loss, self.price_precision, self.precision_mode,
rounding_mode=ROUND_DOWN if self.is_short else ROUND_UP)
if not self.stop_loss: if not self.stop_loss:
self.initial_stop_loss = stop_loss_norm self.initial_stop_loss = stop_loss
self.stop_loss = stop_loss_norm self.stop_loss = stop_loss
self.stop_loss_pct = -1 * abs(percent) self.stop_loss_pct = -1 * abs(percent)
@@ -642,26 +640,27 @@ class LocalTrade:
else: else:
new_loss = float(current_price * (1 - abs(stoploss / leverage))) new_loss = float(current_price * (1 - abs(stoploss / leverage)))
stop_loss_norm = price_to_precision(new_loss, self.price_precision, self.precision_mode,
rounding_mode=ROUND_DOWN if self.is_short else ROUND_UP)
# no stop loss assigned yet # no stop loss assigned yet
if self.initial_stop_loss_pct is None or refresh: if self.initial_stop_loss_pct is None or refresh:
self.__set_stop_loss(new_loss, stoploss) self.__set_stop_loss(stop_loss_norm, stoploss)
self.initial_stop_loss = price_to_precision( self.initial_stop_loss = price_to_precision(
new_loss, self.price_precision, self.precision_mode, stop_loss_norm, self.price_precision, self.precision_mode,
rounding_mode=ROUND_DOWN if self.is_short else ROUND_UP) rounding_mode=ROUND_DOWN if self.is_short else ROUND_UP)
self.initial_stop_loss_pct = -1 * abs(stoploss) self.initial_stop_loss_pct = -1 * abs(stoploss)
# evaluate if the stop loss needs to be updated # evaluate if the stop loss needs to be updated
else: else:
higher_stop = stop_loss_norm > self.stop_loss
higher_stop = new_loss > self.stop_loss lower_stop = stop_loss_norm < self.stop_loss
lower_stop = new_loss < self.stop_loss
# stop losses only walk up, never down!, # stop losses only walk up, never down!,
# ? But adding more to a leveraged trade would create a lower liquidation price, # ? But adding more to a leveraged trade would create a lower liquidation price,
# ? decreasing the minimum stoploss # ? decreasing the minimum stoploss
if (higher_stop and not self.is_short) or (lower_stop and self.is_short): if (higher_stop and not self.is_short) or (lower_stop and self.is_short):
logger.debug(f"{self.pair} - Adjusting stoploss...") logger.debug(f"{self.pair} - Adjusting stoploss...")
self.__set_stop_loss(new_loss, stoploss) self.__set_stop_loss(stop_loss_norm, stoploss)
else: else:
logger.debug(f"{self.pair} - Keeping current stoploss...") logger.debug(f"{self.pair} - Keeping current stoploss...")
@@ -746,10 +745,8 @@ class LocalTrade:
self.open_order_id = None self.open_order_id = None
self.recalc_trade_from_orders(is_closing=True) self.recalc_trade_from_orders(is_closing=True)
if show_msg: if show_msg:
logger.info( logger.info(f"Marking {self} as closed as the trade is fulfilled "
'Marking %s as closed as the trade is fulfilled and found no open orders for it.', "and found no open orders for it.")
self
)
def update_fee(self, fee_cost: float, fee_currency: Optional[str], fee_rate: Optional[float], def update_fee(self, fee_cost: float, fee_currency: Optional[str], fee_rate: Optional[float],
side: str) -> None: side: str) -> None:
@@ -1035,7 +1032,8 @@ class LocalTrade:
def select_filled_orders(self, order_side: Optional[str] = None) -> List['Order']: def select_filled_orders(self, order_side: Optional[str] = None) -> List['Order']:
""" """
Finds filled orders for this orderside. Finds filled orders for this order side.
Will not return open orders which already partially filled.
:param order_side: Side of the order (either 'buy', 'sell', or None) :param order_side: Side of the order (either 'buy', 'sell', or None)
:return: array of Order objects :return: array of Order objects
""" """
@@ -1187,12 +1185,13 @@ class LocalTrade:
return LocalTrade.bt_open_open_trade_count return LocalTrade.bt_open_open_trade_count
@staticmethod @staticmethod
def stoploss_reinitialization(desired_stoploss): def stoploss_reinitialization(desired_stoploss: float):
""" """
Adjust initial Stoploss to desired stoploss for all open trades. Adjust initial Stoploss to desired stoploss for all open trades.
""" """
trade: Trade
for trade in Trade.get_open_trades(): for trade in Trade.get_open_trades():
logger.info("Found open trade: %s", trade) logger.info(f"Found open trade: {trade}")
# skip case if trailing-stop changed the stoploss already. # skip case if trailing-stop changed the stoploss already.
if (trade.stop_loss == trade.initial_stop_loss if (trade.stop_loss == trade.initial_stop_loss
@@ -1201,7 +1200,7 @@ class LocalTrade:
logger.info(f"Stoploss for {trade} needs adjustment...") logger.info(f"Stoploss for {trade} needs adjustment...")
# Force reset of stoploss # Force reset of stoploss
trade.stop_loss = None trade.stop_loss = 0.0
trade.initial_stop_loss_pct = None trade.initial_stop_loss_pct = None
trade.adjust_stop_loss(trade.open_rate, desired_stoploss) trade.adjust_stop_loss(trade.open_rate, desired_stoploss)
logger.info(f"New stoploss: {trade.stop_loss}.") logger.info(f"New stoploss: {trade.stop_loss}.")

View File

@@ -55,7 +55,7 @@ def init_plotscript(config, markets: List, startup_candles: int = 0):
timeframe=config['timeframe'], timeframe=config['timeframe'],
timerange=timerange, timerange=timerange,
startup_candles=startup_candles, startup_candles=startup_candles,
data_format=config.get('dataformat_ohlcv', 'json'), data_format=config['dataformat_ohlcv'],
candle_type=config.get('candle_type_def', CandleType.SPOT) candle_type=config.get('candle_type_def', CandleType.SPOT)
) )

View File

@@ -42,7 +42,7 @@ class IProtection(LoggingMixin, ABC):
self._stop_duration = (tf_in_min * self._stop_duration_candles) self._stop_duration = (tf_in_min * self._stop_duration_candles)
else: else:
self._stop_duration_candles = None self._stop_duration_candles = None
self._stop_duration = protection_config.get('stop_duration', 60) self._stop_duration = int(protection_config.get('stop_duration', 60))
if 'lookback_period_candles' in protection_config: if 'lookback_period_candles' in protection_config:
self._lookback_period_candles = int(protection_config.get('lookback_period_candles', 1)) self._lookback_period_candles = int(protection_config.get('lookback_period_candles', 1))
self._lookback_period = tf_in_min * self._lookback_period_candles self._lookback_period = tf_in_min * self._lookback_period_candles

View File

@@ -10,17 +10,19 @@ from fastapi.exceptions import HTTPException
from freqtrade.configuration.config_validation import validate_config_consistency from freqtrade.configuration.config_validation import validate_config_consistency
from freqtrade.constants import Config from freqtrade.constants import Config
from freqtrade.data.btanalysis import (delete_backtest_result, get_backtest_resultlist, from freqtrade.data.btanalysis import (delete_backtest_result, get_backtest_result,
load_and_merge_backtest_result) get_backtest_resultlist, load_and_merge_backtest_result,
update_backtest_metadata)
from freqtrade.enums import BacktestState from freqtrade.enums import BacktestState
from freqtrade.exceptions import DependencyException, OperationalException from freqtrade.exceptions import DependencyException, OperationalException
from freqtrade.exchange.common import remove_exchange_credentials from freqtrade.exchange.common import remove_exchange_credentials
from freqtrade.misc import deep_merge_dicts, is_file_in_dir from freqtrade.misc import deep_merge_dicts, is_file_in_dir
from freqtrade.rpc.api_server.api_schemas import (BacktestHistoryEntry, BacktestRequest, from freqtrade.rpc.api_server.api_schemas import (BacktestHistoryEntry, BacktestMetadataUpdate,
BacktestResponse) BacktestRequest, BacktestResponse)
from freqtrade.rpc.api_server.deps import get_config from freqtrade.rpc.api_server.deps import get_config
from freqtrade.rpc.api_server.webserver_bgwork import ApiBG from freqtrade.rpc.api_server.webserver_bgwork import ApiBG
from freqtrade.rpc.rpc import RPCException from freqtrade.rpc.rpc import RPCException
from freqtrade.types import get_BacktestResultType_default
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -69,14 +71,15 @@ def __run_backtest_bg(btconfig: Config):
ApiBG.bt['bt'].enable_protections = btconfig.get('enable_protections', False) ApiBG.bt['bt'].enable_protections = btconfig.get('enable_protections', False)
ApiBG.bt['bt'].strategylist = [strat] ApiBG.bt['bt'].strategylist = [strat]
ApiBG.bt['bt'].results = {} ApiBG.bt['bt'].results = get_BacktestResultType_default()
ApiBG.bt['bt'].load_prior_backtest() ApiBG.bt['bt'].load_prior_backtest()
ApiBG.bt['bt'].abort = False ApiBG.bt['bt'].abort = False
strategy_name = strat.get_strategy_name()
if (ApiBG.bt['bt'].results and if (ApiBG.bt['bt'].results and
strat.get_strategy_name() in ApiBG.bt['bt'].results['strategy']): strategy_name in ApiBG.bt['bt'].results['strategy']):
# When previous result hash matches - reuse that result and skip backtesting. # When previous result hash matches - reuse that result and skip backtesting.
logger.info(f'Reusing result of previous backtest for {strat.get_strategy_name()}') logger.info(f'Reusing result of previous backtest for {strategy_name}')
else: else:
min_date, max_date = ApiBG.bt['bt'].backtest_one_strategy( min_date, max_date = ApiBG.bt['bt'].backtest_one_strategy(
strat, ApiBG.bt['data'], ApiBG.bt['timerange']) strat, ApiBG.bt['data'], ApiBG.bt['timerange'])
@@ -86,10 +89,12 @@ def __run_backtest_bg(btconfig: Config):
min_date=min_date, max_date=max_date) min_date=min_date, max_date=max_date)
if btconfig.get('export', 'none') == 'trades': if btconfig.get('export', 'none') == 'trades':
store_backtest_stats( fn = store_backtest_stats(
btconfig['exportfilename'], ApiBG.bt['bt'].results, btconfig['exportfilename'], ApiBG.bt['bt'].results,
datetime.now().strftime("%Y-%m-%d_%H-%M-%S") datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
) )
ApiBG.bt['bt'].results['metadata'][strategy_name]['filename'] = str(fn.name)
ApiBG.bt['bt'].results['metadata'][strategy_name]['strategy'] = strategy_name
logger.info("Backtest finished.") logger.info("Backtest finished.")
@@ -280,3 +285,24 @@ def api_delete_backtest_history_entry(file: str, config=Depends(get_config)):
delete_backtest_result(file_abs) delete_backtest_result(file_abs)
return get_backtest_resultlist(config['user_data_dir'] / 'backtest_results') return get_backtest_resultlist(config['user_data_dir'] / 'backtest_results')
@router.patch('/backtest/history/{file}', response_model=List[BacktestHistoryEntry],
tags=['webserver', 'backtest'])
def api_update_backtest_history_entry(file: str, body: BacktestMetadataUpdate,
config=Depends(get_config)):
# Get backtest result history, read from metadata files
bt_results_base: Path = config['user_data_dir'] / 'backtest_results'
file_abs = (bt_results_base / file).with_suffix('.json')
# Ensure file is in backtest_results directory
if not is_file_in_dir(file_abs, bt_results_base):
raise HTTPException(status_code=404, detail="File not found.")
content = {
'notes': body.notes
}
try:
update_backtest_metadata(file_abs, body.strategy, content)
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
return get_backtest_result(file_abs)

View File

@@ -1,7 +1,7 @@
from datetime import date, datetime from datetime import date, datetime
from typing import Any, Dict, List, Optional, Union from typing import Any, Dict, List, Optional, Union
from pydantic import BaseModel from pydantic import BaseModel, ConfigDict, RootModel, SerializeAsAny
from freqtrade.constants import DATETIME_PRINT_FORMAT, IntOrInf from freqtrade.constants import DATETIME_PRINT_FORMAT, IntOrInf
from freqtrade.enums import MarginMode, OrderTypeValues, SignalDirection, TradingMode from freqtrade.enums import MarginMode, OrderTypeValues, SignalDirection, TradingMode
@@ -9,9 +9,9 @@ from freqtrade.types import ValidExchangesType
class ExchangeModePayloadMixin(BaseModel): class ExchangeModePayloadMixin(BaseModel):
trading_mode: Optional[TradingMode] trading_mode: Optional[TradingMode] = None
margin_mode: Optional[MarginMode] margin_mode: Optional[MarginMode] = None
exchange: Optional[str] exchange: Optional[str] = None
class Ping(BaseModel): class Ping(BaseModel):
@@ -43,11 +43,11 @@ class BackgroundTaskStatus(BaseModel):
job_category: str job_category: str
status: str status: str
running: bool running: bool
progress: Optional[float] progress: Optional[float] = None
class BackgroundTaskResult(BaseModel): class BackgroundTaskResult(BaseModel):
error: Optional[str] error: Optional[str] = None
status: str status: str
@@ -60,9 +60,9 @@ class Balance(BaseModel):
free: float free: float
balance: float balance: float
used: float used: float
bot_owned: Optional[float] bot_owned: Optional[float] = None
est_stake: float est_stake: float
est_stake_bot: Optional[float] est_stake_bot: Optional[float] = None
stake: str stake: str
# Starting with 2.x # Starting with 2.x
side: str side: str
@@ -141,7 +141,7 @@ class Profit(BaseModel):
expectancy_ratio: float expectancy_ratio: float
max_drawdown: float max_drawdown: float
max_drawdown_abs: float max_drawdown_abs: float
trading_volume: Optional[float] trading_volume: Optional[float] = None
bot_start_timestamp: int bot_start_timestamp: int
bot_start_date: str bot_start_date: str
@@ -173,50 +173,50 @@ class Daily(BaseModel):
class UnfilledTimeout(BaseModel): class UnfilledTimeout(BaseModel):
entry: Optional[int] entry: Optional[int] = None
exit: Optional[int] exit: Optional[int] = None
unit: Optional[str] unit: Optional[str] = None
exit_timeout_count: Optional[int] exit_timeout_count: Optional[int] = None
class OrderTypes(BaseModel): class OrderTypes(BaseModel):
entry: OrderTypeValues entry: OrderTypeValues
exit: OrderTypeValues exit: OrderTypeValues
emergency_exit: Optional[OrderTypeValues] emergency_exit: Optional[OrderTypeValues] = None
force_exit: Optional[OrderTypeValues] force_exit: Optional[OrderTypeValues] = None
force_entry: Optional[OrderTypeValues] force_entry: Optional[OrderTypeValues] = None
stoploss: OrderTypeValues stoploss: OrderTypeValues
stoploss_on_exchange: bool stoploss_on_exchange: bool
stoploss_on_exchange_interval: Optional[int] stoploss_on_exchange_interval: Optional[int] = None
class ShowConfig(BaseModel): class ShowConfig(BaseModel):
version: str version: str
strategy_version: Optional[str] strategy_version: Optional[str] = None
api_version: float api_version: float
dry_run: bool dry_run: bool
trading_mode: str trading_mode: str
short_allowed: bool short_allowed: bool
stake_currency: str stake_currency: str
stake_amount: str stake_amount: str
available_capital: Optional[float] available_capital: Optional[float] = None
stake_currency_decimals: int stake_currency_decimals: int
max_open_trades: IntOrInf max_open_trades: IntOrInf
minimal_roi: Dict[str, Any] minimal_roi: Dict[str, Any]
stoploss: Optional[float] stoploss: Optional[float] = None
stoploss_on_exchange: bool stoploss_on_exchange: bool
trailing_stop: Optional[bool] trailing_stop: Optional[bool] = None
trailing_stop_positive: Optional[float] trailing_stop_positive: Optional[float] = None
trailing_stop_positive_offset: Optional[float] trailing_stop_positive_offset: Optional[float] = None
trailing_only_offset_is_reached: Optional[bool] trailing_only_offset_is_reached: Optional[bool] = None
unfilledtimeout: Optional[UnfilledTimeout] # Empty in webserver mode unfilledtimeout: Optional[UnfilledTimeout] = None # Empty in webserver mode
order_types: Optional[OrderTypes] order_types: Optional[OrderTypes] = None
use_custom_stoploss: Optional[bool] use_custom_stoploss: Optional[bool] = None
timeframe: Optional[str] timeframe: Optional[str] = None
timeframe_ms: int timeframe_ms: int
timeframe_min: int timeframe_min: int
exchange: str exchange: str
strategy: Optional[str] strategy: Optional[str] = None
force_entry_enable: bool force_entry_enable: bool
exit_pricing: Dict[str, Any] exit_pricing: Dict[str, Any]
entry_pricing: Dict[str, Any] entry_pricing: Dict[str, Any]
@@ -231,17 +231,17 @@ class OrderSchema(BaseModel):
pair: str pair: str
order_id: str order_id: str
status: str status: str
remaining: Optional[float] remaining: Optional[float] = None
amount: float amount: float
safe_price: float safe_price: float
cost: float cost: float
filled: Optional[float] filled: Optional[float] = None
ft_order_side: str ft_order_side: str
order_type: str order_type: str
is_open: bool is_open: bool
order_timestamp: Optional[int] order_timestamp: Optional[int] = None
order_filled_timestamp: Optional[int] order_filled_timestamp: Optional[int] = None
ft_fee_base: Optional[float] ft_fee_base: Optional[float] = None
class TradeSchema(BaseModel): class TradeSchema(BaseModel):
@@ -255,81 +255,81 @@ class TradeSchema(BaseModel):
amount: float amount: float
amount_requested: float amount_requested: float
stake_amount: float stake_amount: float
max_stake_amount: Optional[float] max_stake_amount: Optional[float] = None
strategy: str strategy: str
enter_tag: Optional[str] enter_tag: Optional[str] = None
timeframe: int timeframe: int
fee_open: Optional[float] fee_open: Optional[float] = None
fee_open_cost: Optional[float] fee_open_cost: Optional[float] = None
fee_open_currency: Optional[str] fee_open_currency: Optional[str] = None
fee_close: Optional[float] fee_close: Optional[float] = None
fee_close_cost: Optional[float] fee_close_cost: Optional[float] = None
fee_close_currency: Optional[str] fee_close_currency: Optional[str] = None
open_date: str open_date: str
open_timestamp: int open_timestamp: int
open_rate: float open_rate: float
open_rate_requested: Optional[float] open_rate_requested: Optional[float] = None
open_trade_value: float open_trade_value: float
close_date: Optional[str] close_date: Optional[str] = None
close_timestamp: Optional[int] close_timestamp: Optional[int] = None
close_rate: Optional[float] close_rate: Optional[float] = None
close_rate_requested: Optional[float] close_rate_requested: Optional[float] = None
close_profit: Optional[float] close_profit: Optional[float] = None
close_profit_pct: Optional[float] close_profit_pct: Optional[float] = None
close_profit_abs: Optional[float] close_profit_abs: Optional[float] = None
profit_ratio: Optional[float] profit_ratio: Optional[float] = None
profit_pct: Optional[float] profit_pct: Optional[float] = None
profit_abs: Optional[float] profit_abs: Optional[float] = None
profit_fiat: Optional[float] profit_fiat: Optional[float] = None
realized_profit: float realized_profit: float
realized_profit_ratio: Optional[float] realized_profit_ratio: Optional[float] = None
exit_reason: Optional[str] exit_reason: Optional[str] = None
exit_order_status: Optional[str] exit_order_status: Optional[str] = None
stop_loss_abs: Optional[float] stop_loss_abs: Optional[float] = None
stop_loss_ratio: Optional[float] stop_loss_ratio: Optional[float] = None
stop_loss_pct: Optional[float] stop_loss_pct: Optional[float] = None
stoploss_order_id: Optional[str] stoploss_order_id: Optional[str] = None
stoploss_last_update: Optional[str] stoploss_last_update: Optional[str] = None
stoploss_last_update_timestamp: Optional[int] stoploss_last_update_timestamp: Optional[int] = None
initial_stop_loss_abs: Optional[float] initial_stop_loss_abs: Optional[float] = None
initial_stop_loss_ratio: Optional[float] initial_stop_loss_ratio: Optional[float] = None
initial_stop_loss_pct: Optional[float] initial_stop_loss_pct: Optional[float] = None
min_rate: Optional[float] min_rate: Optional[float] = None
max_rate: Optional[float] max_rate: Optional[float] = None
open_order_id: Optional[str] open_order_id: Optional[str] = None
orders: List[OrderSchema] orders: List[OrderSchema]
leverage: Optional[float] leverage: Optional[float] = None
interest_rate: Optional[float] interest_rate: Optional[float] = None
liquidation_price: Optional[float] liquidation_price: Optional[float] = None
funding_fees: Optional[float] funding_fees: Optional[float] = None
trading_mode: Optional[TradingMode] trading_mode: Optional[TradingMode] = None
amount_precision: Optional[float] amount_precision: Optional[float] = None
price_precision: Optional[float] price_precision: Optional[float] = None
precision_mode: Optional[int] precision_mode: Optional[int] = None
class OpenTradeSchema(TradeSchema): class OpenTradeSchema(TradeSchema):
stoploss_current_dist: Optional[float] stoploss_current_dist: Optional[float] = None
stoploss_current_dist_pct: Optional[float] stoploss_current_dist_pct: Optional[float] = None
stoploss_current_dist_ratio: Optional[float] stoploss_current_dist_ratio: Optional[float] = None
stoploss_entry_dist: Optional[float] stoploss_entry_dist: Optional[float] = None
stoploss_entry_dist_ratio: Optional[float] stoploss_entry_dist_ratio: Optional[float] = None
current_rate: float current_rate: float
total_profit_abs: float total_profit_abs: float
total_profit_fiat: Optional[float] total_profit_fiat: Optional[float] = None
total_profit_ratio: Optional[float] total_profit_ratio: Optional[float] = None
open_order: Optional[str] open_order: Optional[str] = None
class TradeResponse(BaseModel): class TradeResponse(BaseModel):
@@ -339,8 +339,7 @@ class TradeResponse(BaseModel):
total_trades: int total_trades: int
class ForceEnterResponse(BaseModel): ForceEnterResponse = RootModel[Union[TradeSchema, StatusMsg]]
__root__: Union[TradeSchema, StatusMsg]
class LockModel(BaseModel): class LockModel(BaseModel):
@@ -352,7 +351,7 @@ class LockModel(BaseModel):
lock_timestamp: int lock_timestamp: int
pair: str pair: str
side: str side: str
reason: Optional[str] reason: Optional[str] = None
class Locks(BaseModel): class Locks(BaseModel):
@@ -361,8 +360,8 @@ class Locks(BaseModel):
class DeleteLockRequest(BaseModel): class DeleteLockRequest(BaseModel):
pair: Optional[str] pair: Optional[str] = None
lockid: Optional[int] lockid: Optional[int] = None
class Logs(BaseModel): class Logs(BaseModel):
@@ -373,17 +372,17 @@ class Logs(BaseModel):
class ForceEnterPayload(BaseModel): class ForceEnterPayload(BaseModel):
pair: str pair: str
side: SignalDirection = SignalDirection.LONG side: SignalDirection = SignalDirection.LONG
price: Optional[float] price: Optional[float] = None
ordertype: Optional[OrderTypeValues] ordertype: Optional[OrderTypeValues] = None
stakeamount: Optional[float] stakeamount: Optional[float] = None
entry_tag: Optional[str] entry_tag: Optional[str] = None
leverage: Optional[float] leverage: Optional[float] = None
class ForceExitPayload(BaseModel): class ForceExitPayload(BaseModel):
tradeid: str tradeid: str
ordertype: Optional[OrderTypeValues] ordertype: Optional[OrderTypeValues] = None
amount: Optional[float] amount: Optional[float] = None
class BlacklistPayload(BaseModel): class BlacklistPayload(BaseModel):
@@ -405,7 +404,7 @@ class WhitelistResponse(BaseModel):
class WhitelistEvaluateResponse(BackgroundTaskResult): class WhitelistEvaluateResponse(BackgroundTaskResult):
result: Optional[WhitelistResponse] result: Optional[WhitelistResponse] = None
class DeleteTrade(BaseModel): class DeleteTrade(BaseModel):
@@ -420,8 +419,7 @@ class PlotConfig_(BaseModel):
subplots: Dict[str, Any] subplots: Dict[str, Any]
class PlotConfig(BaseModel): PlotConfig = RootModel[Union[PlotConfig_, Dict]]
__root__: Union[PlotConfig_, Dict]
class StrategyListResponse(BaseModel): class StrategyListResponse(BaseModel):
@@ -470,7 +468,7 @@ class PairHistory(BaseModel):
timeframe: str timeframe: str
timeframe_ms: int timeframe_ms: int
columns: List[str] columns: List[str]
data: List[Any] data: SerializeAsAny[List[Any]]
length: int length: int
buy_signals: int buy_signals: int
sell_signals: int sell_signals: int
@@ -484,11 +482,11 @@ class PairHistory(BaseModel):
data_start: str data_start: str
data_stop: str data_stop: str
data_stop_ts: int data_stop_ts: int
# TODO[pydantic]: The following keys were removed: `json_encoders`.
class Config: # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
json_encoders = { model_config = ConfigDict(json_encoders={
datetime: lambda v: v.strftime(DATETIME_PRINT_FORMAT), datetime: lambda v: v.strftime(DATETIME_PRINT_FORMAT),
} })
class BacktestFreqAIInputs(BaseModel): class BacktestFreqAIInputs(BaseModel):
@@ -497,16 +495,16 @@ class BacktestFreqAIInputs(BaseModel):
class BacktestRequest(BaseModel): class BacktestRequest(BaseModel):
strategy: str strategy: str
timeframe: Optional[str] timeframe: Optional[str] = None
timeframe_detail: Optional[str] timeframe_detail: Optional[str] = None
timerange: Optional[str] timerange: Optional[str] = None
max_open_trades: Optional[IntOrInf] max_open_trades: Optional[IntOrInf] = None
stake_amount: Optional[str] stake_amount: Optional[Union[str, float]] = None
enable_protections: bool enable_protections: bool
dry_run_wallet: Optional[float] dry_run_wallet: Optional[float] = None
backtest_cache: Optional[str] backtest_cache: Optional[str] = None
freqaimodel: Optional[str] freqaimodel: Optional[str] = None
freqai: Optional[BacktestFreqAIInputs] freqai: Optional[BacktestFreqAIInputs] = None
class BacktestResponse(BaseModel): class BacktestResponse(BaseModel):
@@ -515,16 +513,23 @@ class BacktestResponse(BaseModel):
status_msg: str status_msg: str
step: str step: str
progress: float progress: float
trade_count: Optional[float] trade_count: Optional[float] = None
# TODO: Properly type backtestresult... # TODO: Properly type backtestresult...
backtest_result: Optional[Dict[str, Any]] backtest_result: Optional[Dict[str, Any]] = None
# TODO: This is a copy of BacktestHistoryEntryType
class BacktestHistoryEntry(BaseModel): class BacktestHistoryEntry(BaseModel):
filename: str filename: str
strategy: str strategy: str
run_id: str run_id: str
backtest_start_time: int backtest_start_time: int
notes: Optional[str] = ''
class BacktestMetadataUpdate(BaseModel):
strategy: str
notes: str = ''
class SysInfo(BaseModel): class SysInfo(BaseModel):
@@ -533,5 +538,5 @@ class SysInfo(BaseModel):
class Health(BaseModel): class Health(BaseModel):
last_process: Optional[datetime] last_process: Optional[datetime] = None
last_process_ts: Optional[int] last_process_ts: Optional[int] = None

View File

@@ -50,7 +50,8 @@ logger = logging.getLogger(__name__)
# 2.29: Add /exchanges endpoint # 2.29: Add /exchanges endpoint
# 2.30: new /pairlists endpoint # 2.30: new /pairlists endpoint
# 2.31: new /backtest/history/ delete endpoint # 2.31: new /backtest/history/ delete endpoint
API_VERSION = 2.31 # 2.32: new /backtest/history/ patch endpoint
API_VERSION = 2.32
# Public API, requires no auth. # Public API, requires no auth.
router_public = APIRouter() router_public = APIRouter()
@@ -174,9 +175,9 @@ def force_entry(payload: ForceEnterPayload, rpc: RPC = Depends(get_rpc)):
leverage=payload.leverage) leverage=payload.leverage)
if trade: if trade:
return ForceEnterResponse.parse_obj(trade.to_json()) return ForceEnterResponse.model_validate(trade.to_json())
else: else:
return ForceEnterResponse.parse_obj( return ForceEnterResponse.model_validate(
{"status": f"Error entering {payload.side} trade for pair {payload.pair}."}) {"status": f"Error entering {payload.side} trade for pair {payload.pair}."})
@@ -281,14 +282,14 @@ def plot_config(strategy: Optional[str] = None, config=Depends(get_config),
if not strategy: if not strategy:
if not rpc: if not rpc:
raise RPCException("Strategy is mandatory in webserver mode.") raise RPCException("Strategy is mandatory in webserver mode.")
return PlotConfig.parse_obj(rpc._rpc_plot_config()) return PlotConfig.model_validate(rpc._rpc_plot_config())
else: else:
config1 = deepcopy(config) config1 = deepcopy(config)
config1.update({ config1.update({
'strategy': strategy 'strategy': strategy
}) })
try: try:
return PlotConfig.parse_obj(RPC._rpc_plot_config_with_strategy(config1)) return PlotConfig.model_validate(RPC._rpc_plot_config_with_strategy(config1))
except Exception as e: except Exception as e:
raise HTTPException(status_code=502, detail=str(e)) raise HTTPException(status_code=502, detail=str(e))

View File

@@ -65,7 +65,7 @@ async def _process_consumer_request(
""" """
# Validate the request, makes sure it matches the schema # Validate the request, makes sure it matches the schema
try: try:
websocket_request = WSRequestSchema.parse_obj(request) websocket_request = WSRequestSchema.model_validate(request)
except ValidationError as e: except ValidationError as e:
logger.error(f"Invalid request from {channel}: {e}") logger.error(f"Invalid request from {channel}: {e}")
return return
@@ -94,7 +94,7 @@ async def _process_consumer_request(
# Format response # Format response
response = WSWhitelistMessage(data=whitelist) response = WSWhitelistMessage(data=whitelist)
await channel.send(response.dict(exclude_none=True)) await channel.send(response.model_dump(exclude_none=True))
elif type_ == RPCRequestType.ANALYZED_DF: elif type_ == RPCRequestType.ANALYZED_DF:
# Limit the amount of candles per dataframe to 'limit' or 1500 # Limit the amount of candles per dataframe to 'limit' or 1500
@@ -105,7 +105,7 @@ async def _process_consumer_request(
for message in rpc._ws_request_analyzed_df(limit, pair): for message in rpc._ws_request_analyzed_df(limit, pair):
# Format response # Format response
response = WSAnalyzedDFMessage(data=message) response = WSAnalyzedDFMessage(data=message)
await channel.send(response.dict(exclude_none=True)) await channel.send(response.model_dump(exclude_none=True))
@router.websocket("/message/ws") @router.websocket("/message/ws")

View File

@@ -8,6 +8,7 @@ from fastapi import Depends, FastAPI
from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.cors import CORSMiddleware
from starlette.responses import JSONResponse from starlette.responses import JSONResponse
from freqtrade.configuration import running_in_docker
from freqtrade.constants import Config from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException from freqtrade.exceptions import OperationalException
from freqtrade.rpc.api_server.uvicorn_threaded import UvicornServer from freqtrade.rpc.api_server.uvicorn_threaded import UvicornServer
@@ -182,7 +183,7 @@ class ApiServer(RPCHandler):
rest_port = self._config['api_server']['listen_port'] rest_port = self._config['api_server']['listen_port']
logger.info(f'Starting HTTP Server at {rest_ip}:{rest_port}') logger.info(f'Starting HTTP Server at {rest_ip}:{rest_port}')
if not IPv4Address(rest_ip).is_loopback: if not IPv4Address(rest_ip).is_loopback and not running_in_docker():
logger.warning("SECURITY WARNING - Local Rest Server listening to external connections") logger.warning("SECURITY WARNING - Local Rest Server listening to external connections")
logger.warning("SECURITY WARNING - This is insecure please set to your loopback," logger.warning("SECURITY WARNING - This is insecure please set to your loopback,"
"e.g 127.0.0.1 in config.json") "e.g 127.0.0.1 in config.json")

View File

@@ -2,15 +2,14 @@ from datetime import datetime
from typing import Any, Dict, List, Optional, TypedDict from typing import Any, Dict, List, Optional, TypedDict
from pandas import DataFrame from pandas import DataFrame
from pydantic import BaseModel from pydantic import BaseModel, ConfigDict
from freqtrade.constants import PairWithTimeframe from freqtrade.constants import PairWithTimeframe
from freqtrade.enums.rpcmessagetype import RPCMessageType, RPCRequestType from freqtrade.enums.rpcmessagetype import RPCMessageType, RPCRequestType
class BaseArbitraryModel(BaseModel): class BaseArbitraryModel(BaseModel):
class Config: model_config = ConfigDict(arbitrary_types_allowed=True)
arbitrary_types_allowed = True
class WSRequestSchema(BaseArbitraryModel): class WSRequestSchema(BaseArbitraryModel):
@@ -27,9 +26,7 @@ class WSMessageSchemaType(TypedDict):
class WSMessageSchema(BaseArbitraryModel): class WSMessageSchema(BaseArbitraryModel):
type: RPCMessageType type: RPCMessageType
data: Optional[Any] = None data: Optional[Any] = None
model_config = ConfigDict(extra='allow')
class Config:
extra = 'allow'
# ------------------------------ REQUEST SCHEMAS ---------------------------- # ------------------------------ REQUEST SCHEMAS ----------------------------

View File

@@ -20,6 +20,7 @@ class Discord(Webhook):
self._format = 'json' self._format = 'json'
self._retries = 1 self._retries = 1
self._retry_delay = 0.1 self._retry_delay = 0.1
self._timeout = self._config['discord'].get('timeout', 10)
def cleanup(self) -> None: def cleanup(self) -> None:
""" """

View File

@@ -41,7 +41,7 @@ logger = logging.getLogger(__name__)
def schema_to_dict(schema: Union[WSMessageSchema, WSRequestSchema]): def schema_to_dict(schema: Union[WSMessageSchema, WSRequestSchema]):
return schema.dict(exclude_none=True) return schema.model_dump(exclude_none=True)
class ExternalMessageConsumer: class ExternalMessageConsumer:
@@ -322,7 +322,7 @@ class ExternalMessageConsumer:
producer_name = producer.get('name', 'default') producer_name = producer.get('name', 'default')
try: try:
producer_message = WSMessageSchema.parse_obj(message) producer_message = WSMessageSchema.model_validate(message)
except ValidationError as e: except ValidationError as e:
logger.error(f"Invalid message from `{producer_name}`: {e}") logger.error(f"Invalid message from `{producer_name}`: {e}")
return return
@@ -344,7 +344,7 @@ class ExternalMessageConsumer:
def _consume_whitelist_message(self, producer_name: str, message: WSMessageSchema): def _consume_whitelist_message(self, producer_name: str, message: WSMessageSchema):
try: try:
# Validate the message # Validate the message
whitelist_message = WSWhitelistMessage.parse_obj(message) whitelist_message = WSWhitelistMessage.model_validate(message.model_dump())
except ValidationError as e: except ValidationError as e:
logger.error(f"Invalid message from `{producer_name}`: {e}") logger.error(f"Invalid message from `{producer_name}`: {e}")
return return
@@ -356,7 +356,7 @@ class ExternalMessageConsumer:
def _consume_analyzed_df_message(self, producer_name: str, message: WSMessageSchema): def _consume_analyzed_df_message(self, producer_name: str, message: WSMessageSchema):
try: try:
df_message = WSAnalyzedDFMessage.parse_obj(message) df_message = WSAnalyzedDFMessage.model_validate(message.model_dump())
except ValidationError as e: except ValidationError as e:
logger.error(f"Invalid message from `{producer_name}`: {e}") logger.error(f"Invalid message from `{producer_name}`: {e}")
return return

View File

@@ -26,6 +26,7 @@ coingecko_mapping = {
'sol': 'solana', 'sol': 'solana',
'usdt': 'tether', 'usdt': 'tether',
'busd': 'binance-usd', 'busd': 'binance-usd',
'tusd': 'true-usd',
} }

View File

@@ -605,17 +605,13 @@ class RPC:
est_stake = balance.free est_stake = balance.free
est_bot_stake = amount est_bot_stake = amount
else: else:
try: pair = self._freqtrade.exchange.get_valid_pair_combination(coin, stake_currency)
pair = self._freqtrade.exchange.get_valid_pair_combination(coin, stake_currency) rate: Optional[float] = tickers.get(pair, {}).get('last', None)
rate: Optional[float] = tickers.get(pair, {}).get('last', None) if rate:
if rate: if pair.startswith(stake_currency) and not pair.endswith(stake_currency):
if pair.startswith(stake_currency) and not pair.endswith(stake_currency): rate = 1.0 / rate
rate = 1.0 / rate est_stake = rate * balance.total
est_stake = rate * balance.total est_bot_stake = rate * amount
est_bot_stake = rate * amount
except (ExchangeError):
logger.warning(f"Could not get rate for pair {coin}.")
raise ValueError()
return est_stake, est_bot_stake return est_stake, est_bot_stake
@@ -1262,7 +1258,7 @@ class RPC:
pairs=[pair], pairs=[pair],
timeframe=timeframe, timeframe=timeframe,
timerange=timerange_parsed, timerange=timerange_parsed,
data_format=config.get('dataformat_ohlcv', 'json'), data_format=config['dataformat_ohlcv'],
candle_type=config.get('candle_type_def', CandleType.SPOT), candle_type=config.get('candle_type_def', CandleType.SPOT),
startup_candles=startup_candles, startup_candles=startup_candles,
) )

View File

@@ -381,7 +381,7 @@ class IStrategy(ABC, HyperStrategyMixin):
For full documentation please go to https://www.freqtrade.io/en/latest/strategy-advanced/ For full documentation please go to https://www.freqtrade.io/en/latest/strategy-advanced/
When not implemented by a strategy, returns the initial stoploss value When not implemented by a strategy, returns the initial stoploss value.
Only called when use_custom_stoploss is set to True. Only called when use_custom_stoploss is set to True.
:param pair: Pair that's currently analyzed :param pair: Pair that's currently analyzed
@@ -1181,7 +1181,8 @@ class IStrategy(ABC, HyperStrategyMixin):
bound = (low if trade.is_short else high) bound = (low if trade.is_short else high)
bound_profit = current_profit if not bound else trade.calc_profit_ratio(bound) bound_profit = current_profit if not bound else trade.calc_profit_ratio(bound)
if self.use_custom_stoploss and dir_correct: if self.use_custom_stoploss and dir_correct:
stop_loss_value = strategy_safe_wrapper(self.custom_stoploss, default_retval=None stop_loss_value = strategy_safe_wrapper(self.custom_stoploss, default_retval=None,
supress_error=True
)(pair=trade.pair, trade=trade, )(pair=trade.pair, trade=trade,
current_time=current_time, current_time=current_time,
current_rate=(bound or current_rate), current_rate=(bound or current_rate),

View File

@@ -78,19 +78,7 @@ class {{ strategy }}(IStrategy):
buy_rsi = IntParameter(10, 40, default=30, space="buy") buy_rsi = IntParameter(10, 40, default=30, space="buy")
sell_rsi = IntParameter(60, 90, default=70, space="sell") sell_rsi = IntParameter(60, 90, default=70, space="sell")
# Optional order type mapping. {{ attributes | indent(4) }}
order_types = {
'entry': 'limit',
'exit': 'limit',
'stoploss': 'market',
'stoploss_on_exchange': False
}
# Optional order time in force.
order_time_in_force = {
'entry': 'GTC',
'exit': 'GTC'
}
{{ plot_config | indent(4) }} {{ plot_config | indent(4) }}
def informative_pairs(self): def informative_pairs(self):

View File

@@ -0,0 +1,13 @@
# Optional order type mapping.
order_types = {
'entry': 'limit',
'exit': 'limit',
'stoploss': 'market',
'stoploss_on_exchange': False
}
# Optional order time in force.
order_time_in_force = {
'entry': 'GTC',
'exit': 'GTC'
}

View File

@@ -1 +1,5 @@
from freqtrade.types.valid_exchanges_type import ValidExchangesType # noqa: F401 # flake8: noqa: F401
from freqtrade.types.backtest_result_type import (BacktestHistoryEntryType, BacktestMetadataType,
BacktestResultType,
get_BacktestResultType_default)
from freqtrade.types.valid_exchanges_type import ValidExchangesType

View File

@@ -0,0 +1,28 @@
from typing import Any, Dict, List
from typing_extensions import TypedDict
class BacktestMetadataType(TypedDict):
run_id: str
backtest_start_time: int
class BacktestResultType(TypedDict):
metadata: Dict[str, Any] # BacktestMetadataType
strategy: Dict[str, Any]
strategy_comparison: List[Any]
def get_BacktestResultType_default() -> BacktestResultType:
return {
'metadata': {},
'strategy': {},
'strategy_comparison': [],
}
class BacktestHistoryEntryType(BacktestMetadataType):
filename: str
strategy: str
notes: str

View File

@@ -2,6 +2,7 @@ from freqtrade.util.datetime_helpers import (dt_floor_day, dt_from_ts, dt_humani
dt_utc, format_ms_time, shorten_date) dt_utc, format_ms_time, shorten_date)
from freqtrade.util.ft_precise import FtPrecise from freqtrade.util.ft_precise import FtPrecise
from freqtrade.util.periodic_cache import PeriodicCache from freqtrade.util.periodic_cache import PeriodicCache
from freqtrade.util.template_renderer import render_template, render_template_with_fallback # noqa
__all__ = [ __all__ = [

View File

@@ -64,7 +64,7 @@ def migrate_binance_futures_data(config: Config):
return return
from freqtrade.data.history.idatahandler import get_datahandler from freqtrade.data.history.idatahandler import get_datahandler
dhc = get_datahandler(config['datadir'], config.get('dataformat_ohlcv', 'json')) dhc = get_datahandler(config['datadir'], config['dataformat_ohlcv'])
paircombs = dhc.ohlcv_get_available_data( paircombs = dhc.ohlcv_get_available_data(
config['datadir'], config['datadir'],

View File

@@ -0,0 +1,27 @@
"""
Jinja2 rendering utils, used to generate new strategy and configurations.
"""
def render_template(templatefile: str, arguments: dict = {}) -> str:
from jinja2 import Environment, PackageLoader, select_autoescape
env = Environment(
loader=PackageLoader('freqtrade', 'templates'),
autoescape=select_autoescape(['html', 'xml'])
)
template = env.get_template(templatefile)
return template.render(**arguments)
def render_template_with_fallback(templatefile: str, templatefallbackfile: str,
arguments: dict = {}) -> str:
"""
Use templatefile if possible, otherwise fall back to templatefallbackfile
"""
from jinja2.exceptions import TemplateNotFound
try:
return render_template(templatefile, arguments)
except TemplateNotFound:
return render_template(templatefallbackfile, arguments)

View File

@@ -47,7 +47,6 @@ nav:
- Advanced Hyperopt: advanced-hyperopt.md - Advanced Hyperopt: advanced-hyperopt.md
- Producer/Consumer mode: producer-consumer.md - Producer/Consumer mode: producer-consumer.md
- Edge Positioning: edge.md - Edge Positioning: edge.md
- Sandbox Testing: sandbox-testing.md
- FAQ: faq.md - FAQ: faq.md
- SQL Cheat-sheet: sql_cheatsheet.md - SQL Cheat-sheet: sql_cheatsheet.md
- Strategy migration: strategy_migration.md - Strategy migration: strategy_migration.md

View File

@@ -63,7 +63,7 @@ ignore = ["freqtrade/vendor/**"]
[tool.ruff] [tool.ruff]
line-length = 100 line-length = 100
extend-exclude = [".env"] extend-exclude = [".env", ".venv"]
target-version = "py38" target-version = "py38"
extend-select = [ extend-select = [
"C90", # mccabe "C90", # mccabe

View File

@@ -7,8 +7,8 @@
-r docs/requirements-docs.txt -r docs/requirements-docs.txt
coveralls==3.3.1 coveralls==3.3.1
ruff==0.0.280 ruff==0.0.285
mypy==1.4.1 mypy==1.5.1
pre-commit==3.3.3 pre-commit==3.3.3
pytest==7.4.0 pytest==7.4.0
pytest-asyncio==0.21.1 pytest-asyncio==0.21.1
@@ -17,10 +17,10 @@ pytest-mock==3.11.1
pytest-random-order==1.1.0 pytest-random-order==1.1.0
isort==5.12.0 isort==5.12.0
# For datetime mocking # For datetime mocking
time-machine==2.11.0 time-machine==2.12.0
# Convert jupyter notebooks to markdown documents # Convert jupyter notebooks to markdown documents
nbconvert==7.7.2 nbconvert==7.7.4
# mypy types # mypy types
types-cachetools==5.3.0.6 types-cachetools==5.3.0.6

View File

@@ -4,8 +4,8 @@
# Required for freqai-rl # Required for freqai-rl
torch==2.0.1 torch==2.0.1
#until these branches will be released we can use this #until these branches will be released we can use this
gymnasium==0.28.1 gymnasium==0.29.1
stable_baselines3==2.0.0 stable_baselines3==2.1.0
sb3_contrib>=2.0.0a9 sb3_contrib>=2.0.0a9
# Progress bar for stable-baselines3 and sb3-contrib # Progress bar for stable-baselines3 and sb3-contrib
tqdm==4.65.0 tqdm==4.66.1

View File

@@ -4,9 +4,9 @@
# Required for freqai # Required for freqai
scikit-learn==1.1.3 scikit-learn==1.1.3
joblib==1.3.1 joblib==1.3.2
catboost==1.2; 'arm' not in platform_machine catboost==1.2; 'arm' not in platform_machine
lightgbm==4.0.0 lightgbm==4.0.0
xgboost==1.7.6 xgboost==1.7.6
tensorboard==2.13.0 tensorboard==2.14.0
datasieve==0.1.7 datasieve==0.1.7

View File

@@ -2,7 +2,7 @@
-r requirements.txt -r requirements.txt
# Required for hyperopt # Required for hyperopt
scipy==1.11.1; python_version >= '3.9' scipy==1.11.2; python_version >= '3.9'
scipy==1.10.1; python_version < '3.9' scipy==1.10.1; python_version < '3.9'
scikit-learn==1.1.3 scikit-learn==1.1.3
scikit-optimize==0.9.0 scikit-optimize==0.9.0

Some files were not shown because too many files have changed in this diff Show More