Merge remote-tracking branch 'upstream/develop' into feature/fetch-public-trades

Joe Schr
2023-12-18 10:34:20 +01:00
124 changed files with 7802 additions and 2380 deletions

View File

@@ -31,7 +31,7 @@ jobs:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
@@ -90,7 +90,7 @@ jobs:
- name: Backtesting (multi)
run: |
cp config_examples/config_bittrex.example.json config.json
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade new-strategy -s AwesomeStrategy
freqtrade new-strategy -s AwesomeStrategyMin --template minimal
@@ -98,7 +98,7 @@ jobs:
- name: Hyperopt
run: |
cp config_examples/config_bittrex.example.json config.json
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade hyperopt --datadir tests/testdata -e 6 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
@@ -108,7 +108,7 @@ jobs:
- name: Run Ruff
run: |
ruff check --format=github .
ruff check --output-format=github .
- name: Mypy
run: |
@@ -122,18 +122,18 @@ jobs:
details: Freqtrade CI failed on ${{ matrix.os }}
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
build_macos:
build-macos:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ macos-latest ]
os: [ "macos-latest" ]
python-version: ["3.9", "3.10", "3.11"]
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -143,14 +143,13 @@ jobs:
id: cache
with:
path: ~/dependencies/
key: ${{ runner.os }}-dependencies
key: ${{ matrix.os }}-dependencies
- name: pip cache (macOS)
uses: actions/cache@v3
if: runner.os == 'macOS'
with:
path: ~/Library/Caches/pip
key: test-${{ matrix.os }}-${{ matrix.python-version }}-pip
key: ${{ matrix.os }}-${{ matrix.python-version }}-pip
- name: TA binary *nix
if: steps.cache.outputs.cache-hit != 'true'
@@ -158,7 +157,6 @@ jobs:
cd build_helpers && ./install_ta-lib.sh ${HOME}/dependencies/; cd ..
- name: Installation - macOS
if: runner.os == 'macOS'
run: |
# brew update
# TODO: Should be the brew upgrade
@@ -175,7 +173,7 @@ jobs:
rm /usr/local/bin/python3-config || true
rm /usr/local/bin/python3.11-config || true
brew install hdf5 c-blosc
brew install hdf5 c-blosc libomp
python -m pip install --upgrade pip wheel
export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
export TA_LIBRARY_PATH=${HOME}/dependencies/lib
@@ -200,14 +198,14 @@ jobs:
- name: Backtesting
run: |
cp config_examples/config_bittrex.example.json config.json
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade new-strategy -s AwesomeStrategyAdv --template advanced
freqtrade backtesting --datadir tests/testdata --strategy AwesomeStrategyAdv
- name: Hyperopt
run: |
cp config_examples/config_bittrex.example.json config.json
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
@@ -217,7 +215,7 @@ jobs:
- name: Run Ruff
run: |
ruff check --format=github .
ruff check --output-format=github .
- name: Mypy
run: |
@@ -231,7 +229,7 @@ jobs:
details: Test Succeeded!
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
build_windows:
build-windows:
runs-on: ${{ matrix.os }}
strategy:
@@ -243,7 +241,7 @@ jobs:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
@@ -275,19 +273,19 @@ jobs:
- name: Backtesting
run: |
cp config_examples/config_bittrex.example.json config.json
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade backtesting --datadir tests/testdata --strategy SampleStrategy
- name: Hyperopt
run: |
cp config_examples/config_bittrex.example.json config.json
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
- name: Run Ruff
run: |
ruff check --format=github .
ruff check --output-format=github .
- name: Mypy
run: |
@@ -301,13 +299,13 @@ jobs:
details: Test Failed
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
mypy_version_check:
mypy-version-check:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: "3.10"
@@ -321,12 +319,12 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: "3.10"
- uses: pre-commit/action@v3.0.0
docs_check:
docs-check:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
@@ -336,7 +334,7 @@ jobs:
./tests/test_docs.sh
- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: "3.11"
@@ -362,7 +360,7 @@ jobs:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: "3.9"
@@ -406,10 +404,10 @@ jobs:
notify-complete:
needs: [
build_linux,
build_macos,
build_windows,
docs_check,
mypy_version_check,
build-macos,
build-windows,
docs-check,
mypy-version-check,
pre-commit,
build_linux_online
]
@@ -436,8 +434,63 @@ jobs:
details: Test Completed!
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
deploy:
needs: [ build_linux, build_macos, build_windows, docs_check, mypy_version_check, pre-commit ]
build:
name: "Build"
needs: [ build_linux, build-macos, build-windows, docs-check, mypy-version-check, pre-commit ]
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Build distribution
run: |
pip install -U build
python -m build --sdist --wheel
- name: Upload artifacts 📦
uses: actions/upload-artifact@v4
with:
name: freqtrade-build
path: |
dist
retention-days: 10
deploy-pypi:
name: "Deploy to PyPI"
needs: [ build ]
runs-on: ubuntu-22.04
if: (github.event_name == 'release')
environment:
name: release
url: https://pypi.org/p/freqtrade
permissions:
id-token: write
steps:
- uses: actions/checkout@v4
- name: Download artifact 📦
uses: actions/download-artifact@v4
with:
name: freqtrade-build
path: dist
- name: Publish to PyPI (Test)
uses: pypa/gh-action-pypi-publish@v1.8.11
with:
repository-url: https://test.pypi.org/legacy/
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@v1.8.11
deploy-docker:
needs: [ build_linux, build-macos, build-windows, docs-check, mypy-version-check, pre-commit ]
runs-on: ubuntu-22.04
if: (github.event_name == 'push' || github.event_name == 'schedule' || github.event_name == 'release') && github.repository == 'freqtrade/freqtrade'
@@ -446,7 +499,7 @@ jobs:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: "3.11"
@@ -455,26 +508,6 @@ jobs:
run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF##*/})"
id: extract_branch
- name: Build distribution
run: |
pip install -U setuptools wheel
python setup.py sdist bdist_wheel
- name: Publish to PyPI (Test)
uses: pypa/gh-action-pypi-publish@v1.8.10
if: (github.event_name == 'release')
with:
user: __token__
password: ${{ secrets.pypi_test_password }}
repository_url: https://test.pypi.org/legacy/
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@v1.8.10
if: (github.event_name == 'release')
with:
user: __token__
password: ${{ secrets.pypi_password }}
- name: Dockerhub login
env:
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
@@ -506,10 +539,11 @@ jobs:
run: |
build_helpers/publish_docker_multi.sh
deploy_arm:
deploy-arm:
name: "Deploy Docker"
permissions:
packages: write
needs: [ deploy ]
needs: [ deploy-docker ]
# Only run on 64bit machines
runs-on: [self-hosted, linux, ARM64]
if: (github.event_name == 'push' || github.event_name == 'schedule' || github.event_name == 'release') && github.repository == 'freqtrade/freqtrade'

View File

@@ -5,20 +5,21 @@ repos:
rev: "6.0.0"
hooks:
- id: flake8
additional_dependencies: [Flake8-pyproject]
# stages: [push]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: "v1.5.1"
rev: "v1.7.0"
hooks:
- id: mypy
exclude: build_helpers
additional_dependencies:
- types-cachetools==5.3.0.6
- types-cachetools==5.3.0.7
- types-filelock==3.2.7
- types-requests==2.31.0.8
- types-requests==2.31.0.10
- types-tabulate==0.9.0.3
- types-python-dateutil==2.8.19.14
- SQLAlchemy==2.0.21
- SQLAlchemy==2.0.23
# stages: [push]
- repo: https://github.com/pycqa/isort
@@ -30,7 +31,7 @@ repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.0.270'
rev: 'v0.1.1'
hooks:
- id: ruff

View File

@@ -125,7 +125,7 @@ Exceptions:
Contributors may be given commit privileges. Preference will be given to those with:
1. Past contributions to Freqtrade and other related open-source projects. Contributions to Freqtrade include both code (both accepted and pending) and friendly participation in the issue tracker and Pull request reviews. Quantity and quality are considered.
1. Past contributions to Freqtrade and other related open-source projects. Contributions to Freqtrade include both code (both accepted and pending) and friendly participation in the issue tracker and Pull request reviews. Both quantity and quality are considered.
1. A coding style that the other core committers find simple, minimal, and clean.
1. Access to resources for cross-platform development and testing.
1. Time to devote to the project regularly.

View File

@@ -1,4 +1,4 @@
FROM python:3.11.5-slim-bullseye as base
FROM python:3.11.6-slim-bookworm as base
# Setup env
ENV LANG C.UTF-8

View File

@@ -5,3 +5,5 @@ recursive-include freqtrade/templates/ *.j2 *.ipynb
include freqtrade/exchange/binance_leverage_tiers.json
include freqtrade/rpc/api_server/ui/fallback_file.html
include freqtrade/rpc/api_server/ui/favicon.ico
prune tests

View File

@@ -28,7 +28,7 @@ hesitate to read the source code and understand the mechanism of this bot.
Please read the [exchange specific notes](docs/exchanges.md) to learn about eventual, special configurations needed for each exchange.
- [X] [Binance](https://www.binance.com/)
- [X] [Bittrex](https://bittrex.com/)
- [X] [Bitmart](https://bitmart.com/)
- [X] [Gate.io](https://www.gate.io/ref/6266643)
- [X] [Huobi](http://huobi.com/)
- [X] [Kraken](https://kraken.com/)

Binary file not shown.

Binary file not shown.

View File

@@ -54,7 +54,7 @@ docker tag freqtrade:$TAG_FREQAI_ARM ${CACHE_IMAGE}:$TAG_FREQAI_ARM
docker tag freqtrade:$TAG_FREQAI_RL_ARM ${CACHE_IMAGE}:$TAG_FREQAI_RL_ARM
# Run backtest
docker run --rm -v $(pwd)/config_examples/config_bittrex.example.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG_ARM} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
docker run --rm -v $(pwd)/tests/testdata/config.tests.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG_ARM} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
if [ $? -ne 0 ]; then
echo "failed running backtest"

View File

@@ -67,7 +67,7 @@ docker tag freqtrade:$TAG_FREQAI ${CACHE_IMAGE}:$TAG_FREQAI
docker tag freqtrade:$TAG_FREQAI_RL ${CACHE_IMAGE}:$TAG_FREQAI_RL
# Run backtest
docker run --rm -v $(pwd)/config_examples/config_bittrex.example.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
docker run --rm -v $(pwd)/tests/testdata/config.tests.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
if [ $? -ne 0 ]; then
echo "failed running backtest"

View File

@@ -1,6 +1,6 @@
{
"max_open_trades": 3,
"stake_currency": "BTC",
"stake_currency": "USDT",
"stake_amount": 0.05,
"tradable_balance_ratio": 0.99,
"fiat_display_currency": "USD",
@@ -36,21 +36,21 @@
"ccxt_async_config": {
},
"pair_whitelist": [
"ALGO/BTC",
"ATOM/BTC",
"BAT/BTC",
"BCH/BTC",
"BRD/BTC",
"EOS/BTC",
"ETH/BTC",
"IOTA/BTC",
"LINK/BTC",
"LTC/BTC",
"NEO/BTC",
"NXS/BTC",
"XMR/BTC",
"XRP/BTC",
"XTZ/BTC"
"ALGO/USDT",
"ATOM/USDT",
"BAT/USDT",
"BCH/USDT",
"BRD/USDT",
"EOS/USDT",
"ETH/USDT",
"IOTA/USDT",
"LINK/USDT",
"LTC/USDT",
"NEO/USDT",
"NXS/USDT",
"XMR/USDT",
"XRP/USDT",
"XTZ/USDT"
],
"pair_blacklist": [
"BNB/.*"

View File

@@ -1,4 +1,4 @@
FROM python:3.9.16-slim-bullseye as base
FROM python:3.11.6-slim-bookworm as base
# Setup env
ENV LANG C.UTF-8
@@ -11,34 +11,31 @@ ENV FT_APP_ENV="docker"
# Prepare environment
RUN mkdir /freqtrade \
&& apt-get update \
&& apt-get -y install sudo libatlas3-base curl sqlite3 libhdf5-dev libutf8proc-dev libsnappy-dev \
&& apt-get -y install sudo libatlas3-base libopenblas-dev curl sqlite3 libhdf5-dev libutf8proc-dev libsnappy-dev \
&& apt-get clean \
&& useradd -u 1000 -G sudo -U -m ftuser \
&& chown ftuser:ftuser /freqtrade \
# Allow sudoers
&& echo "ftuser ALL=(ALL) NOPASSWD: /bin/chown" >> /etc/sudoers
&& echo "ftuser ALL=(ALL) NOPASSWD: /bin/chown" >> /etc/sudoers \
&& pip install --upgrade pip
WORKDIR /freqtrade
# Install dependencies
FROM base as python-deps
RUN apt-get update \
&& apt-get -y install build-essential libssl-dev libffi-dev libopenblas-dev libgfortran5 pkg-config cmake gcc \
&& apt-get -y install build-essential libssl-dev libffi-dev libgfortran5 pkg-config cmake gcc \
&& apt-get clean \
&& pip install --upgrade pip \
&& echo "[global]\nextra-index-url=https://www.piwheels.org/simple" > /etc/pip.conf
# Install TA-lib
COPY build_helpers/* /tmp/
RUN cd /tmp && /tmp/install_ta-lib.sh && rm -r /tmp/*ta-lib*
ENV LD_LIBRARY_PATH /usr/local/lib
# Install dependencies
COPY --chown=ftuser:ftuser requirements.txt /freqtrade/
USER ftuser
RUN pip install --user --no-cache-dir numpy==1.25.2 \
&& pip install --user /tmp/pyarrow-*.whl \
&& pip install --user --no-build-isolation TA-Lib==0.4.28 \
RUN pip install --user --no-cache-dir numpy \
&& pip install --user --no-index --find-links /tmp/ pyarrow TA-Lib==0.4.28 \
&& pip install --user --no-cache-dir -r requirements.txt
# Copy dependencies to runtime-image

View File

@@ -170,11 +170,11 @@ freqtrade backtesting --strategy AwesomeStrategy --dry-run-wallet 1000
Using a different on-disk historical candle (OHLCV) data source
Assume you downloaded the history data from the Bittrex exchange and kept it in the `user_data/data/bittrex-20180101` directory.
Assume you downloaded the history data from the Binance exchange and kept it in the `user_data/data/binance-20180101` directory.
You can then use this data for backtesting as follows:
```bash
freqtrade backtesting --strategy AwesomeStrategy --datadir user_data/data/bittrex-20180101
freqtrade backtesting --strategy AwesomeStrategy --datadir user_data/data/binance-20180101
```
---
@@ -618,13 +618,13 @@ To compare multiple strategies, a list of Strategies can be provided to backtest
This is limited to 1 timeframe value per run. However, data is only loaded once from disk so if you have multiple
strategies you'd like to compare, this will give a nice runtime boost.
All listed Strategies need to be in the same directory.
All listed Strategies need to be in the same directory, unless `--recursive-strategy-search` is also specified, in which case sub-directories within the strategy directory are also considered.
``` bash
freqtrade backtesting --timerange 20180401-20180410 --timeframe 5m --strategy-list Strategy001 Strategy002 --export trades
```
This will save the results to `user_data/backtest_results/backtest-result-<strategy>.json`, injecting the strategy-name into the target filename.
This will save the results to `user_data/backtest_results/backtest-result-<datetime>.json`, including results for both `Strategy001` and `Strategy002`.
There will be an additional table comparing win/losses of the different strategies (identical to the "Total" row in the first table).
Detailed output for all strategies one after the other will be available, so make sure to scroll up to see the details per strategy.

View File

@@ -594,7 +594,7 @@ creating trades on the exchange.
```json
"exchange": {
"name": "bittrex",
"name": "binance",
"key": "key",
"secret": "secret",
...
@@ -644,7 +644,7 @@ API Keys are usually only required for live trading (trading for real money, bot
```json
{
"exchange": {
"name": "bittrex",
"name": "binance",
"key": "af8ddd35195e9dc500b9a6f799f6f5c93d89193b",
"secret": "08a9dc6db3d7b53e1acebd9275677f4b0a04f1a5",
//"password": "", // Optional, not needed by all exchanges)

View File

@@ -318,6 +318,7 @@ Additional tests / steps to complete:
* Check if balance shows correctly (*)
* Create market order (*)
* Create limit order (*)
* Cancel order (*)
* Complete trade (enter + exit) (*)
* Compare result calculation between exchange and bot
* Ensure fees are applied correctly (check the database against the exchange)
@@ -418,6 +419,9 @@ This part of the documentation is aimed at maintainers, and shows how to create
### Create release branch
!!! Note
Make sure that the `stable` branch is up-to-date!
First, pick a commit that's about one week old (to not include latest additions to releases).
``` bash
@@ -430,14 +434,11 @@ Determine if crucial bugfixes have been made between this commit and the current
* Merge the release branch (stable) into this branch.
* Edit `freqtrade/__init__.py` and add the version matching the current date (for example `2019.7` for July 2019). Minor versions can be `2019.7.1` should we need to do a second release that month. Version numbers must follow allowed versions from PEP0440 to avoid failures pushing to pypi.
* Commit this part.
* push that branch to the remote and create a PR against the stable branch.
* Push that branch to the remote and create a PR against the **stable branch**.
* Update develop version to next version following the pattern `2019.8-dev`.
### Create changelog from git commits
!!! Note
Make sure that the `stable` branch is up-to-date!
``` bash
# Needs to be done before merging / pulling that branch.
git log --oneline --no-decorate --no-merges stable..new_release

View File

@@ -302,6 +302,24 @@ We do strongly recommend to limit all API keys to the IP you're going to use it
Bybit (futures only) supports `stoploss_on_exchange` and uses `stop-loss-limit` orders. It provides great advantages, so we recommend to benefit from it by enabling stoploss on exchange.
On futures, Bybit supports both `stop-limit` as well as `stop-market` orders. You can use either `"limit"` or `"market"` in the `order_types.stoploss` configuration setting to decide which type to use.
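For illustration, a minimal Python sketch of the `order_types` setting described above, as it could appear inside a strategy; only the documented keys are used, and the surrounding strategy class is assumed:

```python
# Hedged sketch: an order_types mapping a strategy could use on Bybit futures.
# Per the note above, "limit" and "market" are both accepted for the stoploss entry.
order_types = {
    "entry": "limit",
    "exit": "limit",
    "stoploss": "limit",           # or "market" for a stop-market order
    "stoploss_on_exchange": True,  # let Bybit place and manage the stop order
}
```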
## Bitmart
Bitmart requires the API key Memo (the name you give the API key) to go along with the exchange key and secret.
It's therefore required to pass the UID as well.
```json
"exchange": {
"name": "bitmart",
"uid": "your_bitmart_api_key_memo",
"secret": "your_exchange_secret",
"password": "your_exchange_api_key_password",
// ...
}
```
!!! Warning "Necessary Verification"
Bitmart requires Verification Lvl2 to trade on the spot market through the API - even though trading via the UI works fine with Lvl1 verification.
## All exchanges
Should you experience constant errors with Nonce (like `InvalidNonce`), it is best to regenerate the API keys. Resetting Nonce is difficult and it's usually easier to regenerate the API keys.

View File

@@ -128,12 +128,6 @@ This warning can point to one of the below problems:
* Barely traded pair -> Check the pair on the exchange webpage, look at the timeframe your strategy uses. If the pair does not have any volume in some candles (usually visualized with a "volume 0" bar, and a "_" as candle), this pair did not have any trades in this timeframe. These pairs should ideally be avoided, as they can cause problems with order-filling.
* API problem -> API returns wrong data (this only here for completeness, and should not happen with supported exchanges).
### I'm getting the "RESTRICTED_MARKET" message in the log
Currently known to happen for US Bittrex users.
Read [the Bittrex section about restricted markets](exchanges.md#restricted-markets) for more information.
### I'm getting the "Exchange XXX does not support market orders." message and cannot run my strategy
As the message says, your exchange does not support market orders and you have one of the [order types](configuration.md/#understand-order_types) set to "market". Your strategy was probably written with other exchanges in mind and sets "market" orders for "stoploss" orders, which is correct and preferable for most of the exchanges supporting market orders (but not for Bittrex and Gate.io).

View File

@@ -7,7 +7,7 @@ Low level feature engineering is performed in the user strategy within a set of
| Function | Description |
|---------------|-------------|
| `feature_engineering_expand_all()` | This optional function will automatically expand the defined features on the config defined `indicator_periods_candles`, `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`.
| `feature_engineering_expand_basic()` | This optional function will automatically expand the defined features on the config defined `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`. Note: this function does *not* expand across `include_periods_candles`.
| `feature_engineering_expand_basic()` | This optional function will automatically expand the defined features on the config defined `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`. Note: this function does *not* expand across `indicator_periods_candles`.
| `feature_engineering_standard()` | This optional function will be called once with the dataframe of the base timeframe. This is the final function to be called, which means that the dataframe entering this function will contain all the features and columns from the base asset created by the other `feature_engineering_expand` functions. This function is a good place to do custom exotic feature extractions (e.g. tsfresh). This function is also a good place for any feature that should not be auto-expanded upon (e.g., day of the week).
| `set_freqai_targets()` | Required function to set the targets for the model. All targets must be prepended with `&` to be recognized by the FreqAI internals.
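The following is a minimal, hypothetical sketch of how these callbacks fit together in a FreqAI strategy. The exact signatures (keyword arguments in particular) are assumptions based on recent freqtrade versions, and the feature/target names are illustrative only:

```python
import talib.abstract as ta
from pandas import DataFrame


class FreqaiFeatureSketch:
    def feature_engineering_expand_all(self, dataframe: DataFrame, period: int, **kwargs) -> DataFrame:
        # expanded across indicator_periods_candles, include_timeframes,
        # include_shifted_candles and include_corr_pairs
        dataframe["%-rsi-period"] = ta.RSI(dataframe, timeperiod=period)
        return dataframe

    def feature_engineering_expand_basic(self, dataframe: DataFrame, **kwargs) -> DataFrame:
        # expanded across timeframes / shifts / corr-pairs, but NOT indicator_periods_candles
        dataframe["%-pct-change"] = dataframe["close"].pct_change()
        return dataframe

    def feature_engineering_standard(self, dataframe: DataFrame, **kwargs) -> DataFrame:
        # called once on the base timeframe, after all expansions have happened
        dataframe["%-day_of_week"] = dataframe["date"].dt.dayofweek
        return dataframe

    def set_freqai_targets(self, dataframe: DataFrame, **kwargs) -> DataFrame:
        # targets must be prepended with "&"
        dataframe["&-target"] = dataframe["close"].shift(-4) / dataframe["close"] - 1
        return dataframe
```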

View File

@@ -74,7 +74,6 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
| | **Reinforcement Learning Parameters within the `freqai.rl_config` sub dictionary**
| `rl_config` | A dictionary containing the control parameters for a Reinforcement Learning model. <br> **Datatype:** Dictionary.
| `train_cycles` | Training time steps will be set based on `train_cycles * number of training data points`. <br> **Datatype:** Integer.
| `cpu_count` | Number of processors to dedicate to the Reinforcement Learning training process. <br> **Datatype:** int.
| `max_trade_duration_candles`| Guides the agent training to keep trades below desired length. Example usage shown in `prediction_models/ReinforcementLearner.py` within the customizable `calculate_reward()` function. <br> **Datatype:** int.
| `model_type` | Model string from stable_baselines3 or SBcontrib. Available strings include: `'TRPO', 'ARS', 'RecurrentPPO', 'MaskablePPO', 'PPO', 'A2C', 'DQN'`. Users should ensure that `model_training_parameters` match those available to the corresponding stable_baselines3 model by visiting their documentation. [PPO doc](https://stable-baselines3.readthedocs.io/en/master/modules/ppo.html) (external website) <br> **Datatype:** string.
| `policy_type` | One of the available policy types from stable_baselines3 <br> **Datatype:** string.
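For reference, a sketch of an `rl_config` block using only parameters from the table above, written here as a Python dict for illustration (in a live setup this would sit in the JSON configuration); all values are placeholders:

```python
rl_config = {
    "train_cycles": 25,                 # training steps = train_cycles * training data points
    "max_trade_duration_candles": 300,  # nudges the agent to keep trades shorter than this
    "model_type": "PPO",                # any stable_baselines3 / SBcontrib model string
    "policy_type": "MlpPolicy",         # stable_baselines3 policy type
}
```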

View File

@@ -337,11 +337,15 @@ There are four parameter types each suited for different purposes.
* `CategoricalParameter` - defines a parameter with a predetermined number of choices.
* `BooleanParameter` - Shorthand for `CategoricalParameter([True, False])` - great for "enable" parameters.
!!! Tip "Disabling parameter optimization"
Each parameter takes two boolean parameters:
* `load` - when set to `False` it will not load values configured in `buy_params` and `sell_params`.
* `optimize` - when set to `False` parameter will not be included in optimization process.
Use these parameters to quickly prototype various ideas.
### Parameter options
There are two parameter options that can help you to quickly test various ideas:
* `optimize` - when set to `False`, the parameter will not be included in optimization process. (Default: True)
* `load` - when set to `False`, results of a previous hyperopt run (in `buy_params` and `sell_params` either in your strategy or the JSON output file) will not be used as the starting value for subsequent hyperopts. The default value specified in the parameter will be used instead. (Default: True)
!!! Tip "Effects of `load=False` on backtesting"
Be aware that setting the `load` option to `False` will mean backtesting will also use the default value specified in the parameter and *not* the value found through hyperoptimisation.
!!! Warning
Hyperoptable parameters cannot be used in `populate_indicators` - as hyperopt does not recalculate indicators for each epoch, so the starting value would be used in this case.
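A minimal sketch of how `optimize` and `load` could be applied, assuming freqtrade's hyperopt parameter classes; in a real strategy these would be class attributes, and the names and ranges here are purely illustrative:

```python
from freqtrade.strategy import DecimalParameter, IntParameter

# optimized, and will start from a value previously stored in buy_params (if any)
buy_rsi = IntParameter(10, 40, default=30, space="buy")

# excluded from optimization - the default value is always used
sell_rsi = IntParameter(60, 90, default=70, space="sell", optimize=False)

# optimized, but previous hyperopt results are ignored as the starting value
buy_adx = DecimalParameter(20, 40, default=25, space="buy", load=False)
```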

View File

@@ -40,7 +40,7 @@ Freqtrade is a free and open source crypto trading bot written in Python. It is
Please read the [exchange specific notes](exchanges.md) to learn about eventual, special configurations needed for each exchange.
- [X] [Binance](https://www.binance.com/)
- [X] [Bittrex](https://bittrex.com/)
- [X] [Bitmart](https://bitmart.com/)
- [X] [Gate.io](https://www.gate.io/ref/6266643)
- [X] [Huobi](http://huobi.com/)
- [X] [Kraken](https://kraken.com/)

View File

@@ -1,6 +1,6 @@
markdown==3.5
markdown==3.5.1
mkdocs==1.5.3
mkdocs-material==9.4.4
mkdocs-material==9.5.2
mdx_truly_sane_lists==1.3
pymdown-extensions==10.3
pymdown-extensions==10.5
jinja2==3.1.2

View File

@@ -134,13 +134,16 @@ python3 scripts/rest_client.py --config rest_config.json <command> [optional par
| `reload_config` | Reloads the configuration file.
| `trades` | List last trades. Limited to 500 trades per call.
| `trade/<tradeid>` | Get specific trade.
| `trade/<tradeid>` | DELETE - Remove trade from the database. Tries to close open orders. Requires manual handling of this trade on the exchange.
| `trade/<tradeid>/open-order` | DELETE - Cancel open order for this trade.
| `trade/<tradeid>/reload` | GET - Reload a trade from the Exchange. Only works in live, and can potentially help recover a trade that was manually sold on the exchange.
| `trades/<tradeid>` | DELETE - Remove trade from the database. Tries to close open orders. Requires manual handling of this trade on the exchange.
| `trades/<tradeid>/open-order` | DELETE - Cancel open order for this trade.
| `trades/<tradeid>/reload` | GET - Reload a trade from the Exchange. Only works in live, and can potentially help recover a trade that was manually sold on the exchange.
| `show_config` | Shows part of the current configuration with relevant settings to operation.
| `logs` | Shows last log messages.
| `status` | Lists all open trades.
| `count` | Displays number of trades used and available.
| `entries [pair]` | Shows profit statistics for each entry tag for the given pair (or all pairs if pair isn't given). Pair is optional.
| `exits [pair]` | Shows profit statistics for each exit reason for the given pair (or all pairs if pair isn't given). Pair is optional.
| `mix_tags [pair]` | Shows profit statistics for each combination of entry tag and exit reason for the given pair (or all pairs if pair isn't given). Pair is optional.
| `locks` | Displays currently locked pairs.
| `delete_lock <lock_id>` | Deletes (disables) the lock by id.
| `profit` | Display a summary of your profit/loss from close trades and some stats about your performance.

View File

@@ -760,9 +760,9 @@ The `position_adjustment_enable` strategy property enables the usage of `adjust_
For performance reasons, it's disabled by default and freqtrade will show a warning message on startup if enabled.
`adjust_trade_position()` can be used to perform additional orders, for example to manage risk with DCA (Dollar Cost Averaging) or to increase or decrease positions.
`max_entry_position_adjustment` property is used to limit the number of additional buys per trade (on top of the first buy) that the bot can execute. By default, the value is -1 which means the bot have no limit on number of adjustment buys.
The `max_entry_position_adjustment` property is used to limit the number of additional entries per trade (on top of the first entry order) that the bot can execute. By default, the value is -1, which means the bot has no limit on the number of adjustment entries.
The strategy is expected to return a stake_amount (in stake currency) between `min_stake` and `max_stake` if and when an additional buy order should be made (position is increased).
The strategy is expected to return a stake_amount (in stake currency) between `min_stake` and `max_stake` if and when an additional entry order should be made (position is increased -> buy order for long trades, sell order for short trades).
If there are not enough funds in the wallet (the return value is above `max_stake`) then the signal will be ignored.
Additional orders also result in additional fees and those orders don't count towards `max_open_trades`.
@@ -770,9 +770,11 @@ This callback is **not** called when there is an open order (either buy or sell)
`adjust_trade_position()` is called very frequently for the duration of a trade, so you must keep your implementation as performant as possible.
Additional Buys are ignored once you have reached the maximum amount of extra buys that you have set on `max_entry_position_adjustment`, but the callback is called anyway looking for partial exits.
Additional entries are ignored once you have reached the maximum amount of extra entries that you have set on `max_entry_position_adjustment`, but the callback is called anyway looking for partial exits.
Position adjustments will always be applied in the direction of the trade, so a positive value will always increase your position (negative values will decrease your position), no matter if it's a long or short trade. Modifications to leverage are not possible, and the stake-amount is assumed to be before applying leverage.
Position adjustments will always be applied in the direction of the trade, so a positive value will always increase your position (negative values will decrease your position), no matter if it's a long or short trade.
Modifications to leverage are not possible, and the stake-amount returned is assumed to be before applying leverage.
!!! Note "About stake size"
Using fixed stake size means it will be the amount used for the first order, just like without position adjustment.
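As a rough illustration (not the documented example strategy), a DCA-style `adjust_trade_position()` could look like the sketch below; the callback signature is assumed from recent freqtrade versions, and the thresholds are arbitrary:

```python
from datetime import datetime
from typing import Optional

from freqtrade.persistence import Trade


class DcaSketch:
    position_adjustment_enable = True
    max_entry_position_adjustment = 2  # at most two extra entries per trade

    def adjust_trade_position(self, trade: Trade, current_time: datetime,
                              current_rate: float, current_profit: float,
                              min_stake: Optional[float], max_stake: float,
                              **kwargs) -> Optional[float]:
        # Add to the position once the trade is more than 5% under water.
        if current_profit < -0.05 and trade.nr_of_successful_entries <= self.max_entry_position_adjustment:
            # Positive return value -> increase the position by half the current stake.
            return trade.stake_amount / 2
        return None
```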

View File

@@ -173,7 +173,7 @@ You can use [recursive-analysis](recursive-analysis.md) to check and find the co
In this example strategy, this should be set to 400 (`startup_candle_count = 400`), since the minimum needed history for ema100 calculation to make sure the value is correct is 400 candles.
``` python
dataframe['ema100'] = ta.EMA(dataframe, timeperiod=400)
dataframe['ema100'] = ta.EMA(dataframe, timeperiod=100)
```
By letting the bot know how much history is needed, backtest trades can start at the specified timerange during backtesting and hyperopt.
@@ -486,17 +486,18 @@ for more information.
:param timeframe: Informative timeframe. Must always be equal or higher than strategy timeframe.
:param asset: Informative asset, for example BTC, BTC/USDT, ETH/BTC. Do not specify to use
current pair.
current pair. Also supports limited pair format strings (see below)
:param fmt: Column format (str) or column formatter (callable(name, asset, timeframe)). When not
specified, defaults to:
* {base}_{quote}_{column}_{timeframe} if asset is specified.
* {column}_{timeframe} if asset is not specified.
Format string supports these format variables:
* {asset} - full name of the asset, for example 'BTC/USDT'.
Pair format supports these format variables:
* {base} - base currency in lower case, for example 'eth'.
* {BASE} - same as {base}, except in upper case.
* {quote} - quote currency in lower case, for example 'usdt'.
* {QUOTE} - same as {quote}, except in upper case.
Format string additionally supports this variables.
* {asset} - full name of the asset, for example 'BTC/USDT'.
* {column} - name of dataframe column.
* {timeframe} - timeframe of informative dataframe.
:param ffill: ffill dataframe after merging informative pair.
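A hypothetical usage sketch of the decorator documented above (assuming `informative` is exported from `freqtrade.strategy`); with the default format string, the BTC/USDT columns would appear as e.g. `btc_usdt_rsi_1h`:

```python
import talib.abstract as ta
from pandas import DataFrame

from freqtrade.strategy import informative


class InformativeSketch:
    timeframe = "5m"

    @informative("1h", "BTC/USDT")
    def populate_indicators_btc_1h(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        dataframe["rsi"] = ta.RSI(dataframe, timeperiod=14)
        return dataframe
```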
@@ -1008,6 +1009,10 @@ The following lists some common patterns which should be avoided to prevent frus
- don't use `dataframe['volume'].mean()`. This uses the full DataFrame for backtesting, including data from the future. Use `dataframe['volume'].rolling(<window>).mean()` instead
- don't use `.resample('1h')`. This uses the left border of the interval, so moves data from an hour to the start of the hour. Use `.resample('1h', label='right')` instead.
!!! Tip "Identifying problems"
You may also want to check the two helper commands [lookahead-analysis](lookahead-analysis.md) and [recursive-analysis](recursive-analysis.md), which can each help you figure out problems with your strategy in different ways.
Please treat them as what they are - helpers to identify the most common problems. A negative result from either does not guarantee that none of the above errors are present.
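To make the first bullet above concrete, here is a small, self-contained illustration of the difference between a full-series mean (which leaks future data into every backtest candle) and a rolling mean:

```python
import pandas as pd

df = pd.DataFrame({"volume": [10.0, 12.0, 8.0, 15.0, 11.0]})

# Lookahead bias: every row sees the mean of the *entire* backtest range.
df["vol_mean_biased"] = df["volume"].mean()

# Safe: each row only sees the mean of the last 3 candles (including itself).
df["vol_mean_ok"] = df["volume"].rolling(3).mean()
print(df)
```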
### Colliding signals
When conflicting signals collide (e.g. both `'enter_long'` and `'exit_long'` are 1), freqtrade will do nothing and ignore the entry signal. This will avoid trades that enter, and exit immediately. Obviously, this can potentially lead to missed entries.

View File

@@ -570,7 +570,7 @@ def populate_any_indicators(
```
1. Features - Move to `feature_engineering_expand_all`
2. Basic features, not expanded across `include_periods_candles` - move to`feature_engineering_expand_basic()`.
2. Basic features, not expanded across `indicator_periods_candles` - move to`feature_engineering_expand_basic()`.
3. Standard features which should not be expanded - move to `feature_engineering_standard()`.
4. Targets - Move this part to `set_freqai_targets()`.

View File

@@ -175,6 +175,7 @@ official commands. You can ask at any moment for help with `/help`.
| `/status` | Lists all open trades
| `/status <trade_id>` | Lists one or more specific trade. Separate multiple <trade_id> with a blank space.
| `/status table` | List all open trades in a table format. Pending buy orders are marked with an asterisk (*) Pending sell orders are marked with a double asterisk (**)
| `/order <trade_id>` | Lists orders of one or more specific trade. Separate multiple <trade_id> with a blank space.
| `/trades [limit]` | List all recently closed trades in a table format.
| `/count` | Displays number of trades used and available
| `/locks` | Show currently locked pairs.

View File

@@ -427,25 +427,33 @@ zb True missing opt: fetchMyTrades
Use the `list-timeframes` subcommand to see the list of timeframes available for the exchange.
```
usage: freqtrade list-timeframes [-h] [-v] [--logfile FILE] [-V] [-c PATH] [-d PATH] [--userdir PATH] [--exchange EXCHANGE] [-1]
usage: freqtrade list-timeframes [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[-d PATH] [--userdir PATH]
[--exchange EXCHANGE] [-1]
optional arguments:
options:
-h, --help show this help message and exit
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no config is provided.
--exchange EXCHANGE Exchange name. Only valid if no config is provided.
-1, --one-column Print output in one column.
Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
--logfile FILE Log to the file specified. Special values are: 'syslog', 'journald'. See the documentation for more details.
--logfile FILE, --log-file FILE
Log to the file specified. Special values are:
'syslog', 'journald'. See the documentation for more
details.
-V, --version show program's version number and exit
-c PATH, --config PATH
Specify configuration file (default: `config.json`). Multiple --config options may be used. Can be set to `-`
to read config from stdin.
-d PATH, --datadir PATH
Specify configuration file (default:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
```
* Example: see the timeframes for the 'binance' exchange, set in the configuration file:
@@ -479,20 +487,17 @@ usage: freqtrade list-markets [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[-d PATH] [--userdir PATH] [--exchange EXCHANGE]
[--print-list] [--print-json] [-1] [--print-csv]
[--base BASE_CURRENCY [BASE_CURRENCY ...]]
[--quote QUOTE_CURRENCY [QUOTE_CURRENCY ...]] [-a]
[--trading-mode {spot,margin,futures}]
[--quote QUOTE_CURRENCY [QUOTE_CURRENCY ...]]
[-a] [--trading-mode {spot,margin,futures}]
usage: freqtrade list-pairs [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[-d PATH] [--userdir PATH] [--exchange EXCHANGE]
[--print-list] [--print-json] [-1] [--print-csv]
[--base BASE_CURRENCY [BASE_CURRENCY ...]]
[--quote QUOTE_CURRENCY [QUOTE_CURRENCY ...]] [-a]
[--trading-mode {spot,margin,futures}]
optional arguments:
options:
-h, --help show this help message and exit
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
config is provided.
--exchange EXCHANGE Exchange name. Only valid if no config is provided.
--print-list Print list of pairs or market symbols. By default data
is printed in the tabular format.
--print-json Print list of pairs or market symbols in JSON format.
@@ -504,20 +509,22 @@ optional arguments:
Specify quote currency(-ies). Space-separated list.
-a, --all Print all pairs or market symbols. By default only
active ones are shown.
--trading-mode {spot,margin,futures}
--trading-mode {spot,margin,futures}, --tradingmode {spot,margin,futures}
Select Trading mode
Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
--logfile FILE Log to the file specified. Special values are:
--logfile FILE, --log-file FILE
Log to the file specified. Special values are:
'syslog', 'journald'. See the documentation for more
details.
-V, --version show program's version number and exit
-c PATH, --config PATH
Specify configuration file (default: `config.json`).
Multiple --config options may be used. Can be set to
`-` to read config from stdin.
-d PATH, --datadir PATH
Specify configuration file (default:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
@@ -532,7 +539,7 @@ Pairs/markets are sorted by its symbol string in the printed output.
### Examples
* Print the list of active pairs with quote currency USD on exchange, specified in the default
configuration file (i.e. pairs on the "Bittrex" exchange) in JSON format:
configuration file (i.e. pairs on the "Binance" exchange) in JSON format:
```
$ freqtrade list-pairs --quote USD --print-json
@@ -564,7 +571,7 @@ usage: freqtrade test-pairlist [-h] [--userdir PATH] [-v] [-c PATH]
[--quote QUOTE_CURRENCY [QUOTE_CURRENCY ...]]
[-1] [--print-json] [--exchange EXCHANGE]
optional arguments:
options:
-h, --help show this help message and exit
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
@@ -578,8 +585,7 @@ optional arguments:
Specify quote currency(-ies). Space-separated list.
-1, --one-column Print output in one column.
--print-json Print list of pairs or market symbols in JSON format.
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
config is provided.
--exchange EXCHANGE Exchange name. Only valid if no config is provided.
```

View File

@@ -302,6 +302,7 @@ You can configure this as follows:
```
The above represents the default (`exit_fill` and `entry_fill` are optional and will default to the above configuration) - modifications are obviously possible.
To disable either of the two default values (`entry_fill` / `exit_fill`), you can assign them an empty array (`exit_fill: []`).
Available fields correspond to the fields for webhooks and are documented in the corresponding webhook sections.

View File

@@ -1,5 +1,5 @@
""" Freqtrade bot """
__version__ = '2023.10-dev'
__version__ = '2023.12-dev'
if 'dev' in __version__:
from pathlib import Path

View File

@@ -108,7 +108,6 @@ def ask_user_config() -> Dict[str, Any]:
"choices": [
"binance",
"binanceus",
"bittrex",
"gate",
"huobi",
"kraken",

View File

@@ -67,7 +67,7 @@ def validate_config_schema(conf: Dict[str, Any], preliminary: bool = False) -> D
)
def validate_config_consistency(conf: Dict[str, Any], preliminary: bool = False) -> None:
def validate_config_consistency(conf: Dict[str, Any], *, preliminary: bool = False) -> None:
"""
Validate the configuration consistency.
Should be ran after loading both configuration and strategy,
@@ -86,7 +86,7 @@ def validate_config_consistency(conf: Dict[str, Any], preliminary: bool = False)
_validate_ask_orderbook(conf)
_validate_freqai_hyperopt(conf)
_validate_freqai_backtest(conf)
_validate_freqai_include_timeframes(conf)
_validate_freqai_include_timeframes(conf, preliminary=preliminary)
_validate_consumers(conf)
validate_migrated_strategy_settings(conf)
@@ -335,7 +335,7 @@ def _validate_freqai_hyperopt(conf: Dict[str, Any]) -> None:
'Using analyze-per-epoch parameter is not supported with a FreqAI strategy.')
def _validate_freqai_include_timeframes(conf: Dict[str, Any]) -> None:
def _validate_freqai_include_timeframes(conf: Dict[str, Any], preliminary: bool) -> None:
freqai_enabled = conf.get('freqai', {}).get('enabled', False)
if freqai_enabled:
main_tf = conf.get('timeframe', '5m')
@@ -355,7 +355,7 @@ def _validate_freqai_include_timeframes(conf: Dict[str, Any]) -> None:
f"`include_timeframes`.Offending include-timeframes: {', '.join(offending_lines)}")
# Ensure that the base timeframe is included in the include_timeframes list
if main_tf not in freqai_include_timeframes:
if not preliminary and main_tf not in freqai_include_timeframes:
feature_parameters = conf.get('freqai', {}).get('feature_parameters', {})
include_timeframes = [main_tf] + freqai_include_timeframes
conf.get('freqai', {}).get('feature_parameters', {}) \

View File

@@ -464,8 +464,8 @@ def ohlcv_fill_up_missing_data(dataframe: DataFrame, timeframe: str, pair: str)
pct_missing = (len_after - len_before) / \
len_before if len_before > 0 else 0
if len_before != len_after:
message = (f"Missing data fillup for {pair}: before: {len_before} - after: {len_after}"
f" - {pct_missing:.2%}")
message = (f"Missing data fillup for {pair}, {timeframe}: "
f"before: {len_before} - after: {len_after} - {pct_missing:.2%}")
if pct_missing > 0.01:
logger.info(message)
else:

View File

@@ -211,8 +211,9 @@ def prepare_results(analysed_trades, stratname,
timerange=None):
res_df = pd.DataFrame()
for pair, trades in analysed_trades[stratname].items():
trades.dropna(subset=['close_date'], inplace=True)
res_df = pd.concat([res_df, trades], ignore_index=True)
if (trades.shape[0] > 0):
trades.dropna(subset=['close_date'], inplace=True)
res_df = pd.concat([res_df, trades], ignore_index=True)
res_df = _select_rows_within_dates(res_df, timerange)

View File

@@ -4,6 +4,7 @@ from freqtrade.exchange.common import remove_exchange_credentials, MAP_EXCHANGE_
from freqtrade.exchange.exchange import Exchange
# isort: on
from freqtrade.exchange.binance import Binance
from freqtrade.exchange.bitmart import Bitmart
from freqtrade.exchange.bitpanda import Bitpanda
from freqtrade.exchange.bittrex import Bittrex
from freqtrade.exchange.bitvavo import Bitvavo

File diff suppressed because it is too large

View File

@@ -0,0 +1,20 @@
""" Bitmart exchange subclass """
import logging
from typing import Dict
from freqtrade.exchange import Exchange
logger = logging.getLogger(__name__)
class Bitmart(Exchange):
"""
Bitmart exchange class. Contains adjustments needed for Freqtrade to work
with this exchange.
"""
_ft_has: Dict = {
"stoploss_on_exchange": False, # Bitmart API does not support stoploss orders
"ohlcv_candle_limit": 200,
}

View File

@@ -7,7 +7,7 @@ import ccxt
from freqtrade.constants import BuySell
from freqtrade.enums import CandleType, MarginMode, PriceType, TradingMode
from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
from freqtrade.exceptions import DDosProtection, ExchangeError, OperationalException, TemporaryError
from freqtrade.exchange import Exchange
from freqtrade.exchange.common import retrier
from freqtrade.util.datetime_helpers import dt_now, dt_ts
@@ -29,6 +29,7 @@ class Bybit(Exchange):
_ft_has: Dict = {
"ohlcv_candle_limit": 1000,
"ohlcv_has_history": True,
"order_time_in_force": ["GTC", "FOK", "IOC", "PO"],
}
_ft_has_futures: Dict = {
"ohlcv_has_history": True,
@@ -202,8 +203,11 @@ class Bybit(Exchange):
"""
# Bybit does not provide "applied" funding fees per position.
if self.trading_mode == TradingMode.FUTURES:
return self._fetch_and_calculate_funding_fees(
pair, amount, is_short, open_date)
try:
return self._fetch_and_calculate_funding_fees(
pair, amount, is_short, open_date)
except ExchangeError:
logger.warning(f"Could not update funding fees for {pair}.")
return 0.0
def fetch_orders(self, pair: str, since: datetime, params: Optional[Dict] = None) -> List[Dict]:

View File

@@ -52,7 +52,7 @@ MAP_EXCHANGE_CHILDCLASS = {
SUPPORTED_EXCHANGES = [
'binance',
'bittrex',
'bitmart',
'gate',
'huobi',
'kraken',

View File

@@ -521,11 +521,14 @@ class Exchange:
except ccxt.BaseError:
logger.exception('Unable to initialize markets.')
def reload_markets(self) -> None:
def reload_markets(self, force: bool = False) -> None:
"""Reload markets both sync and async if refresh interval has passed """
# Check whether markets have to be reloaded
if (self._last_markets_refresh > 0) and (
self._last_markets_refresh + self.markets_refresh_interval > dt_ts()):
if (
not force
and self._last_markets_refresh > 0
and (self._last_markets_refresh + self.markets_refresh_interval > dt_ts())
):
return None
logger.debug("Performing scheduled market reload..")
try:
@@ -1263,16 +1266,16 @@ class Exchange:
return order
except ccxt.InsufficientFunds as e:
raise InsufficientFundsError(
f'Insufficient funds to create {ordertype} sell order on market {pair}. '
f'Tried to sell amount {amount} at rate {limit_rate}. '
f'Message: {e}') from e
except ccxt.InvalidOrder as e:
f'Insufficient funds to create {ordertype} {side} order on market {pair}. '
f'Tried to {side} amount {amount} at rate {limit_rate} with '
f'stop-price {stop_price_norm}. Message: {e}') from e
except (ccxt.InvalidOrder, ccxt.BadRequest) as e:
# Errors:
# `Order would trigger immediately.`
raise InvalidOrderException(
f'Could not create {ordertype} sell order on market {pair}. '
f'Tried to sell amount {amount} at rate {limit_rate}. '
f'Message: {e}') from e
f'Could not create {ordertype} {side} order on market {pair}. '
f'Tried to {side} amount {amount} at rate {limit_rate} with '
f'stop-price {stop_price_norm}. Message: {e}') from e
except ccxt.DDoSProtection as e:
raise DDosProtection(e) from e
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
@@ -1531,8 +1534,9 @@ class Exchange:
@retrier
def fetch_bids_asks(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Dict:
"""
:param symbols: List of symbols to fetch
:param cached: Allow cached result
:return: fetch_tickers result
:return: fetch_bids_asks result
"""
if not self.exchange_has('fetchBidsAsks'):
return {}
@@ -1581,6 +1585,12 @@ class Exchange:
raise OperationalException(
f'Exchange {self._api.name} does not support fetching tickers in batch. '
f'Message: {e}') from e
except ccxt.BadSymbol as e:
logger.warning(f"Could not load tickers due to {e.__class__.__name__}. Message: {e} ."
"Reloading markets.")
self.reload_markets(True)
# Re-raise exception to repeat the call.
raise TemporaryError from e
except ccxt.DDoSProtection as e:
raise DDosProtection(e) from e
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
@@ -2032,7 +2042,7 @@ class Exchange:
results = await asyncio.gather(*input_coro, return_exceptions=True)
for res in results:
if isinstance(res, Exception):
if isinstance(res, BaseException):
logger.warning(f"Async code raised an exception: {repr(res)}")
if raise_:
raise
@@ -2642,6 +2652,7 @@ class Exchange:
from_id = t[-1][1]
else:
logger.debug("Stopping as no more trades were returned.")
break
except asyncio.CancelledError:
logger.debug("Async operation Interrupted, breaking trades DL loop.")
@@ -2667,6 +2678,11 @@ class Exchange:
try:
t = await self._async_fetch_trades(pair, since=since)
if t:
# No more trades to download available at the exchange,
# So we repeatedly get the same trade over and over again.
if since == t[-1][0] and len(t) == 1:
logger.debug("Stopping because no more trades are available.")
break
since = t[-1][0]
trades.extend(t)
# Reached the end of the defined-download period
@@ -2675,6 +2691,7 @@ class Exchange:
f"Stopping because until was reached. {t[-1][0]} > {until}")
break
else:
logger.debug("Stopping as no more trades were returned.")
break
except asyncio.CancelledError:
logger.debug("Async operation Interrupted, breaking trades DL loop.")
@@ -3208,17 +3225,19 @@ class Exchange:
:param amount: Trade amount
:param open_date: Open date of the trade
:return: funding fee since open_date
:raises: ExchangeError if something goes wrong.
"""
if self.trading_mode == TradingMode.FUTURES:
if self._config['dry_run']:
funding_fees = self._fetch_and_calculate_funding_fees(
pair, amount, is_short, open_date)
else:
funding_fees = self._get_funding_fees_from_exchange(pair, open_date)
return funding_fees
else:
return 0.0
try:
if self._config['dry_run']:
funding_fees = self._fetch_and_calculate_funding_fees(
pair, amount, is_short, open_date)
else:
funding_fees = self._get_funding_fees_from_exchange(pair, open_date)
return funding_fees
except ExchangeError:
logger.warning(f"Could not update funding fees for {pair}.")
return 0.0
def get_liquidation_price(
self,

View File

@@ -159,7 +159,7 @@ class BaseEnvironment(gym.Env):
function is designed for tracking incremented objects,
events, actions inside the training environment.
For example, a user can call this to track the
frequency of occurence of an `is_valid` call in
frequency of occurrence of an `is_valid` call in
their `calculate_reward()`:
def calculate_reward(self, action: int) -> float:

View File

@@ -12,7 +12,6 @@ import numpy as np
import pandas as pd
import psutil
import rapidjson
from joblib import dump, load
from joblib.externals import cloudpickle
from numpy.typing import NDArray
from pandas import DataFrame
@@ -285,6 +284,10 @@ class FreqaiDataDrawer:
new_pred["date_pred"] = dataframe["date"]
hist_preds = self.historic_predictions[pair].copy()
# ensure both dataframes have the same date format so they can be merged
new_pred["date_pred"] = pd.to_datetime(new_pred["date_pred"])
hist_preds["date_pred"] = pd.to_datetime(hist_preds["date_pred"])
# find the closest common date between new_pred and historic predictions
# and cut off the new_pred dataframe at that date
common_dates = pd.merge(new_pred, hist_preds, on="date_pred", how="inner")
@@ -295,9 +298,10 @@ class FreqaiDataDrawer:
"predictions. You likely left your FreqAI instance offline "
f"for more than {len(dataframe.index)} candles.")
df_concat = pd.concat([hist_preds, new_pred], ignore_index=True, keys=hist_preds.keys())
# remove last row because we will append that later in append_model_predictions()
df_concat = df_concat.iloc[:-1]
# reindex new_pred columns to match the historic predictions dataframe
new_pred_reindexed = new_pred.reindex(columns=hist_preds.columns)
df_concat = pd.concat([hist_preds, new_pred_reindexed], ignore_index=True)
# any missing values will get zeroed out so users can see the exact
# downtime in FreqUI
df_concat = df_concat.fillna(0)
@@ -319,9 +323,9 @@ class FreqaiDataDrawer:
index = self.historic_predictions[pair].index[-1:]
columns = self.historic_predictions[pair].columns
nan_df = pd.DataFrame(np.nan, index=index, columns=columns)
zeros_df = pd.DataFrame(np.zeros((1, len(columns))), index=index, columns=columns)
self.historic_predictions[pair] = pd.concat(
[self.historic_predictions[pair], nan_df], ignore_index=True, axis=0)
[self.historic_predictions[pair], zeros_df], ignore_index=True, axis=0)
df = self.historic_predictions[pair]
# model outputs and associated statistics
@@ -472,7 +476,8 @@ class FreqaiDataDrawer:
# Save the trained model
if self.model_type == 'joblib':
dump(model, save_path / f"{dk.model_filename}_model.joblib")
with (save_path / f"{dk.model_filename}_model.joblib").open("wb") as fp:
cloudpickle.dump(model, fp)
elif self.model_type == 'keras':
model.save(save_path / f"{dk.model_filename}_model.h5")
elif self.model_type in ["stable_baselines3", "sb3_contrib", "pytorch"]:
@@ -559,7 +564,8 @@ class FreqaiDataDrawer:
if dk.live and coin in self.model_dictionary:
model = self.model_dictionary[coin]
elif self.model_type == 'joblib':
model = load(dk.data_path / f"{dk.model_filename}_model.joblib")
with (dk.data_path / f"{dk.model_filename}_model.joblib").open("rb") as fp:
model = cloudpickle.load(fp)
elif 'stable_baselines' in self.model_type or 'sb3_contrib' == self.model_type:
mod = importlib.import_module(
self.model_type, self.freqai_info['rl_config']['model_type'])

View File

@@ -244,7 +244,7 @@ class FreqaiDataKitchen:
f"{self.pair}: dropped {len(unfiltered_df) - len(filtered_df)} training points"
f" due to NaNs in populated dataset {len(unfiltered_df)}."
)
if len(unfiltered_df) == 0 and not self.live:
if len(filtered_df) == 0 and not self.live:
raise OperationalException(
f"{self.pair}: all training data dropped due to NaNs. "
"You likely did not download enough training data prior "

View File

@@ -27,6 +27,12 @@ class PyTorchTransformerRegressor(BasePyTorchRegressor):
...
"freqai": {
...
"conv_width": 30, // PyTorchTransformer is based on windowing
"feature_parameters": {
...
"include_shifted_candles": 0, // which removes the need for shifted candles
...
},
"model_training_parameters" : {
"learning_rate": 3e-4,
"trainer_kwargs": {
@@ -120,16 +126,16 @@ class PyTorchTransformerRegressor(BasePyTorchRegressor):
# create empty torch tensor
self.model.model.eval()
yb = torch.empty(0).to(self.device)
if x.shape[1] > 1:
if x.shape[1] > self.window_size:
ws = self.window_size
for i in range(0, x.shape[1] - ws):
xb = x[:, i:i + ws, :].to(self.device)
y = self.model.model(xb)
yb = torch.cat((yb, y), dim=0)
yb = torch.cat((yb, y), dim=1)
else:
yb = self.model.model(x)
yb = yb.cpu().squeeze()
yb = yb.cpu().squeeze(0)
pred_df = pd.DataFrame(yb.detach().numpy(), columns=dk.label_list)
pred_df, _, _ = dk.label_pipeline.inverse_transform(pred_df)

View File

@@ -1,8 +1,9 @@
import logging
from pathlib import Path
from typing import Any, Dict, Type
from typing import Any, Dict, List, Optional, Type
import torch as th
from stable_baselines3.common.callbacks import ProgressBarCallback
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.freqai.RL.Base5ActionRLEnv import Actions, Base5ActionRLEnv, Positions
@@ -73,19 +74,27 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
'trained agent.')
model = self.dd.model_dictionary[dk.pair]
model.set_env(self.train_env)
callbacks: List[Any] = [self.eval_callback, self.tensorboard_callback]
progressbar_callback: Optional[ProgressBarCallback] = None
if self.rl_config.get('progress_bar', False):
progressbar_callback = ProgressBarCallback()
callbacks.insert(0, progressbar_callback)
model.learn(
total_timesteps=int(total_timesteps),
callback=[self.eval_callback, self.tensorboard_callback],
progress_bar=self.rl_config.get('progress_bar', False)
)
try:
model.learn(
total_timesteps=int(total_timesteps),
callback=callbacks,
)
finally:
if progressbar_callback:
progressbar_callback.on_training_end()
if Path(dk.data_path / "best_model.zip").is_file():
logger.info('Callback found a best model.')
best_model = self.MODELCLASS.load(dk.data_path / "best_model")
return best_model
logger.info('Couldnt find best model, using final model instead.')
logger.info("Couldn't find best model, using final model instead.")
return model

View File

@@ -45,7 +45,7 @@ class XGBoostRFRegressor(BaseRegressionModel):
model = XGBRFRegressor(**self.model_training_parameters)
model.set_params(callbacks=[TBCallback(dk.data_path)], activate=self.activate_tensorboard)
model.set_params(callbacks=[TBCallback(dk.data_path)])
model.fit(X=X, y=y, sample_weight=sample_weight, eval_set=eval_set,
sample_weight_eval_set=eval_weights, xgb_model=xgb_model)
# set the callbacks to empty so that we can serialize to disk later

View File

@@ -45,7 +45,7 @@ class XGBoostRegressor(BaseRegressionModel):
model = XGBRegressor(**self.model_training_parameters)
model.set_params(callbacks=[TBCallback(dk.data_path)], activate=self.activate_tensorboard)
model.set_params(callbacks=[TBCallback(dk.data_path)])
model.fit(X=X, y=y, sample_weight=sample_weight, eval_set=eval_set,
sample_weight_eval_set=eval_weights, xgb_model=xgb_model)
# set the callbacks to empty so that we can serialize to disk later

View File

@@ -3,7 +3,6 @@ from typing import Any, Dict, Type, Union
from stable_baselines3.common.callbacks import BaseCallback
from stable_baselines3.common.logger import HParam
from stable_baselines3.common.vec_env import VecEnv
from freqtrade.freqai.RL.BaseEnvironment import BaseActions
@@ -13,13 +12,9 @@ class TensorboardCallback(BaseCallback):
Custom callback for plotting additional values in tensorboard and
episodic summary reports.
"""
# Override training_env type to fix type errors
training_env: Union[VecEnv, None] = None
def __init__(self, verbose=1, actions: Type[Enum] = BaseActions):
super().__init__(verbose)
self.model: Any = None
self.logger: Any = None
self.actions: Type[Enum] = actions
def _on_training_start(self) -> None:
@@ -47,9 +42,13 @@ class TensorboardCallback(BaseCallback):
def _on_step(self) -> bool:
local_info = self.locals["infos"][0]
if self.training_env is None:
return True
tensorboard_metrics = self.training_env.get_attr("tensorboard_metrics")[0]
if hasattr(self.training_env, 'envs'):
tensorboard_metrics = self.training_env.envs[0].unwrapped.tensorboard_metrics
else:
# For RL-multiproc - usage of [0] might need to be evaluated
tensorboard_metrics = self.training_env.get_attr("tensorboard_metrics")[0]
for metric in local_info:
if metric not in ["episode", "terminal_observation"]:

View File

@@ -132,7 +132,7 @@ class FreqtradeBot(LoggingMixin):
# TODO: This would be more efficient if scheduled in utc time, and performed at each
# TODO: funding interval, specified by funding_fee_times on the exchange classes
for time_slot in range(0, 24):
for minutes in [0, 15, 30, 45]:
for minutes in [1, 31]:
t = str(time(time_slot, minutes, 2))
self._schedule.every().day.at(t).do(update)
self.last_process: Optional[datetime] = None
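A hedged standalone sketch of the new timing with the schedule library; the callback is a placeholder and only the minute values [1, 31] come from the change:

from datetime import time

import schedule


def refresh_funding_fees():
    ...  # placeholder for the actual update routine


for hour in range(0, 24):
    for minute in [1, 31]:
        # "HH:MM:SS" strings such as "08:01:02" are accepted by .at() for daily jobs
        schedule.every().day.at(str(time(hour, minute, 2))).do(refresh_funding_fees)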
@@ -199,6 +199,7 @@ class FreqtradeBot(LoggingMixin):
# Only update open orders on startup
# This will update the database after the initial migration
self.startup_update_open_orders()
self.update_funding_fees()
def process(self) -> None:
"""
@@ -315,17 +316,14 @@ class FreqtradeBot(LoggingMixin):
def update_funding_fees(self) -> None:
if self.trading_mode == TradingMode.FUTURES:
trades: List[Trade] = Trade.get_open_trades()
try:
for trade in trades:
funding_fees = self.exchange.get_funding_fees(
for trade in trades:
trade.set_funding_fees(
self.exchange.get_funding_fees(
pair=trade.pair,
amount=trade.amount,
is_short=trade.is_short,
open_date=trade.date_last_filled_utc
)
trade.funding_fees = funding_fees
except ExchangeError:
logger.warning("Could not update funding fees for open trades.")
open_date=trade.date_last_filled_utc)
)
def startup_backpopulate_precision(self) -> None:
@@ -374,17 +372,13 @@ class FreqtradeBot(LoggingMixin):
fo = order.to_ccxt_object()
fo['status'] = 'canceled'
self.handle_cancel_order(
fo, order.order_id, order.trade,
constants.CANCEL_REASON['TIMEOUT']
fo, order, order.trade, constants.CANCEL_REASON['TIMEOUT']
)
except ExchangeError as e:
logger.warning(f"Error updating Order {order.order_id} due to {e}")
if self.trading_mode == TradingMode.FUTURES:
self._schedule.run_pending()
def update_trades_without_assigned_fees(self) -> None:
"""
Update closed trades without close fees assigned.
@@ -586,7 +580,8 @@ class FreqtradeBot(LoggingMixin):
else:
self.log_once(f"Pair {pair} is currently locked.", logger.info)
return False
stake_amount = self.wallets.get_trade_stake_amount(pair, self.edge)
stake_amount = self.wallets.get_trade_stake_amount(
pair, self.config['max_open_trades'], self.edge)
bid_check_dom = self.config.get('entry_pricing', {}).get('check_depth_of_market', {})
if ((bid_check_dom.get('enabled', False)) and
@@ -836,14 +831,15 @@ class FreqtradeBot(LoggingMixin):
base_currency = self.exchange.get_pair_base_currency(pair)
open_date = datetime.now(timezone.utc)
funding_fees = self.exchange.get_funding_fees(
pair=pair,
amount=amount + trade.amount if trade else amount,
is_short=is_short,
open_date=trade.date_last_filled_utc if trade else open_date
)
# This is a new trade
if trade is None:
funding_fees = 0.0
try:
funding_fees = self.exchange.get_funding_fees(
pair=pair, amount=amount, is_short=is_short, open_date=open_date)
except ExchangeError:
logger.warning("Could not find funding fee.")
trade = Trade(
pair=pair,
@@ -879,6 +875,7 @@ class FreqtradeBot(LoggingMixin):
trade.is_open = True
trade.fee_open_currency = None
trade.open_rate_requested = enter_limit_requested
trade.set_funding_fees(funding_fees)
trade.orders.append(order_obj)
trade.recalc_trade_from_orders()
@@ -1336,6 +1333,7 @@ class FreqtradeBot(LoggingMixin):
:return: None
"""
for trade in Trade.get_open_trades():
open_order: Order
for open_order in trade.open_orders:
try:
order = self.exchange.fetch_order(open_order.order_id, trade.pair)
@@ -1356,22 +1354,23 @@ class FreqtradeBot(LoggingMixin):
)
):
self.handle_cancel_order(
order, open_order.order_id, trade, constants.CANCEL_REASON['TIMEOUT']
order, open_order, trade, constants.CANCEL_REASON['TIMEOUT']
)
else:
self.replace_order(order, open_order, trade)
def handle_cancel_order(self, order: Dict, order_id: str, trade: Trade, reason: str) -> None:
def handle_cancel_order(self, order: Dict, order_obj: Order, trade: Trade, reason: str) -> None:
"""
Check if current analyzed order timed out and cancel if necessary.
:param order: Order dict grabbed with exchange.fetch_order()
:param order_obj: Order object from the database.
:param trade: Trade object.
:return: None
"""
if order['side'] == trade.entry_side:
self.handle_cancel_enter(trade, order, order_id, reason)
self.handle_cancel_enter(trade, order, order_obj, reason)
else:
canceled = self.handle_cancel_exit(trade, order, order_id, reason)
canceled = self.handle_cancel_exit(trade, order, order_obj, reason)
canceled_count = trade.get_canceled_exit_order_count()
max_timeouts = self.config.get('unfilledtimeout', {}).get('exit_timeout_count', 0)
if (canceled and max_timeouts > 0 and canceled_count >= max_timeouts):
@@ -1445,7 +1444,7 @@ class FreqtradeBot(LoggingMixin):
cancel_reason = constants.CANCEL_REASON['USER_CANCEL']
if order_obj.price != adjusted_entry_price:
# cancel existing order if new price is supplied or None
res = self.handle_cancel_enter(trade, order, order_obj.order_id, cancel_reason,
res = self.handle_cancel_enter(trade, order, order_obj, cancel_reason,
replacing=replacing)
if not res:
self.replace_order_failed(
@@ -1486,25 +1485,27 @@ class FreqtradeBot(LoggingMixin):
if order['side'] == trade.entry_side:
self.handle_cancel_enter(
trade, order, open_order.order_id, constants.CANCEL_REASON['ALL_CANCELLED']
trade, order, open_order, constants.CANCEL_REASON['ALL_CANCELLED']
)
elif order['side'] == trade.exit_side:
self.handle_cancel_exit(
trade, order, open_order.order_id, constants.CANCEL_REASON['ALL_CANCELLED']
trade, order, open_order, constants.CANCEL_REASON['ALL_CANCELLED']
)
Trade.commit()
def handle_cancel_enter(
self, trade: Trade, order: Dict, order_id: str,
self, trade: Trade, order: Dict, order_obj: Order,
reason: str, replacing: Optional[bool] = False
) -> bool:
"""
entry cancel - cancel order
:param order_obj: Order object from the database.
:param replacing: Replacing order - prevent trade deletion.
:return: True if trade was fully cancelled
"""
was_trade_fully_canceled = False
order_id = order_obj.order_id
side = trade.entry_side.capitalize()
if order['status'] not in constants.NON_OPEN_EXCHANGE_STATES:
@@ -1518,8 +1519,8 @@ class FreqtradeBot(LoggingMixin):
f"Order {order_id} for {trade.pair} not cancelled, "
f"as the filled amount of {filled_val} would result in an unexitable trade.")
return False
corder = self.exchange.cancel_order_with_result(order_id, trade.pair,
trade.amount)
corder = self.exchange.cancel_order_with_result(order_id, trade.pair, trade.amount)
order_obj.ft_cancel_reason = reason
# if replacing, retry fetching the order 3 times if the status is not what we need
if replacing:
retry_count = 0
@@ -1540,9 +1541,10 @@ class FreqtradeBot(LoggingMixin):
else:
# Order was cancelled already, so we can reuse the existing dict
corder = order
reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
if order_obj.ft_cancel_reason is None:
order_obj.ft_cancel_reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
logger.info(f'{side} order {reason} for {trade}.')
logger.info(f'{side} order {order_obj.ft_cancel_reason} for {trade}.')
# Using filled to determine the filled amount
filled_amount = safe_value_fallback2(corder, order, 'filled', 'filled')
@@ -1555,7 +1557,7 @@ class FreqtradeBot(LoggingMixin):
if open_order_count < 1 and trade.nr_of_successful_entries == 0 and not replacing:
logger.info(f'{side} order fully cancelled. Removing {trade} from database.')
trade.delete()
reason += f", {constants.CANCEL_REASON['FULLY_CANCELLED']}"
order_obj.ft_cancel_reason += f", {constants.CANCEL_REASON['FULLY_CANCELLED']}"
else:
self.update_trade_state(trade, order_id, corder)
logger.info(f'{side} Order timeout for {trade}.')
@@ -1565,21 +1567,21 @@ class FreqtradeBot(LoggingMixin):
self.update_trade_state(trade, order_id, corder)
logger.info(f'Partial {trade.entry_side} order timeout for {trade}.')
reason += f", {constants.CANCEL_REASON['PARTIALLY_FILLED']}"
order_obj.ft_cancel_reason += f", {constants.CANCEL_REASON['PARTIALLY_FILLED']}"
self.wallets.update()
self._notify_enter_cancel(trade, order_type=self.strategy.order_types['entry'],
reason=reason)
reason=order_obj.ft_cancel_reason)
return was_trade_fully_canceled
def handle_cancel_exit(
self, trade: Trade, order: Dict, order_id: str,
reason: str
self, trade: Trade, order: Dict, order_obj: Order, reason: str
) -> bool:
"""
exit order cancel - cancel order and update trade
:return: True if exit order was cancelled, false otherwise
"""
order_id = order_obj.order_id
cancelled = False
# Cancelled orders may have the status of 'canceled' or 'closed'
if order['status'] not in constants.NON_OPEN_EXCHANGE_STATES:
@@ -1604,7 +1606,7 @@ class FreqtradeBot(LoggingMixin):
sub_trade=trade.amount != order['amount']
)
return False
order_obj.ft_cancel_reason = reason
try:
order = self.exchange.cancel_order_with_result(
order['id'], trade.pair, trade.amount)
@@ -1623,19 +1625,22 @@ class FreqtradeBot(LoggingMixin):
trade.exit_reason = exit_reason_prev
cancelled = True
else:
reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
if order_obj.ft_cancel_reason is None:
order_obj.ft_cancel_reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
trade.exit_reason = None
self.update_trade_state(trade, order['id'], order)
logger.info(f'{trade.exit_side.capitalize()} order {reason} for {trade}.')
logger.info(
f'{trade.exit_side.capitalize()} order {order_obj.ft_cancel_reason} for {trade}.')
trade.close_rate = None
trade.close_rate_requested = None
self._notify_exit_cancel(
trade,
order_type=self.strategy.order_types['exit'],
reason=reason, order_id=order['id'], sub_trade=trade.amount != order['amount']
reason=order_obj.ft_cancel_reason, order_id=order['id'],
sub_trade=trade.amount != order['amount']
)
return cancelled
@@ -1687,15 +1692,13 @@ class FreqtradeBot(LoggingMixin):
:param exit_check: CheckTuple with signal and reason
:return: True if it succeeds False
"""
try:
trade.funding_fees = self.exchange.get_funding_fees(
trade.set_funding_fees(
self.exchange.get_funding_fees(
pair=trade.pair,
amount=trade.amount,
is_short=trade.is_short,
open_date=trade.date_last_filled_utc,
)
except ExchangeError:
logger.warning("Could not update funding fee.")
open_date=trade.date_last_filled_utc)
)
exit_type = 'exit'
exit_reason = exit_tag or exit_check.exit_reason

View File

@@ -3,6 +3,7 @@ Various tool function for Freqtrade and scripts
"""
import gzip
import logging
from io import StringIO
from pathlib import Path
from typing import Any, Dict, Iterator, List, Mapping, Optional, TextIO, Union
from urllib.parse import urlparse
@@ -231,7 +232,7 @@ def json_to_dataframe(data: str) -> pd.DataFrame:
:param data: A JSON string
:returns: A pandas DataFrame from the JSON string
"""
dataframe = pd.read_json(data, orient='split')
dataframe = pd.read_json(StringIO(data), orient='split')
if 'date' in dataframe.columns:
dataframe['date'] = pd.to_datetime(dataframe['date'], unit='ms', utc=True)
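The resulting helper, shown standalone; wrapping the literal JSON string in StringIO avoids the pandas deprecation warning for passing raw strings to read_json:

from io import StringIO

import pandas as pd


def json_to_dataframe(data: str) -> pd.DataFrame:
    dataframe = pd.read_json(StringIO(data), orient='split')
    if 'date' in dataframe.columns:
        dataframe['date'] = pd.to_datetime(dataframe['date'], unit='ms', utc=True)
    return dataframe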

View File

@@ -94,8 +94,8 @@ class LookaheadAnalysis(BaseAnalysis):
# compare_df now comprises tuples with [1] having either 'self' or 'other'
if 'other' in col_name[1]:
continue
self_value = compare_df_row[col_idx]
other_value = compare_df_row[col_idx + 1]
self_value = compare_df_row.iloc[col_idx]
other_value = compare_df_row.iloc[col_idx + 1]
# output differences
if self_value != other_value:
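A tiny illustration of the positional access the change enforces; the column names are invented:

import pandas as pd

row = pd.Series([1.0, 2.0], index=['rsi_self', 'rsi_other'])
self_value = row.iloc[0]   # positional access; integer keys on a labelled Series are deprecated
other_value = row.iloc[1]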

View File

@@ -276,11 +276,13 @@ class Backtesting:
else:
self.detail_data = {}
if self.trading_mode == TradingMode.FUTURES:
self.funding_fee_timeframe: str = self.exchange.get_option('mark_ohlcv_timeframe')
self.funding_fee_timeframe_secs: int = timeframe_to_seconds(self.funding_fee_timeframe)
# Load additional futures data.
funding_rates_dict = history.load_data(
datadir=self.config['datadir'],
pairs=self.pairlists.whitelist,
timeframe=self.exchange.get_option('mark_ohlcv_timeframe'),
timeframe=self.funding_fee_timeframe,
timerange=self.timerange,
startup_candles=0,
fail_without_data=True,
@@ -292,7 +294,7 @@ class Backtesting:
mark_rates_dict = history.load_data(
datadir=self.config['datadir'],
pairs=self.pairlists.whitelist,
timeframe=self.exchange.get_option('mark_ohlcv_timeframe'),
timeframe=self.funding_fee_timeframe,
timerange=self.timerange,
startup_candles=0,
fail_without_data=True,
@@ -525,10 +527,10 @@ class Backtesting:
# This should not be reached...
return row[OPEN_IDX]
def _get_adjust_trade_entry_for_candle(self, trade: LocalTrade, row: Tuple
) -> LocalTrade:
def _get_adjust_trade_entry_for_candle(
self, trade: LocalTrade, row: Tuple, current_time: datetime
) -> LocalTrade:
current_rate = row[OPEN_IDX]
current_date = row[DATE_IDX].to_pydatetime()
current_profit = trade.calc_profit_ratio(current_rate)
min_stake = self.exchange.get_min_pair_stake_amount(trade.pair, current_rate, -0.1)
max_stake = self.exchange.get_max_pair_stake_amount(trade.pair, current_rate)
@@ -536,7 +538,7 @@ class Backtesting:
stake_amount = strategy_safe_wrapper(self.strategy.adjust_trade_position,
default_retval=None, supress_error=True)(
trade=trade, # type: ignore[arg-type]
current_time=current_date, current_rate=current_rate,
current_time=current_time, current_rate=current_rate,
current_profit=current_profit, min_stake=min_stake,
max_stake=min(max_stake, stake_available),
current_entry_rate=current_rate, current_exit_rate=current_rate,
@@ -569,10 +571,10 @@ class Backtesting:
# Remaining stake is too low to be sold.
return trade
exit_ = ExitCheckTuple(ExitType.PARTIAL_EXIT)
pos_trade = self._get_exit_for_signal(trade, row, exit_, amount)
pos_trade = self._get_exit_for_signal(trade, row, exit_, current_time, amount)
if pos_trade is not None:
order = pos_trade.orders[-1]
if self._try_close_open_order(order, trade, current_date, row):
if self._try_close_open_order(order, trade, current_time, row):
trade.recalc_trade_from_orders()
self.wallets.update()
return pos_trade
@@ -597,6 +599,8 @@ class Backtesting:
"""
if order and self._get_order_filled(order.ft_price, row):
order.close_bt_order(current_date, trade)
self._run_funding_fees(trade, current_date, force=True)
if not (order.ft_order_side == trade.exit_side and order.safe_amount == trade.amount):
# trade is still open
trade.set_liquidation_price(self.exchange.get_liquidation_price(
@@ -615,11 +619,11 @@ class Backtesting:
def _get_exit_for_signal(
self, trade: LocalTrade, row: Tuple, exit_: ExitCheckTuple,
current_time: datetime,
amount: Optional[float] = None) -> Optional[LocalTrade]:
exit_candle_time: datetime = row[DATE_IDX].to_pydatetime()
if exit_.exit_flag:
trade.close_date = exit_candle_time
trade.close_date = current_time
exit_reason = exit_.exit_reason
amount_ = amount if amount is not None else trade.amount
trade_dur = int((trade.close_date_utc - trade.open_date_utc).total_seconds() // 60)
@@ -647,10 +651,10 @@ class Backtesting:
default_retval=close_rate)(
pair=trade.pair,
trade=trade, # type: ignore[arg-type]
current_time=exit_candle_time,
current_time=current_time,
proposed_rate=close_rate, current_profit=current_profit,
exit_tag=exit_reason)
if rate != close_rate:
if rate is not None and rate != close_rate:
close_rate = price_to_precision(rate, trade.price_precision,
self.precision_mode)
# We can't place orders lower than current low.
@@ -673,7 +677,7 @@ class Backtesting:
time_in_force=time_in_force,
sell_reason=exit_reason, # deprecated
exit_reason=exit_reason,
current_time=exit_candle_time)):
current_time=current_time)):
return None
trade.exit_reason = exit_reason
@@ -714,21 +718,15 @@ class Backtesting:
trade.orders.append(order)
return trade
def _check_trade_exit(self, trade: LocalTrade, row: Tuple) -> Optional[LocalTrade]:
exit_candle_time: datetime = row[DATE_IDX].to_pydatetime()
def _check_trade_exit(
self, trade: LocalTrade, row: Tuple, current_time: datetime
) -> Optional[LocalTrade]:
if self.trading_mode == TradingMode.FUTURES:
trade.funding_fees = self.exchange.calculate_funding_fees(
self.futures_data[trade.pair],
amount=trade.amount,
is_short=trade.is_short,
open_date=trade.date_last_filled_utc,
close_date=exit_candle_time,
)
self._run_funding_fees(trade, current_time)
# Check if we need to adjust our current positions
if self.strategy.position_adjustment_enable:
trade = self._get_adjust_trade_entry_for_candle(trade, row)
trade = self._get_adjust_trade_entry_for_candle(trade, row, current_time)
enter = row[SHORT_IDX] if trade.is_short else row[LONG_IDX]
exit_sig = row[ESHORT_IDX] if trade.is_short else row[ELONG_IDX]
@@ -738,11 +736,32 @@ class Backtesting:
low=row[LOW_IDX], high=row[HIGH_IDX]
)
for exit_ in exits:
t = self._get_exit_for_signal(trade, row, exit_)
t = self._get_exit_for_signal(trade, row, exit_, current_time)
if t:
return t
return None
def _run_funding_fees(self, trade: LocalTrade, current_time: datetime, force: bool = False):
"""
Calculate funding fees if necessary and add them to the trade.
"""
if self.trading_mode == TradingMode.FUTURES:
if (
force
or (current_time.timestamp() % self.funding_fee_timeframe_secs) == 0
):
# Funding fee interval.
trade.set_funding_fees(
self.exchange.calculate_funding_fees(
self.futures_data[trade.pair],
amount=trade.amount,
is_short=trade.is_short,
open_date=trade.date_last_filled_utc,
close_date=current_time
)
)
def get_valid_price_and_stake(
self, pair: str, row: Tuple, propose_rate: float, stake_amount: float,
direction: LongShort, current_time: datetime, entry_tag: Optional[str],
@@ -760,7 +779,7 @@ class Backtesting:
) # default value is the open rate
# We can't place orders higher than current high (otherwise it'd be a stop limit entry)
# which freqtrade does not support in live.
if new_rate != propose_rate:
if new_rate is not None and new_rate != propose_rate:
propose_rate = price_to_precision(new_rate, price_precision,
self.precision_mode)
if direction == "short":
@@ -772,7 +791,8 @@ class Backtesting:
leverage = trade.leverage if trade else 1.0
if not pos_adjust:
try:
stake_amount = self.wallets.get_trade_stake_amount(pair, None, update=False)
stake_amount = self.wallets.get_trade_stake_amount(
pair, self.strategy.max_open_trades, update=False)
except DependencyException:
return 0, 0, 0, 0
@@ -954,7 +974,7 @@ class Backtesting:
def trade_slot_available(self, open_trade_count: int) -> bool:
# Always allow trades when max_open_trades is enabled.
max_open_trades: IntOrInf = self.config['max_open_trades']
max_open_trades: IntOrInf = self.strategy.max_open_trades
if max_open_trades <= 0 or open_trade_count < max_open_trades:
return True
# Rejected trade
@@ -1145,7 +1165,7 @@ class Backtesting:
# 4. Create exit orders (if any)
if not trade.has_open_orders:
self._check_trade_exit(trade, row) # Place exit order if necessary
self._check_trade_exit(trade, row, current_time) # Place exit order if necessary
# 5. Process exit orders.
order = trade.select_order(trade.exit_side, is_open=True)
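A minimal sketch of the boundary test used by _run_funding_fees above, with an 8h mark-price timeframe as the example:

from datetime import datetime, timezone


def is_funding_boundary(current_time: datetime, funding_fee_timeframe_secs: int) -> bool:
    # True exactly on multiples of the mark-price timeframe since epoch
    return current_time.timestamp() % funding_fee_timeframe_secs == 0


assert is_funding_boundary(datetime(2023, 1, 1, 8, 0, tzinfo=timezone.utc), 8 * 3600)
assert not is_funding_boundary(datetime(2023, 1, 1, 9, 0, tzinfo=timezone.utc), 8 * 3600)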

View File

@@ -500,7 +500,7 @@ class Hyperopt:
while i < 5 and len(asked_non_tried) < n_points:
if i < 3:
self.opt.cache_ = {}
asked = unique_list(self.opt.ask(n_points=n_points * 5))
asked = unique_list(self.opt.ask(n_points=n_points * 5 if i > 0 else n_points))
is_random = [False for _ in range(len(asked))]
else:
asked = unique_list(self.opt.space.rvs(n_samples=n_points * 5))
@@ -637,6 +637,10 @@ class Hyperopt:
HyperoptTools.show_epoch_details(self.current_best_epoch, self.total_epochs,
self.print_json)
elif self.num_epochs_saved > 0:
print(
f"No good result found for given optimization function in {self.num_epochs_saved} "
f"{plural(self.num_epochs_saved, 'epoch')}.")
else:
# This is printed when Ctrl+C is pressed quickly, before first epochs have
# a chance to be evaluated.

View File

@@ -21,7 +21,7 @@ logger = logging.getLogger(__name__)
def _format_exception_message(space: str, ignore_missing_space: bool) -> None:
msg = (f"The '{space}' space is included into the hyperoptimization "
f"but no parameter for this space was not found in your Strategy. "
f"but no parameter for this space was found in your Strategy. "
)
if ignore_missing_space:
logger.warning(msg + "This space will be ignored.")

View File

@@ -429,14 +429,18 @@ class HyperoptTools:
trials = trials.drop(columns=['Total profit'])
if print_colorized:
trials2 = trials.astype(str)
for i in range(len(trials)):
if trials.loc[i]['is_profit']:
for j in range(len(trials.loc[i]) - 3):
trials.iat[i, j] = f"{Fore.GREEN}{str(trials.loc[i][j])}{Fore.RESET}"
trials2.iat[i, j] = f"{Fore.GREEN}{str(trials.iloc[i, j])}{Fore.RESET}"
if trials.loc[i]['is_best'] and highlight_best:
for j in range(len(trials.loc[i]) - 3):
trials.iat[i, j] = f"{Style.BRIGHT}{str(trials.loc[i][j])}{Style.RESET_ALL}"
trials2.iat[i, j] = (
f"{Style.BRIGHT}{str(trials.iloc[i, j])}{Style.RESET_ALL}"
)
trials = trials2
del trials2
trials = trials.drop(columns=['is_initial_point', 'is_best', 'is_profit', 'is_random'])
if remove_header > 0:
table = tabulate.tabulate(

View File

@@ -219,8 +219,10 @@ def _get_resample_from_period(period: str) -> str:
raise ValueError(f"Period {period} is not supported.")
def generate_periodic_breakdown_stats(trade_list: List, period: str) -> List[Dict[str, Any]]:
results = DataFrame.from_records(trade_list)
def generate_periodic_breakdown_stats(
trade_list: Union[List, DataFrame], period: str) -> List[Dict[str, Any]]:
results = trade_list if not isinstance(trade_list, list) else DataFrame.from_records(trade_list)
if len(results) == 0:
return []
results['close_date'] = to_datetime(results['close_date'], utc=True)

View File

@@ -115,6 +115,7 @@ def migrate_trades_and_orders_table(
# Futures Properties
interest_rate = get_column_def(cols, 'interest_rate', '0.0')
funding_fees = get_column_def(cols, 'funding_fees', '0.0')
funding_fee_running = get_column_def(cols, 'funding_fee_running', 'null')
max_stake_amount = get_column_def(cols, 'max_stake_amount', 'stake_amount')
# If ticker-interval existed use that, else null.
@@ -163,7 +164,7 @@ def migrate_trades_and_orders_table(
max_rate, min_rate, exit_reason, exit_order_status, strategy, enter_tag,
timeframe, open_trade_value, close_profit_abs,
trading_mode, leverage, liquidation_price, is_short,
interest_rate, funding_fees, realized_profit,
interest_rate, funding_fees, funding_fee_running, realized_profit,
amount_precision, price_precision, precision_mode, contract_size,
max_stake_amount
)
@@ -192,7 +193,8 @@ def migrate_trades_and_orders_table(
{open_trade_value} open_trade_value, {close_profit_abs} close_profit_abs,
{trading_mode} trading_mode, {leverage} leverage, {liquidation_price} liquidation_price,
{is_short} is_short, {interest_rate} interest_rate,
{funding_fees} funding_fees, {realized_profit} realized_profit,
{funding_fees} funding_fees, {funding_fee_running} funding_fee_running,
{realized_profit} realized_profit,
{amount_precision} amount_precision, {price_precision} price_precision,
{precision_mode} precision_mode, {contract_size} contract_size,
{max_stake_amount} max_stake_amount
@@ -220,6 +222,7 @@ def migrate_orders_table(engine, table_back_name: str, cols_order: List):
funding_fee = get_column_def(cols_order, 'funding_fee', '0.0')
ft_amount = get_column_def(cols_order, 'ft_amount', 'coalesce(amount, 0.0)')
ft_price = get_column_def(cols_order, 'ft_price', 'coalesce(price, 0.0)')
ft_cancel_reason = get_column_def(cols_order, 'ft_cancel_reason', 'null')
# sqlite does not support literals for booleans
with engine.begin() as connection:
@@ -227,13 +230,13 @@ def migrate_orders_table(engine, table_back_name: str, cols_order: List):
insert into orders (id, ft_trade_id, ft_order_side, ft_pair, ft_is_open, order_id,
status, symbol, order_type, side, price, amount, filled, average, remaining, cost,
stop_price, order_date, order_filled_date, order_update_date, ft_fee_base, funding_fee,
ft_amount, ft_price
ft_amount, ft_price, ft_cancel_reason
)
select id, ft_trade_id, ft_order_side, ft_pair, ft_is_open, order_id,
status, symbol, order_type, side, price, amount, filled, {average} average, remaining,
cost, {stop_price} stop_price, order_date, order_filled_date,
order_update_date, {ft_fee_base} ft_fee_base, {funding_fee} funding_fee,
{ft_amount} ft_amount, {ft_price} ft_price
{ft_amount} ft_amount, {ft_price} ft_price, {ft_cancel_reason} ft_cancel_reason
from {table_back_name}
"""))
@@ -328,8 +331,8 @@ def check_migrate(engine, decl_base, previous_tables) -> None:
# if ('orders' not in previous_tables
# or not has_column(cols_orders, 'funding_fee')):
migrating = False
# if not has_column(cols_orders, 'ft_price'):
if not has_column(cols_trades, 'is_stop_loss_trailing'):
# if not has_column(cols_orders, 'ft_cancel_reason'):
if not has_column(cols_trades, 'funding_fee_running'):
migrating = True
logger.info(f"Running database migration for trades - "
f"backup: {table_back_name}, {order_table_bak_name}")

View File

@@ -68,6 +68,7 @@ class Order(ModelBase):
ft_is_open: Mapped[bool] = mapped_column(nullable=False, default=True, index=True)
ft_amount: Mapped[float] = mapped_column(Float(), nullable=False)
ft_price: Mapped[float] = mapped_column(Float(), nullable=False)
ft_cancel_reason: Mapped[str] = mapped_column(String(CUSTOM_TAG_MAX_LENGTH), nullable=True)
order_id: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
status: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
@@ -173,10 +174,6 @@ class Order(ModelBase):
self.ft_is_open = True
if self.status in NON_OPEN_EXCHANGE_STATES:
self.ft_is_open = False
if self.trade:
# Assign funding fee up to this point
# (represents the funding fee since the last order)
self.funding_fee = self.trade.funding_fees
if (order.get('filled', 0.0) or 0.0) > 0 and not self.order_filled_date:
self.order_filled_date = dt_from_ts(
safe_value_fallback(order, 'lastTradeTimestamp', default_value=dt_ts())
@@ -249,7 +246,8 @@ class Order(ModelBase):
self.ft_is_open = False
# Assign funding fees to Order.
# Assumes backtesting will use date_last_filled_utc to calculate future funding fees.
self.funding_fee = trade.funding_fees
self.funding_fee = trade.funding_fee_running
trade.funding_fee_running = 0.0
if (self.ft_order_side == trade.entry_side and self.price):
trade.open_rate = self.price
@@ -396,6 +394,9 @@ class LocalTrade:
# Futures properties
funding_fees: Optional[float] = None
# Used to keep running funding fees - between the last filled order and now
# Shall not be used for calculations!
funding_fee_running: Optional[float] = None
@property
def stoploss_or_liquidation(self) -> float:
@@ -535,6 +536,7 @@ class LocalTrade:
for key in kwargs:
setattr(self, key, kwargs[key])
self.recalc_open_trade_value()
self.orders = []
if self.trading_mode == TradingMode.MARGIN and self.interest_rate is None:
raise OperationalException(
f"{self.trading_mode.value} trading requires param interest_rate on trades")
@@ -661,6 +663,16 @@ class LocalTrade:
return
self.liquidation_price = liquidation_price
def set_funding_fees(self, funding_fee: float) -> None:
"""
Assign funding fees to Trade.
"""
if funding_fee is None:
return
self.funding_fee_running = funding_fee
prior_funding_fees = sum([o.funding_fee for o in self.orders if o.funding_fee])
self.funding_fees = prior_funding_fees + funding_fee
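An illustrative ledger (not the model code itself) of the bookkeeping set up here: fees accrued since the last fill live in funding_fee_running, the total is the per-order sum plus the running amount, and a fill freezes the running amount onto the order:

class FundingFeeLedger:
    def __init__(self):
        self.order_funding_fees = []      # one entry per filled order (Order.funding_fee)
        self.funding_fee_running = 0.0    # accrued since the last filled order
        self.funding_fees = 0.0           # total reported on the trade

    def set_funding_fees(self, funding_fee):
        if funding_fee is None:
            return
        self.funding_fee_running = funding_fee
        prior = sum(f for f in self.order_funding_fees if f)
        self.funding_fees = prior + funding_fee

    def on_order_filled(self):
        # mirrors close_bt_order() / update_trade(): freeze the running amount on the order
        self.order_funding_fees.append(self.funding_fee_running)
        self.funding_fee_running = 0.0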
def __set_stop_loss(self, stop_loss: float, percent: float):
"""
Method used internally to set self.stop_loss.
@@ -741,6 +753,10 @@ class LocalTrade:
return
logger.info(f'Updating trade (id={self.id}) ...')
if order.ft_order_side != 'stoploss':
order.funding_fee = self.funding_fee_running
# Reset running funding fees
self.funding_fee_running = 0.0
if order.ft_order_side == self.entry_side:
# Update open rate and actual amount
@@ -1037,7 +1053,7 @@ class LocalTrade:
price = avg_price if is_exit else tmp_price
current_stake += price * tmp_amount * side
if current_amount > ZERO:
if current_amount > ZERO and not is_exit:
avg_price = current_stake / current_amount
if is_exit:
@@ -1050,7 +1066,10 @@ class LocalTrade:
exit_amount = o.safe_amount_after_fee
prof = self.calculate_profit(exit_rate, exit_amount, float(avg_price))
close_profit_abs += prof.profit_abs
close_profit = prof.profit_ratio
if total_stake > 0:
# This needs to be calculated based on the last occurring exit to be aligned
# with realized_profit.
close_profit = (close_profit_abs / total_stake) * self.leverage
else:
total_stake = total_stake + self._calc_open_trade_value(tmp_amount, price)
max_stake_amount += (tmp_amount * price)
@@ -1286,6 +1305,99 @@ class LocalTrade:
trade.adjust_stop_loss(trade.open_rate, desired_stoploss)
logger.info(f"New stoploss: {trade.stop_loss}.")
@classmethod
def from_json(cls, json_str: str) -> Self:
"""
Create a Trade instance from a json string.
Used for debugging purposes - please keep.
:param json_str: json string to parse
:return: Trade instance
"""
import rapidjson
data = rapidjson.loads(json_str)
trade = cls(
__FROM_JSON=True,
id=data["trade_id"],
pair=data["pair"],
base_currency=data["base_currency"],
stake_currency=data["quote_currency"],
is_open=data["is_open"],
exchange=data["exchange"],
amount=data["amount"],
amount_requested=data["amount_requested"],
stake_amount=data["stake_amount"],
strategy=data["strategy"],
enter_tag=data["enter_tag"],
timeframe=data["timeframe"],
fee_open=data["fee_open"],
fee_open_cost=data["fee_open_cost"],
fee_open_currency=data["fee_open_currency"],
fee_close=data["fee_close"],
fee_close_cost=data["fee_close_cost"],
fee_close_currency=data["fee_close_currency"],
open_date=datetime.fromtimestamp(data["open_timestamp"] // 1000, tz=timezone.utc),
open_rate=data["open_rate"],
open_rate_requested=data["open_rate_requested"],
open_trade_value=data["open_trade_value"],
close_date=(datetime.fromtimestamp(data["close_timestamp"] // 1000, tz=timezone.utc)
if data["close_timestamp"] else None),
realized_profit=data["realized_profit"],
close_rate=data["close_rate"],
close_rate_requested=data["close_rate_requested"],
close_profit=data["close_profit"],
close_profit_abs=data["close_profit_abs"],
exit_reason=data["exit_reason"],
exit_order_status=data["exit_order_status"],
stop_loss=data["stop_loss_abs"],
stop_loss_pct=data["stop_loss_ratio"],
stoploss_order_id=data["stoploss_order_id"],
stoploss_last_update=(
datetime.fromtimestamp(data["stoploss_last_update_timestamp"] // 1000,
tz=timezone.utc)
if data["stoploss_last_update_timestamp"] else None),
initial_stop_loss=data["initial_stop_loss_abs"],
initial_stop_loss_pct=data["initial_stop_loss_ratio"],
min_rate=data["min_rate"],
max_rate=data["max_rate"],
leverage=data["leverage"],
interest_rate=data["interest_rate"],
liquidation_price=data["liquidation_price"],
is_short=data["is_short"],
trading_mode=data["trading_mode"],
funding_fees=data["funding_fees"],
amount_precision=data.get('amount_precision', None),
price_precision=data.get('price_precision', None),
precision_mode=data.get('precision_mode', None),
contract_size=data.get('contract_size', None),
)
for order in data["orders"]:
order_obj = Order(
amount=order["amount"],
ft_amount=order["amount"],
ft_order_side=order["ft_order_side"],
ft_pair=order["pair"],
ft_is_open=order["is_open"],
order_id=order["order_id"],
status=order["status"],
average=order["average"],
cost=order["cost"],
filled=order["filled"],
order_date=datetime.strptime(order["order_date"], DATETIME_PRINT_FORMAT),
order_filled_date=(datetime.fromtimestamp(
order["order_filled_timestamp"] // 1000, tz=timezone.utc)
if order["order_filled_timestamp"] else None),
order_type=order["order_type"],
price=order["price"],
ft_price=order["price"],
remaining=order["remaining"],
funding_fee=order.get("funding_fee", None),
)
trade.orders.append(order_obj)
return trade
class Trade(ModelBase, LocalTrade):
"""
@@ -1389,6 +1501,8 @@ class Trade(ModelBase, LocalTrade):
# Futures properties
funding_fees: Mapped[Optional[float]] = mapped_column(
Float(), nullable=True, default=None) # type: ignore
funding_fee_running: Mapped[Optional[float]] = mapped_column(
Float(), nullable=True, default=None) # type: ignore
def __init__(self, **kwargs):
from_json = kwargs.pop('__FROM_JSON', None)
@@ -1669,7 +1783,7 @@ class Trade(ModelBase, LocalTrade):
.order_by(desc('profit_sum_abs'))
).all()
return_list: List[Dict] = []
resp: List[Dict] = []
for id, enter_tag, exit_reason, profit, profit_abs, count in mix_tag_perf:
enter_tag = enter_tag if enter_tag is not None else "Other"
exit_reason = exit_reason if exit_reason is not None else "Other"
@@ -1677,24 +1791,25 @@ class Trade(ModelBase, LocalTrade):
if (exit_reason is not None and enter_tag is not None):
mix_tag = enter_tag + " " + exit_reason
i = 0
if not any(item["mix_tag"] == mix_tag for item in return_list):
return_list.append({'mix_tag': mix_tag,
'profit': profit,
'profit_pct': round(profit * 100, 2),
'profit_abs': profit_abs,
'count': count})
if not any(item["mix_tag"] == mix_tag for item in resp):
resp.append({'mix_tag': mix_tag,
'profit_ratio': profit,
'profit_pct': round(profit * 100, 2),
'profit_abs': profit_abs,
'count': count})
else:
while i < len(return_list):
if return_list[i]["mix_tag"] == mix_tag:
return_list[i] = {
while i < len(resp):
if resp[i]["mix_tag"] == mix_tag:
resp[i] = {
'mix_tag': mix_tag,
'profit': profit + return_list[i]["profit"],
'profit_pct': round(profit + return_list[i]["profit"] * 100, 2),
'profit_abs': profit_abs + return_list[i]["profit_abs"],
'count': 1 + return_list[i]["count"]}
'profit_ratio': profit + resp[i]["profit_ratio"],
'profit_pct': round(profit + resp[i]["profit_ratio"] * 100, 2),
'profit_abs': profit_abs + resp[i]["profit_abs"],
'count': 1 + resp[i]["count"]
}
i += 1
return return_list
return resp
@staticmethod
def get_best_pair(start_date: datetime = datetime.fromtimestamp(0)):
@@ -1729,96 +1844,3 @@ class Trade(ModelBase, LocalTrade):
Order.status == 'closed'
)).scalar_one()
return trading_volume
@classmethod
def from_json(cls, json_str: str) -> Self:
"""
Create a Trade instance from a json string.
Used for debugging purposes - please keep.
:param json_str: json string to parse
:return: Trade instance
"""
import rapidjson
data = rapidjson.loads(json_str)
trade = cls(
__FROM_JSON=True,
id=data["trade_id"],
pair=data["pair"],
base_currency=data["base_currency"],
stake_currency=data["quote_currency"],
is_open=data["is_open"],
exchange=data["exchange"],
amount=data["amount"],
amount_requested=data["amount_requested"],
stake_amount=data["stake_amount"],
strategy=data["strategy"],
enter_tag=data["enter_tag"],
timeframe=data["timeframe"],
fee_open=data["fee_open"],
fee_open_cost=data["fee_open_cost"],
fee_open_currency=data["fee_open_currency"],
fee_close=data["fee_close"],
fee_close_cost=data["fee_close_cost"],
fee_close_currency=data["fee_close_currency"],
open_date=datetime.fromtimestamp(data["open_timestamp"] // 1000, tz=timezone.utc),
open_rate=data["open_rate"],
open_rate_requested=data["open_rate_requested"],
open_trade_value=data["open_trade_value"],
close_date=(datetime.fromtimestamp(data["close_timestamp"] // 1000, tz=timezone.utc)
if data["close_timestamp"] else None),
realized_profit=data["realized_profit"],
close_rate=data["close_rate"],
close_rate_requested=data["close_rate_requested"],
close_profit=data["close_profit"],
close_profit_abs=data["close_profit_abs"],
exit_reason=data["exit_reason"],
exit_order_status=data["exit_order_status"],
stop_loss=data["stop_loss_abs"],
stop_loss_pct=data["stop_loss_ratio"],
stoploss_order_id=data["stoploss_order_id"],
stoploss_last_update=(
datetime.fromtimestamp(data["stoploss_last_update_timestamp"] // 1000,
tz=timezone.utc)
if data["stoploss_last_update_timestamp"] else None),
initial_stop_loss=data["initial_stop_loss_abs"],
initial_stop_loss_pct=data["initial_stop_loss_ratio"],
min_rate=data["min_rate"],
max_rate=data["max_rate"],
leverage=data["leverage"],
interest_rate=data["interest_rate"],
liquidation_price=data["liquidation_price"],
is_short=data["is_short"],
trading_mode=data["trading_mode"],
funding_fees=data["funding_fees"],
amount_precision=data.get('amount_precision', None),
price_precision=data.get('price_precision', None),
precision_mode=data.get('precision_mode', None),
contract_size=data.get('contract_size', None),
)
for order in data["orders"]:
order_obj = Order(
amount=order["amount"],
ft_amount=order["amount"],
ft_order_side=order["ft_order_side"],
ft_pair=order["pair"],
ft_is_open=order["is_open"],
order_id=order["order_id"],
status=order["status"],
average=order["average"],
cost=order["cost"],
filled=order["filled"],
order_date=datetime.strptime(order["order_date"], DATETIME_PRINT_FORMAT),
order_filled_date=(datetime.fromtimestamp(
order["order_filled_timestamp"] // 1000, tz=timezone.utc)
if order["order_filled_timestamp"] else None),
order_type=order["order_type"],
price=order["price"],
ft_price=order["price"],
remaining=order["remaining"],
funding_fee=order.get("funding_fee", None),
)
trade.orders.append(order_obj)
return trade

View File

@@ -21,6 +21,7 @@ from freqtrade.misc import pair_to_filename
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
from freqtrade.strategy import IStrategy
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
logger = logging.getLogger(__name__)
@@ -636,7 +637,7 @@ def load_and_plot_trades(config: Config):
exchange = ExchangeResolver.load_exchange(config)
IStrategy.dp = DataProvider(config, exchange)
strategy.ft_bot_start()
strategy.bot_loop_start(datetime.now(timezone.utc))
strategy_safe_wrapper(strategy.bot_loop_start)(current_time=datetime.now(timezone.utc))
plot_elements = init_plotscript(config, list(exchange.markets), strategy.startup_candle_count)
timerange = plot_elements['timerange']
trades = plot_elements['trades']

View File

@@ -1,9 +1,9 @@
from datetime import date, datetime
from typing import Any, Dict, List, Optional, Union
from pydantic import BaseModel, ConfigDict, RootModel, SerializeAsAny
from pydantic import BaseModel, RootModel, SerializeAsAny
from freqtrade.constants import DATETIME_PRINT_FORMAT, IntOrInf
from freqtrade.constants import IntOrInf
from freqtrade.enums import MarginMode, OrderTypeValues, SignalDirection, TradingMode
from freqtrade.types import ValidExchangesType
@@ -95,15 +95,30 @@ class Count(BaseModel):
total_stake: float
class PerformanceEntry(BaseModel):
pair: str
profit: float
class __BaseStatsModel(BaseModel):
profit_ratio: float
profit_pct: float
profit_abs: float
count: int
class Entry(__BaseStatsModel):
enter_tag: str
class Exit(__BaseStatsModel):
exit_reason: str
class MixTag(__BaseStatsModel):
mix_tag: str
class PerformanceEntry(__BaseStatsModel):
pair: str
profit: float
class Profit(BaseModel):
profit_closed_coin: float
profit_closed_percent_mean: float
@@ -456,6 +471,7 @@ class FreqAIModelListResponse(BaseModel):
class StrategyResponse(BaseModel):
strategy: str
code: str
timeframe: Optional[str]
class AvailablePairs(BaseModel):
@@ -484,11 +500,6 @@ class PairHistory(BaseModel):
data_start: str
data_stop: str
data_stop_ts: int
# TODO[pydantic]: The following keys were removed: `json_encoders`.
# Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
model_config = ConfigDict(json_encoders={
datetime: lambda v: v.strftime(DATETIME_PRINT_FORMAT),
})
class BacktestFreqAIInputs(BaseModel):

View File

@@ -12,15 +12,15 @@ from freqtrade.exceptions import OperationalException
from freqtrade.rpc import RPC
from freqtrade.rpc.api_server.api_schemas import (AvailablePairs, Balances, BlacklistPayload,
BlacklistResponse, Count, DailyWeeklyMonthly,
DeleteLockRequest, DeleteTrade,
ExchangeListResponse, ForceEnterPayload,
DeleteLockRequest, DeleteTrade, Entry,
ExchangeListResponse, Exit, ForceEnterPayload,
ForceEnterResponse, ForceExitPayload,
FreqAIModelListResponse, Health, Locks, Logs,
OpenTradeSchema, PairHistory, PerformanceEntry,
Ping, PlotConfig, Profit, ResultMsg, ShowConfig,
Stats, StatusMsg, StrategyListResponse,
StrategyResponse, SysInfo, Version,
WhitelistResponse)
MixTag, OpenTradeSchema, PairHistory,
PerformanceEntry, Ping, PlotConfig, Profit,
ResultMsg, ShowConfig, Stats, StatusMsg,
StrategyListResponse, StrategyResponse, SysInfo,
Version, WhitelistResponse)
from freqtrade.rpc.api_server.deps import get_config, get_exchange, get_rpc, get_rpc_optional
from freqtrade.rpc.rpc import RPCException
@@ -52,7 +52,8 @@ logger = logging.getLogger(__name__)
# 2.31: new /backtest/history/ delete endpoint
# 2.32: new /backtest/history/ patch endpoint
# 2.33: Additional weekly/monthly metrics
API_VERSION = 2.33
# 2.34: new entries/exits/mix_tags endpoints
API_VERSION = 2.34
# Public API, requires no auth.
router_public = APIRouter()
@@ -83,6 +84,21 @@ def count(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_count()
@router.get('/entries', response_model=List[Entry], tags=['info'])
def entries(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_enter_tag_performance(pair)
@router.get('/exits', response_model=List[Exit], tags=['info'])
def exits(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_exit_reason_performance(pair)
@router.get('/mix_tags', response_model=List[MixTag], tags=['info'])
def mix_tags(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_mix_tag_performance(pair)
@router.get('/performance', response_model=List[PerformanceEntry], tags=['info'])
def performance(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_performance()
@@ -334,6 +350,7 @@ def get_strategy(strategy: str, config=Depends(get_config)):
return {
'strategy': strategy_obj.get_strategy_name(),
'code': strategy_obj.__source__,
'timeframe': getattr(strategy_obj, 'timeframe', None),
}
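A hedged usage sketch of the endpoints added with API version 2.34; URL, port and credentials are placeholders for a locally running API server with HTTP basic auth:

import requests

BASE = "http://127.0.0.1:8080/api/v1"
AUTH = ("freqtrader", "changeme")  # placeholder credentials

entries = requests.get(f"{BASE}/entries", auth=AUTH, timeout=10).json()
exits = requests.get(f"{BASE}/exits", params={"pair": "BTC/USDT"}, auth=AUTH, timeout=10).json()
mix_tags = requests.get(f"{BASE}/mix_tags", auth=AUTH, timeout=10).json()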

View File

@@ -56,7 +56,7 @@ def get_exchange(config=Depends(get_config)):
if not (exchange := ApiBG.exchanges.get(exchange_key)):
from freqtrade.resolvers import ExchangeResolver
exchange = ExchangeResolver.load_exchange(
config, load_leverage_tiers=False)
config, validate=False, load_leverage_tiers=False)
ApiBG.exchanges[exchange_key] = exchange
return exchange

View File

@@ -15,6 +15,7 @@ class Discord(Webhook):
self.rpc = rpc
self.strategy = config.get('strategy', '')
self.timeframe = config.get('timeframe', '')
self.bot_name = config.get('bot_name', '')
self._url = config['discord']['webhook_url']
self._format = 'json'
@@ -31,12 +32,12 @@ class Discord(Webhook):
def send_msg(self, msg) -> None:
if msg['type'].value in self._config['discord']:
if (fields := self._config['discord'].get(msg['type'].value)):
logger.info(f"Sending discord message: {msg}")
msg['strategy'] = self.strategy
msg['timeframe'] = self.timeframe
fields = self._config['discord'].get(msg['type'].value)
msg['bot_name'] = self.bot_name
color = 0x0000FF
if msg['type'] in (RPCMessageType.EXIT, RPCMessageType.EXIT_FILL):
profit_ratio = msg.get('profit_ratio')

View File

@@ -27,6 +27,8 @@ coingecko_mapping = {
'usdt': 'tether',
'busd': 'binance-usd',
'tusd': 'true-usd',
'usdc': 'usd-coin',
'btc': 'bitcoin'
}

View File

@@ -121,8 +121,8 @@ class RPC:
'stake_currency_decimals': decimals_per_coin(config['stake_currency']),
'stake_amount': str(config['stake_amount']),
'available_capital': config.get('available_capital'),
'max_open_trades': (config['max_open_trades']
if config['max_open_trades'] != float('inf') else -1),
'max_open_trades': (config.get('max_open_trades', 0)
if config.get('max_open_trades', 0) != float('inf') else -1),
'minimal_roi': config['minimal_roi'].copy() if 'minimal_roi' in config else {},
'stoploss': config.get('stoploss'),
'stoploss_on_exchange': config.get('order_types',
@@ -795,14 +795,14 @@ class RPC:
if order['side'] == trade.entry_side:
fully_canceled = self._freqtrade.handle_cancel_enter(
trade, order, oo.order_id, CANCEL_REASON['FORCE_EXIT'])
trade, order, oo, CANCEL_REASON['FORCE_EXIT'])
trade_entry_cancelation_res['cancel_state'] = fully_canceled
trade_entry_cancelation_registry.append(trade_entry_cancelation_res)
if order['side'] == trade.exit_side:
# Cancel order - so it is placed anew with a fresh price.
self._freqtrade.handle_cancel_exit(
trade, order, oo.order_id, CANCEL_REASON['FORCE_EXIT'])
trade, order, oo, CANCEL_REASON['FORCE_EXIT'])
if all(tocr['cancel_state'] is False for tocr in trade_entry_cancelation_registry):
if trade.has_open_orders:
@@ -914,7 +914,8 @@ class RPC:
if not stake_amount:
# gen stake amount
stake_amount = self._freqtrade.wallets.get_trade_stake_amount(pair)
stake_amount = self._freqtrade.wallets.get_trade_stake_amount(
pair, self._config['max_open_trades'])
# execute buy
if not order_type:
@@ -955,7 +956,7 @@ class RPC:
logger.info(f"Cannot query order for {trade} due to {e}.", exc_info=True)
raise RPCException("Order not found.")
self._freqtrade.handle_cancel_order(
order, open_order.order_id, trade, CANCEL_REASON['USER_CANCEL'])
order, open_order, trade, CANCEL_REASON['USER_CANCEL'])
Trade.commit()
def _rpc_delete(self, trade_id: int) -> Dict[str, Union[str, int]]:

View File

@@ -223,7 +223,8 @@ class Telegram(RPCHandler):
CommandHandler('health', self._health),
CommandHandler('help', self._help),
CommandHandler('version', self._version),
CommandHandler('marketdir', self._changemarketdir)
CommandHandler('marketdir', self._changemarketdir),
CommandHandler('order', self._order),
]
callbacks = [
CallbackQueryHandler(self._status_table, pattern='update_status_table'),
@@ -240,7 +241,7 @@ class Telegram(RPCHandler):
CallbackQueryHandler(self._mix_tag_performance, pattern='update_mix_tag_performance'),
CallbackQueryHandler(self._count, pattern='update_count'),
CallbackQueryHandler(self._force_exit_inline, pattern=r"force_exit__\S+"),
CallbackQueryHandler(self._force_enter_inline, pattern=r"\S+\/\S+"),
CallbackQueryHandler(self._force_enter_inline, pattern=r"force_enter__\S+"),
]
for handle in handles:
self._app.add_handler(handle)
@@ -555,6 +556,47 @@ class Telegram(RPCHandler):
return lines_detail
@authorized_only
async def _order(self, update: Update, context: CallbackContext) -> None:
"""
Handler for /order.
Returns the orders of the trade
:param bot: telegram bot
:param update: message update
:return: None
"""
trade_ids = []
if context.args and len(context.args) > 0:
trade_ids = [int(i) for i in context.args if i.isnumeric()]
results = self._rpc._rpc_trade_status(trade_ids=trade_ids)
for r in results:
lines = [
"*Order List for Trade #*`{trade_id}`"
]
lines_detail = self._prepare_order_details(
r['orders'], r['quote_currency'], r['is_open'])
lines.extend(lines_detail if lines_detail else "")
await self.__send_order_msg(lines, r)
async def __send_order_msg(self, lines: List[str], r: Dict[str, Any]) -> None:
"""
Send status message.
"""
msg = ''
for line in lines:
if line:
if (len(msg) + len(line) + 1) < MAX_MESSAGE_LENGTH:
msg += line + '\n'
else:
await self._send_msg(msg.format(**r))
msg = "*Order List for Trade #*`{trade_id}` - continued\n" + line + '\n'
await self._send_msg(msg.format(**r))
@authorized_only
async def _status(self, update: Update, context: CallbackContext) -> None:
"""
@@ -652,9 +694,6 @@ class Telegram(RPCHandler):
"*Open Order:* `{open_orders}`"
+ ("- `{exit_order_status}`" if r['exit_order_status'] else ""))
lines_detail = self._prepare_order_details(
r['orders'], r['quote_currency'], r['is_open'])
lines.extend(lines_detail if lines_detail else "")
await self.__send_status_msg(lines, r)
async def __send_status_msg(self, lines: List[str], r: Dict[str, Any]) -> None:
@@ -1149,12 +1188,19 @@ class Telegram(RPCHandler):
async def _force_enter_inline(self, update: Update, _: CallbackContext) -> None:
if update.callback_query:
query = update.callback_query
if query.data and '_||_' in query.data:
pair, side = query.data.split('_||_')
order_side = SignalDirection(side)
await query.answer()
await query.edit_message_text(text=f"Manually entering {order_side} for {pair}")
await self._force_enter_action(pair, None, order_side)
if query.data and '__' in query.data:
# Input data is "force_enter__<pair|cancel>_<side>"
payload = query.data.split("__")[1]
if payload == 'cancel':
await query.answer()
await query.edit_message_text(text="Force enter canceled.")
return
if payload and '_||_' in payload:
pair, side = payload.split('_||_')
order_side = SignalDirection(side)
await query.answer()
await query.edit_message_text(text=f"Manually entering {order_side} for {pair}")
await self._force_enter_action(pair, None, order_side)
@staticmethod
def _layout_inline_keyboard(
@@ -1183,12 +1229,14 @@ class Telegram(RPCHandler):
else:
whitelist = self._rpc._rpc_whitelist()['whitelist']
pair_buttons = [
InlineKeyboardButton(text=pair, callback_data=f"{pair}_||_{order_side}")
for pair in sorted(whitelist)
InlineKeyboardButton(
text=pair, callback_data=f"force_enter__{pair}_||_{order_side}"
) for pair in sorted(whitelist)
]
buttons_aligned = self._layout_inline_keyboard(pair_buttons)
buttons_aligned.append([InlineKeyboardButton(text='Cancel', callback_data='cancel')])
buttons_aligned.append([InlineKeyboardButton(text='Cancel',
callback_data='force_enter__cancel')])
await self._send_msg(msg="Which pair?",
keyboard=buttons_aligned,
query=update.callback_query)
@@ -1369,7 +1417,7 @@ class Telegram(RPCHandler):
stat_line = (
f"{i+1}.\t <code>{trade['mix_tag']}\t"
f"{round_coin_value(trade['profit_abs'], self._config['stake_currency'])} "
f"({trade['profit']:.2%}) "
f"({trade['profit_ratio']:.2%}) "
f"({trade['count']})</code>\n")
if len(output + stat_line) >= MAX_MESSAGE_LENGTH:

View File

@@ -1,5 +1,5 @@
from dataclasses import dataclass
from typing import Any, Callable, Optional, Union
from typing import Any, Callable, Dict, Optional, Union
from pandas import DataFrame
@@ -38,17 +38,18 @@ def informative(timeframe: str, asset: str = '',
:param timeframe: Informative timeframe. Must always be equal or higher than strategy timeframe.
:param asset: Informative asset, for example BTC, BTC/USDT, ETH/BTC. Do not specify to use
current pair.
current pair. Also supports limited pair format strings (see below)
:param fmt: Column format (str) or column formatter (callable(name, asset, timeframe)). When not
specified, defaults to:
* {base}_{quote}_{column}_{timeframe} if asset is specified.
* {column}_{timeframe} if asset is not specified.
Format string supports these format variables:
* {asset} - full name of the asset, for example 'BTC/USDT'.
Pair format supports these format variables:
* {base} - base currency in lower case, for example 'eth'.
* {BASE} - same as {base}, except in upper case.
* {quote} - quote currency in lower case, for example 'usdt'.
* {QUOTE} - same as {quote}, except in upper case.
Format string additionally supports these variables.
* {asset} - full name of the asset, for example 'BTC/USDT'.
* {column} - name of dataframe column.
* {timeframe} - timeframe of informative dataframe.
:param ffill: ffill dataframe after merging informative pair.
@@ -68,9 +69,25 @@ def informative(timeframe: str, asset: str = '',
return decorator
def _format_pair_name(config, pair: str) -> str:
return pair.format(stake_currency=config['stake_currency'],
stake=config['stake_currency']).upper()
def __get_pair_formats(market: Optional[Dict[str, Any]]) -> Dict[str, str]:
if not market:
return {}
base = market['base']
quote = market['quote']
return {
'base': base.lower(),
'BASE': base.upper(),
'quote': quote.lower(),
'QUOTE': quote.upper(),
}
def _format_pair_name(config, pair: str, market: Optional[Dict[str, Any]] = None) -> str:
return pair.format(
stake_currency=config['stake_currency'],
stake=config['stake_currency'],
**__get_pair_formats(market),
).upper()
def _create_and_merge_informative_pair(strategy, dataframe: DataFrame, metadata: dict,
@@ -85,7 +102,8 @@ def _create_and_merge_informative_pair(strategy, dataframe: DataFrame, metadata:
if asset:
# Insert stake currency if needed.
asset = _format_pair_name(config, asset)
market1 = strategy.dp.market(metadata['pair'])
asset = _format_pair_name(config, asset, market1)
else:
# Not specifying an asset will define informative dataframe for current pair.
asset = metadata['pair']
@@ -93,8 +111,6 @@ def _create_and_merge_informative_pair(strategy, dataframe: DataFrame, metadata:
market = strategy.dp.market(asset)
if market is None:
raise OperationalException(f'Market {asset} is not available.')
base = market['base']
quote = market['quote']
# Default format. This optimizes for the common case: informative pairs using same stake
# currency. When quote currency matches stake currency, column name will omit base currency.
@@ -117,10 +133,7 @@ def _create_and_merge_informative_pair(strategy, dataframe: DataFrame, metadata:
formatter = fmt.format # A default string formatter.
fmt_args = {
'BASE': base.upper(),
'QUOTE': quote.upper(),
'base': base.lower(),
'quote': quote.lower(),
**__get_pair_formats(market),
'asset': asset,
'timeframe': timeframe,
}
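A strategy fragment (the indicator and quote currency are illustrative) showing the new placeholders resolving per whitelist pair, e.g. ETH/USDT yields ETH/BTC as the informative pair:

import talib.abstract as ta
from pandas import DataFrame

from freqtrade.strategy import IStrategy, informative


class FormatStringExample(IStrategy):
    timeframe = '5m'

    # '{base}/BTC' is formatted per pair and upper-cased, so eth -> ETH/BTC
    @informative('1h', '{base}/BTC')
    def populate_indicators_base_btc_1h(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        dataframe['rsi'] = ta.RSI(dataframe, timeperiod=14)
        return dataframe

    # ... remaining strategy methods omitted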

View File

@@ -757,12 +757,23 @@ class IStrategy(ABC, HyperStrategyMixin):
candle_type = (inf_data.candle_type if inf_data.candle_type
else self.config.get('candle_type_def', CandleType.SPOT))
if inf_data.asset:
pair_tf = (
_format_pair_name(self.config, inf_data.asset),
inf_data.timeframe,
candle_type,
)
informative_pairs.append(pair_tf)
if any(s in inf_data.asset for s in ("{BASE}", "{base}")):
for pair in self.dp.current_whitelist():
pair_tf = (
_format_pair_name(self.config, inf_data.asset, self.dp.market(pair)),
inf_data.timeframe,
candle_type,
)
informative_pairs.append(pair_tf)
else:
pair_tf = (
_format_pair_name(self.config, inf_data.asset),
inf_data.timeframe,
candle_type,
)
informative_pairs.append(pair_tf)
else:
for pair in self.dp.current_whitelist():
informative_pairs.append((pair, inf_data.timeframe, candle_type))
@@ -1008,7 +1019,7 @@ class IStrategy(ABC, HyperStrategyMixin):
exit_ = latest.get(SignalType.EXIT_LONG.value, 0) == 1
exit_tag = latest.get(SignalTagType.EXIT_TAG.value, None)
# Tags can be None, which does not resolve to False.
exit_tag = exit_tag if isinstance(exit_tag, str) else None
exit_tag = exit_tag if isinstance(exit_tag, str) and exit_tag != 'nan' else None
logger.debug(f"exit-trigger: {latest['date']} (pair={pair}) "
f"enter={enter} exit={exit_}")
@@ -1040,17 +1051,17 @@ class IStrategy(ABC, HyperStrategyMixin):
exit_short = latest.get(SignalType.EXIT_SHORT.value, 0) == 1
enter_signal: Optional[SignalDirection] = None
enter_tag_value: Optional[str] = None
enter_tag: Optional[str] = None
if enter_long == 1 and not any([exit_long, enter_short]):
enter_signal = SignalDirection.LONG
enter_tag_value = latest.get(SignalTagType.ENTER_TAG.value, None)
enter_tag = latest.get(SignalTagType.ENTER_TAG.value, None)
if (self.config.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT
and self.can_short
and enter_short == 1 and not any([exit_short, enter_long])):
enter_signal = SignalDirection.SHORT
enter_tag_value = latest.get(SignalTagType.ENTER_TAG.value, None)
enter_tag = latest.get(SignalTagType.ENTER_TAG.value, None)
enter_tag_value = enter_tag_value if isinstance(enter_tag_value, str) else None
enter_tag = enter_tag if isinstance(enter_tag, str) and enter_tag != 'nan' else None
timeframe_seconds = timeframe_to_seconds(timeframe)
@@ -1060,11 +1071,11 @@ class IStrategy(ABC, HyperStrategyMixin):
timeframe_seconds=timeframe_seconds,
enter=bool(enter_signal)
):
return None, enter_tag_value
return None, enter_tag
logger.debug(f"entry trigger: {latest['date']} (pair={pair}) "
f"enter={enter_long} enter_tag_value={enter_tag_value}")
return enter_signal, enter_tag_value
f"enter={enter_long} enter_tag_value={enter_tag}")
return enter_signal, enter_tag
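The extra != 'nan' guard presumably catches tags that were coerced to the literal string 'nan' somewhere upstream; a plain float NaN already fails the isinstance check. A tiny illustration of both cases:

# Illustration (not freqtrade code): why both checks are needed.
candidates = [float('nan'), str(float('nan')), 'buy_signal_1', None]
cleaned = [t if isinstance(t, str) and t != 'nan' else None for t in candidates]
print(cleaned)  # [None, None, 'buy_signal_1', None]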
def ignore_expired_candle(
self,
@@ -1246,10 +1257,6 @@ class IStrategy(ABC, HyperStrategyMixin):
and trade.liquidation_price <= (high or current_rate)
and trade.is_short)
if (liq_higher_long or liq_lower_short):
logger.debug(f"{trade.pair} - Liquidation price hit. exit_type=ExitType.LIQUIDATION")
return ExitCheckTuple(exit_type=ExitType.LIQUIDATION)
# evaluate if the stoploss was hit if stoploss is not on exchange
# in Dry-Run, this handles stoploss logic as well, as the logic will not be different to
# regular stoploss handling.
@@ -1270,6 +1277,10 @@ class IStrategy(ABC, HyperStrategyMixin):
return ExitCheckTuple(exit_type=exit_type)
if (liq_higher_long or liq_lower_short):
logger.debug(f"{trade.pair} - Liquidation price hit. exit_type=ExitType.LIQUIDATION")
return ExitCheckTuple(exit_type=ExitType.LIQUIDATION)
return ExitCheckTuple(exit_type=ExitType.NONE)
def min_roi_reached_entry(self, trade_dur: int) -> Tuple[Optional[int], Optional[float]]:

View File

@@ -36,7 +36,7 @@ def merge_informative_pair(dataframe: pd.DataFrame, informative: pd.DataFrame,
:return: Merged dataframe
:raise: ValueError if the secondary timeframe is shorter than the dataframe timeframe
"""
informative = informative.copy()
minutes_inf = timeframe_to_minutes(timeframe_inf)
minutes = timeframe_to_minutes(timeframe)
if minutes == minutes_inf:
@@ -46,10 +46,16 @@ def merge_informative_pair(dataframe: pd.DataFrame, informative: pd.DataFrame,
# Subtract "small" timeframe so merging is not delayed by 1 small candle
# Detailed explanation in https://github.com/freqtrade/freqtrade/issues/4073
if not informative.empty:
informative['date_merge'] = (
informative[date_column] + pd.to_timedelta(minutes_inf, 'm') -
pd.to_timedelta(minutes, 'm')
)
if timeframe_inf == '1M':
informative['date_merge'] = (
(informative[date_column] + pd.offsets.MonthBegin(1))
- pd.to_timedelta(minutes, 'm')
)
else:
informative['date_merge'] = (
informative[date_column] + pd.to_timedelta(minutes_inf, 'm') -
pd.to_timedelta(minutes, 'm')
)
else:
informative['date_merge'] = informative[date_column]
else:
@@ -80,9 +86,6 @@ def merge_informative_pair(dataframe: pd.DataFrame, informative: pd.DataFrame,
right_on=date_merge, how='left')
dataframe = dataframe.drop(date_merge, axis=1)
# if ffill:
# dataframe = dataframe.ffill()
return dataframe
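Calendar months have no fixed minute width, hence the MonthBegin branch above: the merge date becomes the start of the next month minus one base candle. A small sketch of both branches, assuming a 1h base timeframe and illustrative dates:

import pandas as pd

# Hedged sketch of the two branches above (dates are illustrative).
informative = pd.DataFrame({'date': pd.to_datetime(['2023-01-01', '2023-02-01'], utc=True)})
minutes = 60  # base timeframe: 1h

# '1M' branch: shift to the start of the next month, minus one base candle.
monthly_merge = (informative['date'] + pd.offsets.MonthBegin(1)) - pd.to_timedelta(minutes, 'm')
print(monthly_merge.iloc[0])  # 2023-01-31 23:00:00+00:00

# Fixed-width branch (e.g. a 4h informative timeframe):
minutes_inf = 240
fixed_merge = informative['date'] + pd.to_timedelta(minutes_inf, 'm') - pd.to_timedelta(minutes, 'm')
print(fixed_merge.iloc[0])  # 2023-01-01 03:00:00+00:00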

View File

@@ -290,9 +290,6 @@ class FreqaiExampleStrategy(IStrategy):
return df
def get_ticker_indicator(self):
return int(self.config["timeframe"][:-1])
def confirm_trade_entry(
self,
pair: str,

View File

@@ -226,7 +226,7 @@ def crossed(series1, series2, direction=None):
series1.shift(1) >= series2.shift(1)))
if direction is None:
return above or below
return above | below
return above if direction == "above" else below
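With the fix above, crossed() returns only the requested side instead of the boolean OR of both. A standalone illustration of the above/below conditions on small pandas Series:

import pandas as pd

# Standalone illustration of the conditions used by crossed().
s1 = pd.Series([1, 2, 3, 2, 1])
s2 = pd.Series([2, 2, 2, 2, 2])

above = (s1 > s2) & (s1.shift(1) <= s2.shift(1))
below = (s1 < s2) & (s1.shift(1) >= s2.shift(1))

print(above.tolist())  # [False, False, True, False, False]
print(below.tolist())  # [False, False, False, False, True]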

View File

@@ -6,7 +6,7 @@ from copy import deepcopy
from datetime import datetime, timedelta
from typing import Dict, NamedTuple, Optional
from freqtrade.constants import UNLIMITED_STAKE_AMOUNT, Config
from freqtrade.constants import UNLIMITED_STAKE_AMOUNT, Config, IntOrInf
from freqtrade.enums import RunMode, TradingMode
from freqtrade.exceptions import DependencyException
from freqtrade.exchange import Exchange
@@ -262,15 +262,15 @@ class Wallets:
return min(self.get_total_stake_amount() - Trade.total_open_trades_stakes(), free)
def _calculate_unlimited_stake_amount(self, available_amount: float,
val_tied_up: float) -> float:
val_tied_up: float, max_open_trades: IntOrInf) -> float:
"""
Calculate stake amount for "unlimited" stake amount
:return: 0 if max number of trades reached, else stake_amount to use.
"""
if self._config['max_open_trades'] == 0:
if max_open_trades == 0:
return 0
possible_stake = (available_amount + val_tied_up) / self._config['max_open_trades']
possible_stake = (available_amount + val_tied_up) / max_open_trades
# Theoretical amount can be above available amount - therefore limit to available amount!
return min(possible_stake, available_amount)
@@ -298,7 +298,8 @@ class Wallets:
return stake_amount
def get_trade_stake_amount(self, pair: str, edge=None, update: bool = True) -> float:
def get_trade_stake_amount(
self, pair: str, max_open_trades: IntOrInf, edge=None, update: bool = True) -> float:
"""
Calculate stake amount for the trade
:return: float: Stake amount
@@ -322,7 +323,7 @@ class Wallets:
stake_amount = self._config['stake_amount']
if stake_amount == UNLIMITED_STAKE_AMOUNT:
stake_amount = self._calculate_unlimited_stake_amount(
available_amount, val_tied_up)
available_amount, val_tied_up, max_open_trades)
return self._check_available_stake_amount(stake_amount, available_amount)
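The unlimited stake calculation now divides by the max_open_trades value handed in by the caller instead of reading the raw config entry. A worked sketch of the arithmetic with hypothetical numbers:

# Hypothetical numbers, mirroring _calculate_unlimited_stake_amount's arithmetic.
available_amount = 800.0   # free stake currency
val_tied_up = 200.0        # value already tied up in open trades
max_open_trades = 4

possible_stake = (available_amount + val_tied_up) / max_open_trades  # 250.0
stake = min(possible_stake, available_amount)                        # capped at what is free
print(stake)  # 250.0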

View File

@@ -2,6 +2,55 @@
requires = ["setuptools >= 64.0.0", "wheel"]
build-backend = "setuptools.build_meta"
[project]
name = "freqtrade"
dynamic = ["version", "dependencies", "optional-dependencies"]
authors = [
{name = "Freqtrade Team"},
{name = "Freqtrade Team", email = "freqtrade@protonmail.com"},
]
description = "Freqtrade - Crypto Trading Bot"
readme = "README.md"
requires-python = ">=3.9"
license = {text = "GPLv3"}
# license = "GPLv3"
classifiers = [
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Operating System :: MacOS",
"Operating System :: Unix",
"Topic :: Office/Business :: Financial :: Investment",
]
[project.urls]
Homepage = "https://github.com/freqtrade/freqtrade"
Documentation = "https://freqtrade.io"
"Bug Tracker" = "https://github.com/freqtrade/freqtrade/issues"
[project.scripts]
freqtrade = "freqtrade.main:main"
[tool.setuptools]
include-package-data = true
zip-safe = false
[tool.setuptools.packages.find]
where = ["."]
include = ["freqtrade*"]
exclude = ["tests", "tests.*"]
namespaces = true
[tool.setuptools.dynamic]
version = {attr = "freqtrade.__version__"}
[tool.black]
line-length = 100
exclude = '''
@@ -82,9 +131,29 @@ extend-select = [
# "TCH", # flake8-type-checking
"PTH", # flake8-use-pathlib
]
extend-ignore = [
"E241", # Multiple spaces after comma
"E272", # Multiple spaces before keyword
"E221", # Multiple spaces before operator
]
[tool.ruff.mccabe]
max-complexity = 12
[tool.ruff.per-file-ignores]
"tests/*" = ["S"]
[tool.flake8]
# Default from https://flake8.pycqa.org/en/latest/user/options.html#cmdoption-flake8-ignore
# minus E226
ignore = ["E121","E123","E126","E24","E704","W503","W504"]
max-line-length = 100
max-complexity = 12
exclude = [
".git",
"__pycache__",
".eggs",
"user_data",
".venv",
".env",
]

View File

@@ -7,24 +7,24 @@
-r docs/requirements-docs.txt
coveralls==3.3.1
ruff==0.0.292
mypy==1.5.1
pre-commit==3.4.0
pytest==7.4.2
ruff==0.1.8
mypy==1.7.1
pre-commit==3.6.0
pytest==7.4.3
pytest-asyncio==0.21.1
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-mock==3.12.0
pytest-random-order==1.1.0
isort==5.12.0
isort==5.13.2
# For datetime mocking
time-machine==2.13.0
# Convert jupyter notebooks to markdown documents
nbconvert==7.9.2
nbconvert==7.12.0
# mypy types
types-cachetools==5.3.0.6
types-cachetools==5.3.0.7
types-filelock==3.2.7
types-requests==2.31.0.8
types-requests==2.31.0.10
types-tabulate==0.9.0.3
types-python-dateutil==2.8.19.14

View File

@@ -2,10 +2,10 @@
-r requirements-freqai.txt
# Required for freqai-rl
torch==2.0.1
torch==2.1.2
# until these branches are released we can use this
gymnasium==0.29.1
stable_baselines3==2.1.0
stable_baselines3==2.2.1
sb3_contrib>=2.0.0a9
# Progress bar for stable-baselines3 and sb3-contrib
tqdm==4.66.1

View File

@@ -3,10 +3,10 @@
-r requirements-plot.txt
# Required for freqai
scikit-learn==1.1.3
scikit-learn==1.3.2
joblib==1.3.2
catboost==1.2.2; 'arm' not in platform_machine
lightgbm==4.1.0
xgboost==2.0.0
tensorboard==2.14.1
xgboost==2.0.2
tensorboard==2.15.1
datasieve==0.1.7

View File

@@ -2,7 +2,7 @@
-r requirements.txt
# Required for hyperopt
scipy==1.11.3
scikit-learn==1.1.3
scikit-optimize==0.9.0
filelock==3.12.4
scipy==1.11.4
scikit-learn==1.3.2
ft-scikit-optimize==0.9.2
filelock==3.13.1

View File

@@ -1,4 +1,4 @@
# Include all requirements to run the bot.
-r requirements.txt
plotly==5.17.0
plotly==5.18.0

View File

@@ -1,49 +1,47 @@
numpy==1.26.0; platform_machine != 'armv7l'
numpy==1.25.2; platform_machine == 'armv7l'
pandas==2.0.3
numpy==1.26.2
pandas==2.1.4
pandas-ta==0.3.14b
ccxt==4.1.8
cryptography==41.0.4
aiohttp==3.8.6
SQLAlchemy==2.0.21
python-telegram-bot==20.6
ccxt==4.1.84
cryptography==41.0.7
aiohttp==3.9.1
SQLAlchemy==2.0.23
python-telegram-bot==20.7
# can't be hard-pinned due to telegram-bot pinning httpx with ~
httpx>=0.24.1
arrow==1.3.0
cachetools==5.3.1
cachetools==5.3.2
requests==2.31.0
urllib3==2.0.6
jsonschema==4.19.1
urllib3==2.1.0
jsonschema==4.20.0
TA-Lib==0.4.28
technical==1.4.0
technical==1.4.2
tabulate==0.9.0
pycoingecko==3.1.0
jinja2==3.1.2
tables==3.9.1
blosc==1.11.1
joblib==1.3.2
rich==13.6.0
pyarrow==13.0.0; platform_machine != 'armv7l'
rich==13.7.0
pyarrow==14.0.1; platform_machine != 'armv7l'
# find first, C search in arrays
py_find_1st==1.1.5
py_find_1st==1.1.6
# Load ticker files 30% faster
python-rapidjson==1.12
python-rapidjson==1.14
# Properly format api responses
orjson==3.9.7
orjson==3.9.10
# Notify systemd
sdnotify==0.3.2
# API Server
fastapi==0.103.2
pydantic==2.4.2
uvicorn==0.23.2
fastapi==0.105.0
pydantic==2.5.2
uvicorn==0.24.0.post1
pyjwt==2.8.0
aiofiles==23.2.1
psutil==5.9.5
psutil==5.9.7
# Support for colorized terminal output
colorama==0.4.6
@@ -57,8 +55,8 @@ python-dateutil==2.8.2
schedule==1.2.1
#WS Messages
websockets==11.0.3
websockets==12.0
janus==1.0.0
ast-comments==1.1.0
ast-comments==1.2.0
packaging==23.2

View File

@@ -112,6 +112,30 @@ class FtRestClient:
"""
return self._get("count")
def entries(self, pair=None):
"""Returns List of dicts containing all Trades, based on buy tag performance
Can either be average for all pairs or a specific pair provided
:return: json object
"""
return self._get("entries", params={"pair": pair} if pair else None)
def exits(self, pair=None):
"""Returns List of dicts containing all Trades, based on exit reason performance
Can either be average for all pairs or a specific pair provided
:return: json object
"""
return self._get("exits", params={"pair": pair} if pair else None)
def mix_tags(self, pair=None):
"""Returns List of dicts containing all Trades, based on entry_tag + exit_reason performance
Can either be average for all pairs or a specific pair provided
:return: json object
"""
return self._get("mix_tags", params={"pair": pair} if pair else None)
def locks(self):
"""Return current locks

View File

@@ -1,53 +0,0 @@
[metadata]
name = freqtrade
version = attr: freqtrade.__version__
author = Freqtrade Team
author_email = freqtrade@protonmail.com
description = Freqtrade - Crypto Trading Bot
long_description = file: README.md
long_description_content_type = text/markdown
url = https://github.com/freqtrade/freqtrade
project_urls =
Bug Tracker = https://github.com/freqtrade/freqtrade/issues
license = GPLv3
classifiers =
Environment :: Console
Intended Audience :: Science/Research
License :: OSI Approved :: GNU General Public License v3 (GPLv3)
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Operating System :: MacOS
Operating System :: Unix
Topic :: Office/Business :: Financial :: Investment
[options]
zip_safe = False
include_package_data = True
tests_require =
pytest
pytest-asyncio
pytest-cov
pytest-mock
packages = find:
python_requires = >=3.9
[options.entry_points]
console_scripts =
freqtrade = freqtrade.main:main
[flake8]
# Default from https://flake8.pycqa.org/en/latest/user/options.html#cmdoption-flake8-ignore
# minus E226
ignore = E121,E123,E126,E24,E704,W503,W504
max-line-length = 100
max-complexity = 12
exclude =
.git,
__pycache__,
.eggs,
user_data,
.venv
.env

View File

@@ -5,8 +5,8 @@ from setuptools import setup
plot = ['plotly>=4.0']
hyperopt = [
'scipy',
'scikit-learn<=1.1.3',
'scikit-optimize>=0.7.0',
'scikit-learn',
'ft-scikit-optimize>=0.9.2',
'filelock',
]
@@ -122,4 +122,5 @@ setup(
'freqai_rl': freqai_rl,
'all': all_extra,
},
url="https://github.com/freqtrade/freqtrade",
)

View File

@@ -550,7 +550,7 @@ def test_start_install_ui(mocker):
assert download_mock.call_count == 0
def test_clean_ui_subdir(mocker, tmpdir, caplog):
def test_clean_ui_subdir(mocker, tmp_path, caplog):
mocker.patch("freqtrade.commands.deploy_commands.Path.is_dir",
side_effect=[True, True])
mocker.patch("freqtrade.commands.deploy_commands.Path.is_file",
@@ -560,14 +560,14 @@ def test_clean_ui_subdir(mocker, tmpdir, caplog):
mocker.patch("freqtrade.commands.deploy_commands.Path.glob",
return_value=[Path('test1'), Path('test2'), Path('.gitkeep')])
folder = Path(tmpdir) / "uitests"
folder = tmp_path / "uitests"
clean_ui_subdir(folder)
assert log_has("Removing UI directory content.", caplog)
assert rd_mock.call_count == 1
assert ul_mock.call_count == 1
def test_download_and_install_ui(mocker, tmpdir):
def test_download_and_install_ui(mocker, tmp_path):
# Create zipfile
requests_mock = MagicMock()
file_like_object = BytesIO()
@@ -583,7 +583,7 @@ def test_download_and_install_ui(mocker, tmpdir):
side_effect=[True, False])
wb_mock = mocker.patch("freqtrade.commands.deploy_commands.Path.write_bytes")
folder = Path(tmpdir) / "uitests_dl"
folder = tmp_path / "uitests_dl"
folder.mkdir(exist_ok=True)
assert read_ui_version(folder) is None
@@ -1010,8 +1010,8 @@ def test_start_test_pairlist(mocker, caplog, tickers, default_conf, capsys):
pytest.fail(f'Expected well formed JSON, but failed to parse: {captured.out}')
def test_hyperopt_list(mocker, capsys, caplog, saved_hyperopt_results, tmpdir):
csv_file = Path(tmpdir) / "test.csv"
def test_hyperopt_list(mocker, capsys, caplog, saved_hyperopt_results, tmp_path):
csv_file = tmp_path / "test.csv"
mocker.patch(
'freqtrade.optimize.hyperopt_tools.HyperoptTools._test_hyperopt_results_exist',
return_value=True
@@ -1512,10 +1512,10 @@ def test_backtesting_show(mocker, testdatadir, capsys):
assert "Pairs for Strategy" in out
def test_start_convert_db(mocker, fee, tmpdir, caplog):
db_src_file = Path(f"{tmpdir}/db.sqlite")
def test_start_convert_db(fee, tmp_path):
db_src_file = tmp_path / "db.sqlite"
db_from = f"sqlite:///{db_src_file}"
db_target_file = Path(f"{tmpdir}/db_target.sqlite")
db_target_file = tmp_path / "db_target.sqlite"
db_to = f"sqlite:///{db_target_file}"
args = [
"convert-db",
@@ -1542,13 +1542,13 @@ def test_start_convert_db(mocker, fee, tmpdir, caplog):
assert db_target_file.is_file()
def test_start_strategy_updater(mocker, tmpdir):
def test_start_strategy_updater(mocker, tmp_path):
sc_mock = mocker.patch('freqtrade.commands.strategy_utils_commands.start_conversion')
teststrats = Path(__file__).parent.parent / 'strategy/strats'
args = [
"strategy-updater",
"--userdir",
str(tmpdir),
str(tmp_path),
"--strategy-path",
str(teststrats),
]
@@ -1562,7 +1562,7 @@ def test_start_strategy_updater(mocker, tmpdir):
args = [
"strategy-updater",
"--userdir",
str(tmpdir),
str(tmp_path),
"--strategy-path",
str(teststrats),
"--strategy-list",

View File

@@ -87,11 +87,15 @@ def get_args(args):
def generate_test_data(timeframe: str, size: int, start: str = '2020-07-05'):
np.random.seed(42)
tf_mins = timeframe_to_minutes(timeframe)
base = np.random.normal(20, 2, size=size)
date = pd.date_range(start, periods=size, freq=f'{tf_mins}min', tz='UTC')
if timeframe == '1M':
date = pd.date_range(start, periods=size, freq='1MS', tz='UTC')
elif timeframe == '1w':
date = pd.date_range(start, periods=size, freq='1W-MON', tz='UTC')
else:
tf_mins = timeframe_to_minutes(timeframe)
date = pd.date_range(start, periods=size, freq=f'{tf_mins}min', tz='UTC')
df = pd.DataFrame({
'date': date,
'open': base,
@@ -413,8 +417,8 @@ def patch_gc(mocker) -> None:
@pytest.fixture(autouse=True)
def user_dir(mocker, tmpdir) -> Path:
user_dir = Path(tmpdir) / "user_data"
def user_dir(mocker, tmp_path) -> Path:
user_dir = tmp_path / "user_data"
mocker.patch('freqtrade.configuration.configuration.create_userdata_dir',
return_value=user_dir)
return user_dir

View File

@@ -1,6 +1,5 @@
# pragma pylint: disable=missing-docstring, C0103
import logging
from pathlib import Path
from shutil import copyfile
import numpy as np
@@ -50,8 +49,8 @@ def test_trades_to_ohlcv(trades_history_df, caplog):
assert 'high' in df.columns
assert 'low' in df.columns
assert 'close' in df.columns
assert df.loc[:, 'high'][0] == 0.019627
assert df.loc[:, 'low'][0] == 0.019626
assert df.iloc[0, :]['high'] == 0.019627
assert df.iloc[0, :]['low'] == 0.019626
def test_ohlcv_fill_up_missing_data(testdatadir, caplog):
@@ -65,7 +64,7 @@ def test_ohlcv_fill_up_missing_data(testdatadir, caplog):
# Column names should not change
assert (data.columns == data2.columns).all()
assert log_has_re(f"Missing data fillup for UNITTEST/BTC: before: "
assert log_has_re(f"Missing data fillup for UNITTEST/BTC, 1m: before: "
f"{len(data)} - after: {len(data2)}.*", caplog)
# Test fillup actually fixes invalid backtest data
@@ -129,7 +128,7 @@ def test_ohlcv_fill_up_missing_data2(caplog):
# Column names should not change
assert (data.columns == data2.columns).all()
assert log_has_re(f"Missing data fillup for UNITTEST/BTC: before: "
assert log_has_re(f"Missing data fillup for UNITTEST/BTC, {timeframe}: before: "
f"{len(data)} - after: {len(data2)}.*", caplog)
@@ -323,18 +322,17 @@ def test_trades_dict_to_list(fetch_trades_result):
assert t[6] == fetch_trades_result[i]['cost']
def test_convert_trades_format(default_conf, testdatadir, tmpdir):
tmpdir1 = Path(tmpdir)
files = [{'old': tmpdir1 / "XRP_ETH-trades.json.gz",
'new': tmpdir1 / "XRP_ETH-trades.json"},
{'old': tmpdir1 / "XRP_OLD-trades.json.gz",
'new': tmpdir1 / "XRP_OLD-trades.json"},
def test_convert_trades_format(default_conf, testdatadir, tmp_path):
files = [{'old': tmp_path / "XRP_ETH-trades.json.gz",
'new': tmp_path / "XRP_ETH-trades.json"},
{'old': tmp_path / "XRP_OLD-trades.json.gz",
'new': tmp_path / "XRP_OLD-trades.json"},
]
for file in files:
copyfile(testdatadir / file['old'].name, file['old'])
assert not file['new'].exists()
default_conf['datadir'] = tmpdir1
default_conf['datadir'] = tmp_path
convert_trades_format(default_conf, convert_from='jsongz',
convert_to='json', erase=False)
@@ -362,16 +360,15 @@ def test_convert_trades_format(default_conf, testdatadir, tmpdir):
(['UNITTEST_USDT_USDT-1h-mark', 'XRP_USDT_USDT-1h-mark'], CandleType.MARK),
(['XRP_USDT_USDT-1h-futures'], CandleType.FUTURES),
])
def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir, file_base, candletype):
tmpdir1 = Path(tmpdir)
def test_convert_ohlcv_format(default_conf, testdatadir, tmp_path, file_base, candletype):
prependix = '' if candletype == CandleType.SPOT else 'futures/'
files_orig = []
files_temp = []
files_new = []
for file in file_base:
file_orig = testdatadir / f"{prependix}{file}.feather"
file_temp = tmpdir1 / f"{prependix}{file}.feather"
file_new = tmpdir1 / f"{prependix}{file}.json.gz"
file_temp = tmp_path / f"{prependix}{file}.feather"
file_new = tmp_path / f"{prependix}{file}.json.gz"
IDataHandler.create_dir_if_needed(file_temp)
copyfile(file_orig, file_temp)
@@ -379,7 +376,7 @@ def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir, file_base, cand
files_temp.append(file_temp)
files_new.append(file_new)
default_conf['datadir'] = tmpdir1
default_conf['datadir'] = tmp_path
default_conf['candle_types'] = [candletype]
if candletype == CandleType.SPOT:
@@ -445,30 +442,29 @@ def test_reduce_dataframe_footprint():
assert df2['close_copy'].dtype == np.float32
def test_convert_trades_to_ohlcv(testdatadir, tmpdir, caplog):
tmpdir1 = Path(tmpdir)
def test_convert_trades_to_ohlcv(testdatadir, tmp_path, caplog):
pair = 'XRP/ETH'
file1 = tmpdir1 / 'XRP_ETH-1m.feather'
file5 = tmpdir1 / 'XRP_ETH-5m.feather'
filetrades = tmpdir1 / 'XRP_ETH-trades.json.gz'
file1 = tmp_path / 'XRP_ETH-1m.feather'
file5 = tmp_path / 'XRP_ETH-5m.feather'
filetrades = tmp_path / 'XRP_ETH-trades.json.gz'
copyfile(testdatadir / file1.name, file1)
copyfile(testdatadir / file5.name, file5)
copyfile(testdatadir / filetrades.name, filetrades)
# Compare downloaded dataset with converted dataset
dfbak_1m = load_pair_history(datadir=tmpdir1, timeframe="1m", pair=pair)
dfbak_5m = load_pair_history(datadir=tmpdir1, timeframe="5m", pair=pair)
dfbak_1m = load_pair_history(datadir=tmp_path, timeframe="1m", pair=pair)
dfbak_5m = load_pair_history(datadir=tmp_path, timeframe="5m", pair=pair)
tr = TimeRange.parse_timerange('20191011-20191012')
convert_trades_to_ohlcv([pair], timeframes=['1m', '5m'],
data_format_trades='jsongz',
datadir=tmpdir1, timerange=tr, erase=True)
datadir=tmp_path, timerange=tr, erase=True)
assert log_has("Deleting existing data for pair XRP/ETH, interval 1m.", caplog)
# Load new data
df_1m = load_pair_history(datadir=tmpdir1, timeframe="1m", pair=pair)
df_5m = load_pair_history(datadir=tmpdir1, timeframe="5m", pair=pair)
df_1m = load_pair_history(datadir=tmp_path, timeframe="1m", pair=pair)
df_5m = load_pair_history(datadir=tmp_path, timeframe="5m", pair=pair)
assert_frame_equal(dfbak_1m, df_1m, check_exact=True)
assert_frame_equal(dfbak_5m, df_5m, check_exact=True)
@@ -477,5 +473,5 @@ def test_convert_trades_to_ohlcv(testdatadir, tmpdir, caplog):
convert_trades_to_ohlcv(['NoDatapair'], timeframes=['1m', '5m'],
data_format_trades='jsongz',
datadir=tmpdir1, timerange=tr, erase=True)
datadir=tmp_path, timerange=tr, erase=True)
assert log_has(msg, caplog)

View File

@@ -328,17 +328,16 @@ def test_hdf5datahandler_trades_load(testdatadir):
])
def test_hdf5datahandler_ohlcv_load_and_resave(
testdatadir,
tmpdir,
tmp_path,
pair,
timeframe,
candle_type,
candle_append,
startdt, enddt
):
tmpdir1 = Path(tmpdir)
tmpdir2 = tmpdir1
tmpdir2 = tmp_path
if candle_type not in ('', 'spot'):
tmpdir2 = tmpdir1 / 'futures'
tmpdir2 = tmp_path / 'futures'
tmpdir2.mkdir()
dh = get_datahandler(testdatadir, 'hdf5')
ohlcv = dh._ohlcv_load(pair, timeframe, None, candle_type=candle_type)
@@ -348,7 +347,7 @@ def test_hdf5datahandler_ohlcv_load_and_resave(
file = tmpdir2 / f"UNITTEST_NEW-{timeframe}{candle_append}.h5"
assert not file.is_file()
dh1 = get_datahandler(tmpdir1, 'hdf5')
dh1 = get_datahandler(tmp_path, 'hdf5')
dh1.ohlcv_store('UNITTEST/NEW', timeframe, ohlcv, candle_type=candle_type)
assert file.is_file()
@@ -379,17 +378,16 @@ def test_hdf5datahandler_ohlcv_load_and_resave(
def test_generic_datahandler_ohlcv_load_and_resave(
datahandler,
testdatadir,
tmpdir,
tmp_path,
pair,
timeframe,
candle_type,
candle_append,
startdt, enddt
):
tmpdir1 = Path(tmpdir)
tmpdir2 = tmpdir1
tmpdir2 = tmp_path
if candle_type not in ('', 'spot'):
tmpdir2 = tmpdir1 / 'futures'
tmpdir2 = tmp_path / 'futures'
tmpdir2.mkdir()
# Load data from one common file
dhbase = get_datahandler(testdatadir, 'feather')
@@ -403,7 +401,7 @@ def test_generic_datahandler_ohlcv_load_and_resave(
file = tmpdir2 / f"UNITTEST_NEW-{timeframe}{candle_append}.{dh._get_file_extension()}"
assert not file.is_file()
dh1 = get_datahandler(tmpdir1, datahandler)
dh1 = get_datahandler(tmp_path, datahandler)
dh1.ohlcv_store('UNITTEST/NEW', timeframe, ohlcv, candle_type=candle_type)
assert file.is_file()
@@ -459,15 +457,14 @@ def test_datahandler_trades_load(testdatadir, datahandler):
@pytest.mark.parametrize('datahandler', ['jsongz', 'hdf5', 'feather', 'parquet'])
def test_datahandler_trades_store(testdatadir, tmpdir, datahandler):
tmpdir1 = Path(tmpdir)
def test_datahandler_trades_store(testdatadir, tmp_path, datahandler):
dh = get_datahandler(testdatadir, datahandler)
trades = dh.trades_load('XRP/ETH')
dh1 = get_datahandler(tmpdir1, datahandler)
dh1 = get_datahandler(tmp_path, datahandler)
dh1.trades_store('XRP/NEW', trades)
file = tmpdir1 / f'XRP_NEW-trades.{dh1._get_file_extension()}'
file = tmp_path / f'XRP_NEW-trades.{dh1._get_file_extension()}'
assert file.is_file()
# Load trades back
trades_new = dh1.trades_load('XRP/NEW')

View File

@@ -500,3 +500,89 @@ def test_dp__add_external_df(default_conf_usdt):
# 36 hours - from 2022-01-03 12:00:00+00:00 to 2022-01-05 00:00:00+00:00
assert isinstance(res[1], int)
assert res[1] == 0
def test_dp_get_required_startup(default_conf_usdt):
timeframe = '1h'
default_conf_usdt["timeframe"] = timeframe
dp = DataProvider(default_conf_usdt, None)
# No FreqAI config
assert dp.get_required_startup('5m', False) == 0
assert dp.get_required_startup('1h', False) == 0
assert dp.get_required_startup('1d', False) == 0
assert dp.get_required_startup('1d', True) == 0
assert dp.get_required_startup('1d') == 0
dp._config['startup_candle_count'] = 20
assert dp.get_required_startup('5m', False) == 20
assert dp.get_required_startup('5m', True) == 20
assert dp.get_required_startup('1h', False) == 20
assert dp.get_required_startup('1h') == 20
# With freqAI config
dp._config['freqai'] = {
'enabled': True,
'train_period_days': 20,
'feature_parameters': {
'indicator_periods_candles': [
5,
20,
]
}
}
assert dp.get_required_startup('5m', False) == 20
assert dp.get_required_startup('5m', True) == 5780
assert dp.get_required_startup('1h', False) == 20
assert dp.get_required_startup('1h', True) == 500
assert dp.get_required_startup('1d', False) == 20
assert dp.get_required_startup('1d', True) == 40
assert dp.get_required_startup('1d') == 40
# FreqAI kind of ignores startup_candle_count if it's below indicator_periods_candles
dp._config['startup_candle_count'] = 0
assert dp.get_required_startup('5m', False) == 20
assert dp.get_required_startup('5m', True) == 5780
assert dp.get_required_startup('1h', False) == 20
assert dp.get_required_startup('1h', True) == 500
assert dp.get_required_startup('1d', False) == 20
assert dp.get_required_startup('1d', True) == 40
assert dp.get_required_startup('1d') == 40
dp._config['freqai']['feature_parameters']['indicator_periods_candles'][1] = 50
assert dp.get_required_startup('5m', False) == 50
assert dp.get_required_startup('5m', True) == 5810
assert dp.get_required_startup('1h', False) == 50
assert dp.get_required_startup('1h', True) == 530
assert dp.get_required_startup('1d', False) == 50
assert dp.get_required_startup('1d', True) == 70
assert dp.get_required_startup('1d') == 70
# scenario from issue https://github.com/freqtrade/freqtrade/issues/9432
dp._config['freqai'] = {
'enabled': True,
'train_period_days': 180,
'feature_parameters': {
'indicator_periods_candles': [
10,
20,
]
}
}
dp._config['startup_candle_count'] = 40
assert dp.get_required_startup('5m', False) == 40
assert dp.get_required_startup('5m', True) == 51880
assert dp.get_required_startup('1h', False) == 40
assert dp.get_required_startup('1h', True) == 4360
assert dp.get_required_startup('1d', False) == 40
assert dp.get_required_startup('1d', True) == 220
assert dp.get_required_startup('1d') == 220
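The expected values in this test appear to follow a simple rule: take the larger of startup_candle_count and the largest indicator period, then, when FreqAI data is requested, add train_period_days converted into candles of the given timeframe. A hedged reconstruction of that arithmetic for the issue-9432 scenario above:

# Hedged reconstruction of the arithmetic behind the asserted values.
startup_candle_count = 40
indicator_periods = [10, 20]
train_period_days = 180

candles_per_day = {'5m': 24 * 12, '1h': 24, '1d': 1}

for tf, per_day in candles_per_day.items():
    base = max(startup_candle_count, max(indicator_periods))
    with_freqai = base + train_period_days * per_day
    print(tf, base, with_freqai)
# 5m 40 51880
# 1h 40 4360
# 1d 40 220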

View File

@@ -106,17 +106,16 @@ def test_load_data_startup_candles(mocker, testdatadir) -> None:
@pytest.mark.parametrize('candle_type', ['mark', ''])
def test_load_data_with_new_pair_1min(ohlcv_history_list, mocker, caplog,
default_conf, tmpdir, candle_type) -> None:
default_conf, tmp_path, candle_type) -> None:
"""
Test load_pair_history() with 1 min timeframe
"""
tmpdir1 = Path(tmpdir)
mocker.patch(f'{EXMS}.get_historic_ohlcv', return_value=ohlcv_history_list)
exchange = get_patched_exchange(mocker, default_conf)
file = tmpdir1 / 'MEME_BTC-1m.feather'
file = tmp_path / 'MEME_BTC-1m.feather'
# do not download a new pair if refresh_pairs isn't set
load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
load_pair_history(datadir=tmp_path, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
assert not file.is_file()
assert log_has(
f"No history for MEME/BTC, {candle_type}, 1m found. "
@@ -124,10 +123,10 @@ def test_load_data_with_new_pair_1min(ohlcv_history_list, mocker, caplog,
)
# download a new pair if refresh_pairs is set
refresh_data(datadir=tmpdir1, timeframe='1m', pairs=['MEME/BTC'],
refresh_data(datadir=tmp_path, timeframe='1m', pairs=['MEME/BTC'],
exchange=exchange, candle_type=CandleType.SPOT
)
load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
load_pair_history(datadir=tmp_path, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
assert file.is_file()
assert log_has_re(
r'\(0/1\) - Download history data for "MEME/BTC", 1m, '
@@ -273,27 +272,26 @@ def test_download_pair_history(
ohlcv_history_list,
mocker,
default_conf,
tmpdir,
tmp_path,
candle_type,
subdir,
file_tail
) -> None:
mocker.patch(f'{EXMS}.get_historic_ohlcv', return_value=ohlcv_history_list)
exchange = get_patched_exchange(mocker, default_conf)
tmpdir1 = Path(tmpdir)
file1_1 = tmpdir1 / f'{subdir}MEME_BTC-1m{file_tail}.feather'
file1_5 = tmpdir1 / f'{subdir}MEME_BTC-5m{file_tail}.feather'
file2_1 = tmpdir1 / f'{subdir}CFI_BTC-1m{file_tail}.feather'
file2_5 = tmpdir1 / f'{subdir}CFI_BTC-5m{file_tail}.feather'
file1_1 = tmp_path / f'{subdir}MEME_BTC-1m{file_tail}.feather'
file1_5 = tmp_path / f'{subdir}MEME_BTC-5m{file_tail}.feather'
file2_1 = tmp_path / f'{subdir}CFI_BTC-1m{file_tail}.feather'
file2_5 = tmp_path / f'{subdir}CFI_BTC-5m{file_tail}.feather'
assert not file1_1.is_file()
assert not file2_1.is_file()
assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
assert _download_pair_history(datadir=tmp_path, exchange=exchange,
pair='MEME/BTC',
timeframe='1m',
candle_type=candle_type)
assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
assert _download_pair_history(datadir=tmp_path, exchange=exchange,
pair='CFI/BTC',
timeframe='1m',
candle_type=candle_type)
@@ -308,11 +306,11 @@ def test_download_pair_history(
assert not file1_5.is_file()
assert not file2_5.is_file()
assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
assert _download_pair_history(datadir=tmp_path, exchange=exchange,
pair='MEME/BTC',
timeframe='5m',
candle_type=candle_type)
assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
assert _download_pair_history(datadir=tmp_path, exchange=exchange,
pair='CFI/BTC',
timeframe='5m',
candle_type=candle_type)
@@ -340,13 +338,12 @@ def test_download_pair_history2(mocker, default_conf, testdatadir) -> None:
assert json_dump_mock.call_count == 3
def test_download_backtesting_data_exception(mocker, caplog, default_conf, tmpdir) -> None:
def test_download_backtesting_data_exception(mocker, caplog, default_conf, tmp_path) -> None:
mocker.patch(f'{EXMS}.get_historic_ohlcv',
side_effect=Exception('File Error'))
tmpdir1 = Path(tmpdir)
exchange = get_patched_exchange(mocker, default_conf)
assert not _download_pair_history(datadir=tmpdir1, exchange=exchange,
assert not _download_pair_history(datadir=tmp_path, exchange=exchange,
pair='MEME/BTC',
timeframe='1m', candle_type='spot')
assert log_has('Failed to download history data for pair: "MEME/BTC", timeframe: 1m.', caplog)
@@ -570,16 +567,15 @@ def test_refresh_backtest_trades_data(mocker, default_conf, markets, caplog, tes
def test_download_trades_history(trades_history, mocker, default_conf, testdatadir, caplog,
tmpdir, time_machine) -> None:
tmp_path, time_machine) -> None:
start_dt = dt_utc(2023, 1, 1)
time_machine.move_to(start_dt, tick=False)
tmpdir1 = Path(tmpdir)
ght_mock = MagicMock(side_effect=lambda pair, *args, **kwargs: (pair, trades_history))
mocker.patch(f'{EXMS}.get_historic_trades', ght_mock)
exchange = get_patched_exchange(mocker, default_conf)
file1 = tmpdir1 / 'ETH_BTC-trades.json.gz'
data_handler = get_datahandler(tmpdir1, data_format='jsongz')
file1 = tmp_path / 'ETH_BTC-trades.json.gz'
data_handler = get_datahandler(tmp_path, data_format='jsongz')
assert not file1.is_file()
@@ -614,7 +610,7 @@ def test_download_trades_history(trades_history, mocker, default_conf, testdatad
pair='ETH/BTC')
assert log_has_re('Failed to download historic trades for pair: "ETH/BTC".*', caplog)
file2 = tmpdir1 / 'XRP_ETH-trades.json.gz'
file2 = tmp_path / 'XRP_ETH-trades.json.gz'
copyfile(testdatadir / file2.name, file2)
ght_mock.reset_mock()

View File

@@ -1,5 +1,4 @@
from datetime import datetime, timezone
from pathlib import Path
from shutil import copytree
from unittest.mock import PropertyMock
@@ -11,7 +10,7 @@ from freqtrade.exceptions import OperationalException
from tests.conftest import EXMS, log_has, log_has_re, patch_exchange
def test_import_kraken_trades_from_csv(testdatadir, tmpdir, caplog, default_conf_usdt, mocker):
def test_import_kraken_trades_from_csv(testdatadir, tmp_path, caplog, default_conf_usdt, mocker):
with pytest.raises(OperationalException, match="This function is only for the kraken exchange"):
import_kraken_trades_from_csv(default_conf_usdt, 'feather')
@@ -21,10 +20,9 @@ def test_import_kraken_trades_from_csv(testdatadir, tmpdir, caplog, default_conf
mocker.patch(f'{EXMS}.markets', PropertyMock(return_value={
'BCH/EUR': {'symbol': 'BCH/EUR', 'id': 'BCHEUR', 'altname': 'BCHEUR'},
}))
tmpdir1 = Path(tmpdir)
dstfile = tmpdir1 / 'BCH_EUR-trades.feather'
dstfile = tmp_path / 'BCH_EUR-trades.feather'
assert not dstfile.is_file()
default_conf_usdt['datadir'] = tmpdir1
default_conf_usdt['datadir'] = tmp_path
# There are 2 files in this tree, containing a total of 2 days.
# tests/testdata/kraken/
# └── trades_csv
@@ -32,7 +30,7 @@ def test_import_kraken_trades_from_csv(testdatadir, tmpdir, caplog, default_conf
# └── incremental_q2
# └── BCHEUR.csv <-- 2023-01-02
copytree(testdatadir / 'kraken/trades_csv', tmpdir1 / 'trades_csv')
copytree(testdatadir / 'kraken/trades_csv', tmp_path / 'trades_csv')
import_kraken_trades_from_csv(default_conf_usdt, 'feather')
assert log_has("Found csv files for BCHEUR.", caplog)
@@ -40,7 +38,7 @@ def test_import_kraken_trades_from_csv(testdatadir, tmpdir, caplog, default_conf
assert dstfile.is_file()
dh = get_datahandler(tmpdir1, 'feather')
dh = get_datahandler(tmp_path, 'feather')
trades = dh.trades_load('BCH_EUR')
assert len(trades) == 340

View File

@@ -24,7 +24,7 @@ from tests.conftest import (EXMS, generate_test_data_raw, get_mock_coro, get_pat
# Make sure to always keep one exchange here which is NOT subclassed!!
EXCHANGES = ['bittrex', 'binance', 'kraken', 'gate', 'kucoin', 'bybit', 'okx']
EXCHANGES = ['binance', 'kraken', 'gate', 'kucoin', 'bybit', 'okx']
get_entry_rate_data = [
('other', 20, 19, 10, 0.0, 20), # Full ask side
@@ -1851,7 +1851,7 @@ def test_fetch_bids_asks(default_conf, mocker):
@pytest.mark.parametrize("exchange_name", EXCHANGES)
def test_get_tickers(default_conf, mocker, exchange_name):
def test_get_tickers(default_conf, mocker, exchange_name, caplog):
api_mock = MagicMock()
tick = {'ETH/BTC': {
'symbol': 'ETH/BTC',
@@ -1900,6 +1900,14 @@ def test_get_tickers(default_conf, mocker, exchange_name):
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
exchange.get_tickers()
caplog.clear()
api_mock.fetch_tickers = MagicMock(side_effect=[ccxt.BadSymbol("SomeSymbol"), []])
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
x = exchange.get_tickers()
assert x == []
assert log_has_re(r'Could not load tickers due to BadSymbol\..*SomeSymbol', caplog)
caplog.clear()
api_mock.fetch_tickers = MagicMock(return_value={})
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
exchange.get_tickers()
@@ -1969,6 +1977,34 @@ def test_fetch_ticker(default_conf, mocker, exchange_name):
exchange.fetch_ticker(pair='XRP/ETH')
@pytest.mark.parametrize("exchange_name", EXCHANGES)
def test___now_is_time_to_refresh(default_conf, mocker, exchange_name, time_machine):
exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)
pair = 'BTC/USDT'
candle_type = CandleType.SPOT
start_dt = datetime(2023, 12, 1, 0, 10, 0, tzinfo=timezone.utc)
time_machine.move_to(start_dt, tick=False)
assert (pair, '5m', candle_type) not in exchange._pairs_last_refresh_time
# not refreshed yet
assert exchange._now_is_time_to_refresh(pair, '5m', candle_type) is True
last_closed_candle = (start_dt - timedelta(minutes=5)).timestamp()
exchange._pairs_last_refresh_time[(pair, '5m', candle_type)] = last_closed_candle
# next candle not closed yet
time_machine.move_to(start_dt + timedelta(minutes=4, seconds=59), tick=False)
assert exchange._now_is_time_to_refresh(pair, '5m', candle_type) is False
# next candle closed
time_machine.move_to(start_dt + timedelta(minutes=5, seconds=0), tick=False)
assert exchange._now_is_time_to_refresh(pair, '5m', candle_type) is True
# 1 second later (last_refresh_time didn't change)
time_machine.move_to(start_dt + timedelta(minutes=5, seconds=1), tick=False)
assert exchange._now_is_time_to_refresh(pair, '5m', candle_type) is True
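The test above pins down the refresh rule: a pair/timeframe needs refreshing once a candle newer than the cached one has closed. A compact sketch of that decision in plain epoch arithmetic (not the exchange implementation):

# Hedged sketch of the refresh decision, using plain epoch arithmetic.
def needs_refresh(now_ts: int, last_refresh_ts: int, tf_seconds: int) -> bool:
    # open time of the most recently *closed* candle
    last_closed_open = (now_ts // tf_seconds) * tf_seconds - tf_seconds
    return last_closed_open > last_refresh_ts

tf = 300                      # 5m in seconds
last_refresh = 1_700_000_100  # hypothetical open time of the cached candle (multiple of 300)
print(needs_refresh(last_refresh + 599, last_refresh, tf))  # False: next candle not closed yet
print(needs_refresh(last_refresh + 600, last_refresh, tf))  # True: next candle just closed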
@pytest.mark.parametrize("exchange_name", EXCHANGES)
@pytest.mark.parametrize('candle_type', ['mark', ''])
def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_type):
@@ -3737,6 +3773,18 @@ def test_calculate_backoff(retrycount, max_retries, expected):
assert calculate_backoff(retrycount, max_retries) == expected
@pytest.mark.parametrize("exchange_name", EXCHANGES)
def test_get_funding_fees(default_conf_usdt, mocker, exchange_name, caplog):
now = datetime.now(timezone.utc)
default_conf_usdt['trading_mode'] = 'futures'
default_conf_usdt['margin_mode'] = 'isolated'
exchange = get_patched_exchange(mocker, default_conf_usdt, id=exchange_name)
exchange._fetch_and_calculate_funding_fees = MagicMock(side_effect=ExchangeError)
assert exchange.get_funding_fees('BTC/USDT:USDT', 1, False, now) == 0.0
assert exchange._fetch_and_calculate_funding_fees.call_count == 1
assert log_has("Could not update funding fees for BTC/USDT:USDT.", caplog)
@pytest.mark.parametrize("exchange_name", ['binance'])
def test__get_funding_fees_from_exchange(default_conf, mocker, exchange_name):
api_mock = MagicMock()
@@ -3853,11 +3901,11 @@ def test_set_margin_mode(mocker, default_conf, margin_mode):
("kraken", TradingMode.SPOT, None, False),
("kraken", TradingMode.MARGIN, MarginMode.ISOLATED, True),
("kraken", TradingMode.FUTURES, MarginMode.ISOLATED, True),
("bittrex", TradingMode.SPOT, None, False),
("bittrex", TradingMode.MARGIN, MarginMode.CROSS, True),
("bittrex", TradingMode.MARGIN, MarginMode.ISOLATED, True),
("bittrex", TradingMode.FUTURES, MarginMode.CROSS, True),
("bittrex", TradingMode.FUTURES, MarginMode.ISOLATED, True),
("bitmart", TradingMode.SPOT, None, False),
("bitmart", TradingMode.MARGIN, MarginMode.CROSS, True),
("bitmart", TradingMode.MARGIN, MarginMode.ISOLATED, True),
("bitmart", TradingMode.FUTURES, MarginMode.CROSS, True),
("bitmart", TradingMode.FUTURES, MarginMode.ISOLATED, True),
("gate", TradingMode.MARGIN, MarginMode.ISOLATED, True),
("okx", TradingMode.SPOT, None, False),
("okx", TradingMode.MARGIN, MarginMode.CROSS, True),

View File

@@ -18,7 +18,7 @@ from tests.conftest import log_has_re
def test_check_exchange(default_conf, caplog) -> None:
# Test an officially supported by Freqtrade team exchange
default_conf['runmode'] = RunMode.DRY_RUN
default_conf.get('exchange').update({'name': 'BITTREX'})
default_conf.get('exchange').update({'name': 'BINANCE'})
assert check_exchange(default_conf)
assert log_has_re(r"Exchange .* is officially supported by the Freqtrade development team\.",
caplog)
@@ -41,14 +41,14 @@ def test_check_exchange(default_conf, caplog) -> None:
caplog.clear()
# Test an officially supported by Freqtrade team exchange - with remapping
default_conf.get('exchange').update({'name': 'okex'})
default_conf.get('exchange').update({'name': 'okx'})
assert check_exchange(default_conf)
assert log_has_re(
r"Exchange \"okex\" is officially supported by the Freqtrade development team\.",
r"Exchange \"okx\" is officially supported by the Freqtrade development team\.",
caplog)
caplog.clear()
# Test an available exchange, supported by ccxt
default_conf.get('exchange').update({'name': 'huobipro'})
default_conf.get('exchange').update({'name': 'huobijp'})
assert check_exchange(default_conf)
assert log_has_re(r"Exchange .* is known to the the ccxt library, available for the bot, "
r"but not officially supported "

View File

@@ -1,5 +1,4 @@
from datetime import datetime, timedelta, timezone
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, PropertyMock
import ccxt
@@ -269,9 +268,9 @@ def test_additional_exchange_init_okx(default_conf, mocker):
"additional_exchange_init", "fetch_accounts")
def test_load_leverage_tiers_okx(default_conf, mocker, markets, tmpdir, caplog, time_machine):
def test_load_leverage_tiers_okx(default_conf, mocker, markets, tmp_path, caplog, time_machine):
default_conf['datadir'] = Path(tmpdir)
default_conf['datadir'] = tmp_path
# fd_mock = mocker.patch('freqtrade.exchange.exchange.file_dump_json')
api_mock = MagicMock()
type(api_mock).has = PropertyMock(return_value={

View File

@@ -14,14 +14,6 @@ EXCHANGE_FIXTURE_TYPE = Tuple[Exchange, str]
# Exchanges that should be tested online
EXCHANGES = {
'bittrex': {
'pair': 'BTC/USDT',
'stake_currency': 'USDT',
'hasQuoteVolume': False,
'timeframe': '1h',
'leverage_tiers_public': False,
'leverage_in_spot_market': False,
},
'binance': {
'pair': 'BTC/USDT',
'stake_currency': 'USDT',
@@ -227,6 +219,7 @@ EXCHANGES = {
'timeframe': '1h',
'futures_pair': 'BTC/USDT:USDT',
'futures': True,
'orderbook_max_entries': 50,
'leverage_tiers_public': True,
'leverage_in_spot_market': True,
'sample_order': [
@@ -247,6 +240,13 @@ EXCHANGES = {
}
]
},
'bitmart': {
'pair': 'BTC/USDT',
'stake_currency': 'USDT',
'hasQuoteVolume': True,
'timeframe': '1h',
'orderbook_max_entries': 50,
},
'huobi': {
'pair': 'ETH/BTC',
'stake_currency': 'BTC',

View File

@@ -133,6 +133,7 @@ class TestCCXTExchange:
exch, exchangename = exchange
pair = EXCHANGES[exchangename]['pair']
l2 = exch.fetch_l2_order_book(pair)
orderbook_max_entries = EXCHANGES[exchangename].get('orderbook_max_entries')
assert 'asks' in l2
assert 'bids' in l2
assert len(l2['asks']) >= 1
@@ -143,7 +144,7 @@ class TestCCXTExchange:
# TODO: Gate is unstable here at the moment, ignoring the limit partially.
return
for val in [1, 2, 5, 25, 50, 100]:
if val > 50 and exchangename == 'bybit':
if orderbook_max_entries and val > orderbook_max_entries:
continue
l2 = exch.fetch_l2_order_book(pair, val)
if not l2_limit_range or val in l2_limit_range:

View File

@@ -20,14 +20,29 @@ def is_mac() -> bool:
return "Darwin" in machine
@pytest.fixture(autouse=True)
def patch_torch_initlogs(mocker) -> None:
if is_mac():
# Mock torch import completely
import sys
import types
module_name = 'torch'
mocked_module = types.ModuleType(module_name)
sys.modules[module_name] = mocked_module
else:
mocker.patch("torch._logging._init_logs")
@pytest.fixture(scope="function")
def freqai_conf(default_conf, tmpdir):
def freqai_conf(default_conf, tmp_path):
freqaiconf = deepcopy(default_conf)
freqaiconf.update(
{
"datadir": Path(default_conf["datadir"]),
"strategy": "freqai_test_strat",
"user_data_dir": Path(tmpdir),
"user_data_dir": tmp_path,
"strategy-path": "freqtrade/tests/strategy/strats",
"freqaimodel": "LightGBMRegressor",
"freqaimodel_path": "freqai/prediction_models",

View File

@@ -179,10 +179,9 @@ def test_set_initial_return_values(mocker, freqai_conf):
hist_pred_df = freqai.dd.historic_predictions[pair]
model_return_df = freqai.dd.model_return_values[pair]
assert (hist_pred_df['date_pred'].iloc[-1] ==
pd.Timestamp(end_x_plus_5) - pd.Timedelta(days=1))
assert hist_pred_df['date_pred'].iloc[-1] == pd.Timestamp(end_x_plus_5)
assert 'date_pred' in hist_pred_df.columns
assert hist_pred_df.shape[0] == 7 # Total rows: 5 from historic and 2 new zeros
assert hist_pred_df.shape[0] == 8
# compare values in model_return_df with hist_pred_df
assert (model_return_df["value"].values ==
@@ -234,9 +233,9 @@ def test_set_initial_return_values_warning(mocker, freqai_conf):
hist_pred_df = freqai.dd.historic_predictions[pair]
model_return_df = freqai.dd.model_return_values[pair]
assert hist_pred_df['date_pred'].iloc[-1] == pd.Timestamp(end_x_plus_5) - pd.Timedelta(days=1)
assert hist_pred_df['date_pred'].iloc[-1] == pd.Timestamp(end_x_plus_5)
assert 'date_pred' in hist_pred_df.columns
assert hist_pred_df.shape[0] == 9 # Total rows: 5 from historic and 4 new zeros
assert hist_pred_df.shape[0] == 10
# compare values in model_return_df with hist_pred_df
assert (model_return_df["value"].values == hist_pred_df.tail(

View File

@@ -10,9 +10,8 @@ from freqtrade.data.dataprovider import DataProvider
from freqtrade.exceptions import OperationalException
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from tests.conftest import get_patched_exchange
from tests.freqai.conftest import (get_patched_data_kitchen, get_patched_freqai_strategy,
from tests.freqai.conftest import (get_patched_data_kitchen, get_patched_freqai_strategy, is_mac,
make_unfiltered_dataframe)
from tests.freqai.test_freqai_interface import is_mac
@pytest.mark.parametrize(

Some files were not shown because too many files have changed in this diff.