Merge branch 'develop' into api-server-list-custom-data

Axel-CH
2025-03-07 09:37:56 -04:00
69 changed files with 3927 additions and 1457 deletions

View File

@@ -16,6 +16,8 @@ jobs:
     name: develop
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - uses: actions/setup-python@v5
         with:
@@ -42,6 +44,7 @@ jobs:
           branch: update/binance-leverage-tiers
           title: Update Binance Leverage Tiers
           commit-message: "chore: update pre-commit hooks"
-          committer: Freqtrade Bot <noreply@github.com>
+          committer: Freqtrade Bot <154552126+freqtrade-bot@users.noreply.github.com>
+          author: Freqtrade Bot <154552126+freqtrade-bot@users.noreply.github.com>
           body: Update binance leverage tiers.
           delete-branch: true

View File

@@ -29,6 +29,8 @@ jobs:
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up Python
         uses: actions/setup-python@v5
@@ -157,6 +159,8 @@ jobs:
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up Python
         uses: actions/setup-python@v5
@@ -281,6 +285,8 @@ jobs:
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up Python
         uses: actions/setup-python@v5
@@ -366,6 +372,8 @@ jobs:
     runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up Python
         uses: actions/setup-python@v5
@@ -381,6 +389,8 @@ jobs:
     runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - uses: actions/setup-python@v5
        with:
@@ -391,6 +401,8 @@ jobs:
     runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Documentation syntax
         run: |
@@ -420,6 +432,8 @@ jobs:
     runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up Python
         uses: actions/setup-python@v5
@@ -506,6 +520,8 @@ jobs:
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up Python
         uses: actions/setup-python@v5
@@ -551,6 +567,8 @@ jobs:
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Download artifact 📦
         uses: actions/download-artifact@v4
@@ -578,6 +596,8 @@ jobs:
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Download artifact 📦
         uses: actions/download-artifact@v4
@@ -598,6 +618,8 @@ jobs:
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up Python
         uses: actions/setup-python@v5
@@ -633,7 +655,9 @@ jobs:
         uses: docker/setup-buildx-action@v3
       - name: Available platforms
-        run: echo ${{ steps.buildx.outputs.platforms }}
+        run: echo ${PLATFORMS}
+        env:
+          PLATFORMS: ${{ steps.buildx.outputs.platforms }}
       - name: Build and test and push docker images
         env:
@@ -652,6 +676,8 @@ jobs:
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Extract branch name
        id: extract-branch

View File

@@ -20,6 +20,8 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: true
       - name: Set up Python
         uses: actions/setup-python@v5
@@ -43,12 +45,16 @@ jobs:
       - name: Build and push Mike
         if: ${{ github.event_name == 'push' }}
         run: |
-          mike deploy ${{ github.ref_name }} latest --push --update-aliases
+          mike deploy ${REF_NAME} latest --push --update-aliases
+        env:
+          REF_NAME: ${{ github.ref_name }}
       - name: Build and push Mike - Release
         if: ${{ github.event_name == 'release' }}
         run: |
-          mike deploy ${{ github.ref_name }} stable --push --update-aliases
+          mike deploy ${REF_NAME} stable --push --update-aliases
+        env:
+          REF_NAME: ${{ github.ref_name }}
       - name: Show mike versions
         run: |

View File

@@ -17,29 +17,26 @@ concurrency:
   group: "${{ github.workflow }}"
   cancel-in-progress: true
 
-permissions:
-  packages: write
-
 jobs:
   build-and-push:
+    permissions:
+      packages: write
     runs-on: ubuntu-latest
     steps:
-      -
-        name: Checkout
-        id: checkout
-        uses: actions/checkout@v4
-      -
-        name: Login to GitHub Container Registry
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      -
-        name: Pre-build dev container image
-        uses: devcontainers/ci@v0.3
-        with:
-          subFolder: .github
-          imageName: ghcr.io/${{ github.repository }}-devcontainer
-          cacheFrom: ghcr.io/${{ github.repository }}-devcontainer
-          push: always
+      - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Pre-build dev container image
+        uses: devcontainers/ci@v0.3
+        with:
+          subFolder: .github
+          imageName: ghcr.io/${{ github.repository }}-devcontainer
+          cacheFrom: ghcr.io/${{ github.repository }}-devcontainer
+          push: always

View File

@@ -4,11 +4,16 @@ on:
     branches:
       - stable
 
+# disable permissions for all of the available permissions
+permissions: {}
+
 jobs:
   dockerHubDescription:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Docker Hub Description
         uses: peter-evans/dockerhub-description@v4

View File

@@ -14,6 +14,8 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - uses: actions/setup-python@v5
         with:
@@ -36,6 +38,7 @@ jobs:
           branch: update/pre-commit-hooks
           title: Update pre-commit hooks
           commit-message: "chore: update pre-commit hooks"
-          committer: Freqtrade Bot <noreply@github.com>
+          committer: Freqtrade Bot <154552126+freqtrade-bot@users.noreply.github.com>
+          author: Freqtrade Bot <154552126+freqtrade-bot@users.noreply.github.com>
           body: Update versions of pre-commit hooks to latest version.
           delete-branch: true

View File

@@ -2,7 +2,7 @@
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
   - repo: https://github.com/pycqa/flake8
-    rev: "7.1.1"
+    rev: "7.1.2"
     hooks:
       - id: flake8
         additional_dependencies: [Flake8-pyproject]
@@ -16,14 +16,14 @@ repos:
         additional_dependencies:
           - types-cachetools==5.5.0.20240820
           - types-filelock==3.2.7
-          - types-requests==2.32.0.20241016
+          - types-requests==2.32.0.20250301
           - types-tabulate==0.9.0.20241207
           - types-python-dateutil==2.9.0.20241206
           - SQLAlchemy==2.0.38
         # stages: [push]
   - repo: https://github.com/pycqa/isort
-    rev: "6.0.0"
+    rev: "6.0.1"
     hooks:
       - id: isort
         name: isort (python)
@@ -31,7 +31,7 @@ repos:
   - repo: https://github.com/charliermarsh/ruff-pre-commit
     # Ruff version.
-    rev: 'v0.9.6'
+    rev: 'v0.9.9'
     hooks:
       - id: ruff
       - id: ruff-format
@@ -57,7 +57,7 @@ repos:
         )$
   - repo: https://github.com/stefmolin/exif-stripper
-    rev: 0.6.1
+    rev: 0.6.2
     hooks:
       - id: strip-exif
@@ -67,3 +67,9 @@ repos:
       - id: codespell
         additional_dependencies:
           - tomli
+
+  # Ensure github actions remain safe
+  - repo: https://github.com/woodruffw/zizmor-pre-commit
+    rev: v1.4.1
+    hooks:
+      - id: zizmor

View File

@@ -1,4 +1,4 @@
-FROM python:3.12.8-slim-bookworm as base
+FROM python:3.12.9-slim-bookworm as base
 
 # Setup env
 ENV LANG C.UTF-8

View File

@@ -34,7 +34,7 @@ COPY build_helpers/* /tmp/
 # Install dependencies
 COPY --chown=ftuser:ftuser requirements.txt /freqtrade/
 USER ftuser
-RUN pip install --user --no-cache-dir numpy \
+RUN pip install --user --no-cache-dir "numpy<2" \
   && pip install --user --no-index --find-links /tmp/ pyarrow TA-Lib \
   && pip install --user --no-cache-dir -r requirements.txt

View File

@@ -315,6 +315,7 @@ It contains some useful key metrics about performance of your strategy on backte
 | Sortino            | 1.88          |
 | Sharpe             | 2.97          |
 | Calmar             | 6.29          |
+| SQN                | 2.45          |
 | Profit factor      | 1.11          |
 | Expectancy (Ratio) | -0.15 (-0.05) |
 | Avg. stake amount  | 0.001 BTC     |
@@ -368,6 +369,7 @@ It contains some useful key metrics about performance of your strategy on backte
 - `Sortino`: Annualized Sortino ratio.
 - `Sharpe`: Annualized Sharpe ratio.
 - `Calmar`: Annualized Calmar ratio.
+- `SQN`: System Quality Number (SQN) - by Van Tharp.
 - `Profit factor`: profit / loss.
 - `Avg. stake amount`: Average stake amount, either `stake_amount` or the average when using dynamic stake amount.
 - `Total trade volume`: Volume generated on the exchange to reach the above profit.
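For context on the new `SQN` row and bullet: the value is produced by the `calculate_sqn()` helper added later in this commit. A compact restatement of that calculation, as a sketch with illustrative numbers (not taken from the commit):

```python
import math

# Per-trade profits relative to the starting balance (illustrative values)
rel_profits = [0.012, -0.005, 0.008, 0.003, -0.002]

mean = sum(rel_profits) / len(rel_profits)
# Sample standard deviation (ddof=1), matching pandas' default used by calculate_sqn()
std = math.sqrt(sum((p - mean) ** 2 for p in rel_profits) / (len(rel_profits) - 1))

# SQN = sqrt(number of trades) * mean / standard deviation
sqn = math.sqrt(len(rel_profits)) * mean / std
print(round(sqn, 4))
```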

View File

@@ -54,11 +54,13 @@ By default, the bot loop runs every few seconds (`internals.process_throttle_sec
 * Check timeouts for open orders.
   * Calls `check_entry_timeout()` strategy callback for open entry orders.
   * Calls `check_exit_timeout()` strategy callback for open exit orders.
-  * Calls `adjust_entry_price()` strategy callback for open entry orders.
+  * Calls `adjust_order_price()` strategy callback for open orders.
+    * Calls `adjust_entry_price()` strategy callback for open entry orders. *only called when `adjust_order_price()` is not implemented*
+    * Calls `adjust_exit_price()` strategy callback for open exit orders. *only called when `adjust_order_price()` is not implemented*
 * Verifies existing positions and eventually places exit orders.
   * Considers stoploss, ROI and exit-signal, `custom_exit()` and `custom_stoploss()`.
   * Determine exit-price based on `exit_pricing` configuration setting or by using the `custom_exit_price()` callback.
-  * Before a exit order is placed, `confirm_trade_exit()` strategy callback is called.
+  * Before an exit order is placed, `confirm_trade_exit()` strategy callback is called.
 * Check position adjustments for open trades if enabled by calling `adjust_trade_position()` and place additional order if required.
 * Check if trade-slots are still available (if `max_open_trades` is reached).
 * Verifies entry signal trying to enter new positions.
@@ -80,7 +82,9 @@ This loop will be repeated again and again until the bot is stopped.
 * Loops per candle simulating entry and exit points.
   * Calls `bot_loop_start()` strategy callback.
   * Check for Order timeouts, either via the `unfilledtimeout` configuration, or via `check_entry_timeout()` / `check_exit_timeout()` strategy callbacks.
-  * Calls `adjust_entry_price()` strategy callback for open entry orders.
+  * Calls `adjust_order_price()` strategy callback for open orders.
+    * Calls `adjust_entry_price()` strategy callback for open entry orders. *only called when `adjust_order_price()` is not implemented!*
+    * Calls `adjust_exit_price()` strategy callback for open exit orders. *only called when `adjust_order_price()` is not implemented!*
   * Check for trade entry signals (`enter_long` / `enter_short` columns).
   * Confirm trade entry / exits (calls `confirm_trade_entry()` and `confirm_trade_exit()` if implemented in the strategy).
   * Call `custom_entry_price()` (if implemented in the strategy) to determine entry price (Prices are moved to be within the opening candle).

View File

@@ -377,6 +377,9 @@ If an incorrect category string is chosen, the plugin will print the available c
 !!! Warning "Many categories"
     Each added category corresponds to one API call to CoinGecko. The more categories you add, the longer the pairlist generation will take, potentially causing rate limit issues.
 
+!!! Danger "Duplicate symbols in coingecko"
+    Coingecko often has duplicate symbols, where the same symbol is used for different coins. Freqtrade will use the symbol as is and try to search for it on the exchange. If the symbol exists - it will be used. Freqtrade will however not check if the _intended_ symbol is the one coingecko meant. This can sometimes lead to unexpected results, especially on low volume coins or with meme coin categories.
+
 #### AgeFilter
 
 Removes pairs that have been listed on the exchange for less than `min_days_listed` days (defaults to `10`) or more than `max_days_listed` days (defaults `None` mean infinity).

View File

@@ -1,7 +1,7 @@
 markdown==3.7
 mkdocs==1.6.1
-mkdocs-material==9.6.3
+mkdocs-material==9.6.7
 mdx_truly_sane_lists==1.3
 pymdown-extensions==10.14.3
-jinja2==3.1.5
+jinja2==3.1.6
 mike==2.1.3

View File

@@ -154,10 +154,10 @@ For example, simplified math:
 In summary: The stoploss will be adjusted to be always be -10% of the highest observed price.
 
-### Trailing stop loss, custom positive loss
+### Trailing stop loss, different positive loss
 
-You could also have a default stop loss when you are in the red with your buy (buy - fee), but once you hit a positive result (or an offset you define) the system will utilize a new stop loss, which can have a different value.
-For example, your default stop loss is -10%, but once you have more than 0% profit (example 0.1%) a different trailing stoploss will be used.
+You could also have a default stop loss when you are in the red with your buy (buy - fee), but once you hit a positive result (or an offset you define) the system will utilize a new stop loss, with a different value.
+For example, your default stop loss is -10%, but once you have reached profitability (example 0.1%) a different trailing stoploss will be used.
 
 !!! Note
     If you want the stoploss to only be changed when you break even of making a profit (what most users want) please refer to next section with [offset enabled](#trailing-stop-loss-only-once-the-trade-has-reached-a-certain-offset).
@@ -208,7 +208,9 @@ Before this, `stoploss` is used for the trailing stoploss.
 You can also keep a static stoploss until the offset is reached, and then trail the trade to take profits once the market turns.
 
-If `trailing_only_offset_is_reached = True` then the trailing stoploss is only activated once the offset is reached. Until then, the stoploss remains at the configured `stoploss`.
+If `trailing_only_offset_is_reached = True` then the trailing stoploss is only activated once the offset is reached. Until then, the stoploss remains at the configured `stoploss` and is not trailing.
+Leaving this value as `trailing_only_offset_is_reached=False` will allow the trailing stoploss to start trailing as soon as the asset price increases above the initial entry price.
 This option can be used with or without `trailing_stop_positive`, but uses `trailing_stop_positive_offset` as offset.
 
 Configuration (offset is buy-price + 3%):
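The configuration block itself falls outside this hunk. As a minimal sketch of what such a setup looks like, using the standard freqtrade trailing-stop strategy attributes (the concrete numbers below are illustrative, not part of this commit):

```python
from freqtrade.strategy import IStrategy


class AwesomeStrategy(IStrategy):
    # Sketch only: illustrative trailing-stop attributes on a strategy class.
    stoploss = -0.10                        # initial / static stoploss
    trailing_stop = True                    # enable trailing stoploss
    trailing_stop_positive = 0.01           # trail 1% behind the peak once active
    trailing_stop_positive_offset = 0.03    # activate only once buy-price + 3% is reached
    trailing_only_offset_is_reached = True  # keep the static stoploss until the offset is hit
```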

View File

@@ -79,6 +79,8 @@ import talib.abstract as ta
 class MyStrategy(IStrategy):
 
+    timeframe = '15m'
+
     # set the initial stoploss to -10%
     stoploss = -0.10

View File

@@ -876,6 +876,9 @@ class DigDeeperStrategy(IStrategy):
         Return None for no action.
         Optionally, return a tuple with a 2nd element with an order reason
         """
+        if trade.has_open_orders:
+            # Only act if no orders are open
+            return
         if current_profit > 0.05 and trade.nr_of_successful_exits == 0:
             # Take half of the profit at +5%
@@ -934,28 +937,25 @@ class DigDeeperStrategy(IStrategy):
The total profit for this trade was 950$ on a 3350$ investment (`100@8$ + 100@9$ + 150@11$`). As such - the final relative profit is 28.35% (`950 / 3350`). The total profit for this trade was 950$ on a 3350$ investment (`100@8$ + 100@9$ + 150@11$`). As such - the final relative profit is 28.35% (`950 / 3350`).
## Adjust Entry Price ## Adjust order Price
The `adjust_entry_price()` callback may be used by strategy developer to refresh/replace limit orders upon arrival of new candles. The `adjust_order_price()` callback may be used by strategy developer to refresh/replace limit orders upon arrival of new candles.
This callback is called once every iteration unless the order has been (re)placed within the current candle - limiting the maximum (re)placement of each order to once per candle. This callback is called once every iteration unless the order has been (re)placed within the current candle - limiting the maximum (re)placement of each order to once per candle.
This also means that the first call will be at the start of the next candle after the initial order was placed. This also means that the first call will be at the start of the next candle after the initial order was placed.
Be aware that `custom_entry_price()` is still the one dictating initial entry limit order price target at the time of entry trigger. Be aware that `custom_entry_price()`/`custom_exit_price()` is still the one dictating initial limit order price target at the time of the signal.
Orders can be cancelled out of this callback by returning `None`. Orders can be cancelled out of this callback by returning `None`.
Returning `current_order_rate` will keep the order on the exchange "as is". Returning `current_order_rate` will keep the order on the exchange "as is".
Returning any other price will cancel the existing order, and replace it with a new order. Returning any other price will cancel the existing order, and replace it with a new order.
The trade open-date (`trade.open_date_utc`) will remain at the time of the very first order placed.
Please make sure to be aware of this - and eventually adjust your logic in other callbacks to account for this, and use the date of the first filled order instead.
If the cancellation of the original order fails, then the order will not be replaced - though the order will most likely have been canceled on exchange. Having this happen on initial entries will result in the deletion of the order, while on position adjustment orders, it'll result in the trade size remaining as is. If the cancellation of the original order fails, then the order will not be replaced - though the order will most likely have been canceled on exchange. Having this happen on initial entries will result in the deletion of the order, while on position adjustment orders, it'll result in the trade size remaining as is.
If the order has been partially filled, the order will not be replaced. You can however use [`adjust_trade_position()`](#adjust-trade-position) to adjust the trade size to the full, expected position size, should this be necessary / desired. If the order has been partially filled, the order will not be replaced. You can however use [`adjust_trade_position()`](#adjust-trade-position) to adjust the trade size to the expected position size, should this be necessary / desired.
!!! Warning "Regular timeout" !!! Warning "Regular timeout"
Entry `unfilledtimeout` mechanism (as well as `check_entry_timeout()`) takes precedence over this. Entry `unfilledtimeout` mechanism (as well as `check_entry_timeout()`/`check_exit_timeout()`) takes precedence over this callback.
Entry Orders that are cancelled via the above methods will not have this callback called. Be sure to update timeout values to match your expectations. Orders that are cancelled via the above methods will not have this callback called. Be sure to update timeout values to match your expectations.
```python ```python
# Default imports # Default imports
@@ -964,14 +964,26 @@ class AwesomeStrategy(IStrategy):
# ... populate_* methods # ... populate_* methods
def adjust_entry_price(self, trade: Trade, order: Order | None, pair: str, def adjust_order_price(
current_time: datetime, proposed_rate: float, current_order_rate: float, self,
entry_tag: str | None, side: str, **kwargs) -> float: trade: Trade,
order: Order | None,
pair: str,
current_time: datetime,
proposed_rate: float,
current_order_rate: float,
entry_tag: str | None,
side: str,
is_entry: bool,
**kwargs,
) -> float:
""" """
Entry price re-adjustment logic, returning the user desired limit price. Exit and entry order price re-adjustment logic, returning the user desired limit price.
This only executes when a order was already placed, still open (unfilled fully or partially) This only executes when a order was already placed, still open (unfilled fully or partially)
and not timed out on subsequent candles after entry trigger. and not timed out on subsequent candles after entry trigger.
For full documentation please go to https://www.freqtrade.io/en/latest/strategy-callbacks/
When not implemented by a strategy, returns current_order_rate as default. When not implemented by a strategy, returns current_order_rate as default.
If current_order_rate is returned then the existing order is maintained. If current_order_rate is returned then the existing order is maintained.
If None is returned then order gets canceled but not replaced by a new one. If None is returned then order gets canceled but not replaced by a new one.
@@ -983,14 +995,16 @@ class AwesomeStrategy(IStrategy):
:param proposed_rate: Rate, calculated based on pricing settings in entry_pricing. :param proposed_rate: Rate, calculated based on pricing settings in entry_pricing.
:param current_order_rate: Rate of the existing order in place. :param current_order_rate: Rate of the existing order in place.
:param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal. :param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
:param side: "long" or "short" - indicating the direction of the proposed trade :param side: 'long' or 'short' - indicating the direction of the proposed trade
:param is_entry: True if the order is an entry order, False if it's an exit order.
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy. :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
:return float: New entry price value if provided :return float: New entry price value if provided
""" """
# Limit orders to use and follow SMA200 as price target for the first 10 minutes since entry trigger for BTC/USDT pair.
# Limit entry orders to use and follow SMA200 as price target for the first 10 minutes since entry trigger for BTC/USDT pair.
if ( if (
pair == "BTC/USDT" is_entry
and pair == "BTC/USDT"
and entry_tag == "long_sma200" and entry_tag == "long_sma200"
and side == "long" and side == "long"
and (current_time - timedelta(minutes=10)) <= trade.open_date_utc and (current_time - timedelta(minutes=10)) <= trade.open_date_utc
@@ -1007,6 +1021,26 @@ class AwesomeStrategy(IStrategy):
return current_order_rate return current_order_rate
``` ```
!!! danger "Incompatibility with `adjust_*_price()`"
If you have both `adjust_order_price()` and `adjust_entry_price()`/`adjust_exit_price()` implemented, only `adjust_order_price()` will be used.
If you need to adjust entry/exit prices, you can either implement the logic in `adjust_order_price()`, or use the split `adjust_entry_price()` / `adjust_exit_price()` callbacks, but not both.
Mixing these is not supported and will raise an error during bot startup.
### Adjust Entry Price
The `adjust_entry_price()` callback may be used by strategy developer to refresh/replace entry limit orders upon arrival.
It's a sub-set of `adjust_order_price()` and is called only for entry orders.
All remaining behavior is identical to `adjust_order_price()`.
The trade open-date (`trade.open_date_utc`) will remain at the time of the very first order placed.
Please make sure to be aware of this - and eventually adjust your logic in other callbacks to account for this, and use the date of the first filled order instead.
### Adjust Exit Price
The `adjust_exit_price()` callback may be used by strategy developer to refresh/replace exit limit orders upon arrival.
It's a sub-set of `adjust_order_price()` and is called only for exit orders.
All remaining behavior is identical to `adjust_order_price()`.
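No example accompanies the split callbacks in this hunk. A minimal sketch, reusing the `adjust_entry_price()` signature visible in the removed example above (the `AwesomeStrategy` name and the trivial body are illustrative, not part of this commit):

```python
from datetime import datetime

from freqtrade.persistence import Order, Trade
from freqtrade.strategy import IStrategy


class AwesomeStrategy(IStrategy):
    # ... populate_* methods

    def adjust_entry_price(self, trade: Trade, order: Order | None, pair: str,
                           current_time: datetime, proposed_rate: float, current_order_rate: float,
                           entry_tag: str | None, side: str, **kwargs) -> float:
        # Entry orders only: keep the open order on the exchange "as is".
        return current_order_rate
```

Per the added documentation, `adjust_exit_price()` is the exit-side counterpart and behaves identically apart from being called for exit orders only.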
 ## Leverage Callback
 
 When trading in markets that allow leverage, this method must return the desired Leverage (Defaults to 1 -> No leverage).

View File

@@ -513,7 +513,7 @@ By default, freqtrade will attempt to load strategies from all `.py` files withi
 Assuming your strategy is called `AwesomeStrategy`, stored in the file `user_data/strategies/AwesomeStrategy.py`, then you can start freqtrade in dry (or live, depending on your configuration) mode with:
 
 ```bash
-freqtrade trade --strategy AwesomeStrategy`
+freqtrade trade --strategy AwesomeStrategy
 ```
 
 Note that we're using the class name, not the file name.
@@ -1122,6 +1122,7 @@ The following list contains some common patterns which should be avoided to prev
 - don't use `.iloc[-1]` or any other absolute position in the dataframe within `populate_` functions, as this will be different between dry-run and backtesting. Absolute `iloc` indexing is safe to use in callbacks however - see [Strategy Callbacks](strategy-callbacks.md).
 - don't use functions that use all dataframe or column values, e.g. `dataframe['mean_volume'] = dataframe['volume'].mean()`. As backtesting uses the full dataframe, at any point in the dataframe, the `'mean_volume'` series would include data from the future. Use rolling() calculations instead, e.g. `dataframe['volume'].rolling(<window>).mean()`.
 - don't use `.resample('1h')`. This uses the left border of the period interval, so moves data from an hour boundary to the start of the hour. Use `.resample('1h', label='right')` instead.
+- don't use `.merge()` to combine longer timeframes onto shorter ones. Instead, use the [informative pair](#informative-pairs) helpers. (A plain merge can implicitly cause a lookahead bias as date refers to open date, not close date).
 
 !!! Tip "Identifying problems"
     You should always use the two helper commands [lookahead-analysis](lookahead-analysis.md) and [recursive-analysis](recursive-analysis.md), which can each help you figure out problems with your strategy in different ways.

View File

@@ -35,6 +35,7 @@ The following attributes / properties are available for each individual trade -
 | `trade_direction` | "long" / "short" | Trade direction in text - long or short. |
 | `nr_of_successful_entries` | int | Number of successful (filled) entry orders. |
 | `nr_of_successful_exits` | int | Number of successful (filled) exit orders. |
+| `has_open_orders` | boolean | Has the trade open orders (excluding stoploss orders). |
 
 ## Class methods
## Class methods ## Class methods

View File

@@ -1,6 +1,6 @@
 """Freqtrade bot"""
 
-__version__ = "2025.2-dev"
+__version__ = "2025.3-dev"
 
 if "dev" in __version__:
     from pathlib import Path

View File

@@ -375,3 +375,32 @@ def calculate_calmar(
     # print(expected_returns_mean, max_drawdown, calmar_ratio)
 
     return calmar_ratio
+
+
+def calculate_sqn(trades: pd.DataFrame, starting_balance: float) -> float:
+    """
+    Calculate System Quality Number (SQN) - Van K. Tharp.
+    SQN measures systematic trading quality and takes into account both
+    the number of trades and their standard deviation.
+
+    :param trades: DataFrame containing trades (requires column profit_abs)
+    :param starting_balance: Starting balance of the trading system
+    :return: SQN value
+    """
+    if len(trades) == 0:
+        return 0.0
+
+    total_profit = trades["profit_abs"] / starting_balance
+    number_of_trades = len(trades)
+
+    # Calculate average trade and standard deviation
+    average_profits = total_profit.mean()
+    profits_std = total_profit.std()
+
+    if profits_std != 0 and not np.isnan(profits_std):
+        sqn = math.sqrt(number_of_trades) * (average_profits / profits_std)
+    else:
+        # Define negative SQN to indicate this is NOT optimal
+        sqn = -100.0
+
+    return round(sqn, 4)
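A minimal usage sketch for the new helper (the import path is assumed from the surrounding metrics module and is not confirmed by this diff; the trade values are made up):

```python
import pandas as pd

from freqtrade.data.metrics import calculate_sqn  # path assumed, not confirmed by this diff

# Five hypothetical closed trades, absolute profit in stake currency
trades = pd.DataFrame({"profit_abs": [12.0, -5.0, 8.0, 3.0, -2.0]})

# Internally: sqrt(5) * mean(profit_abs / 1000) / std(profit_abs / 1000)
print(calculate_sqn(trades, starting_balance=1000.0))
```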

View File

@@ -11,7 +11,11 @@ from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS
 from freqtrade.enums import CandleType, MarginMode, PriceType, TradingMode
 from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
 from freqtrade.exchange import Exchange
-from freqtrade.exchange.binance_public_data import concat_safe, download_archive_ohlcv
+from freqtrade.exchange.binance_public_data import (
+    concat_safe,
+    download_archive_ohlcv,
+    download_archive_trades,
+)
 from freqtrade.exchange.common import retrier
 from freqtrade.exchange.exchange_types import FtHas, Tickers
 from freqtrade.exchange.exchange_utils_timeframe import timeframe_to_msecs
@@ -379,3 +383,48 @@ class Binance(Exchange):
         if not t:
             return [], "0"
         return t, from_id
+
+    async def _async_get_trade_history_id(
+        self, pair: str, until: int, since: int, from_id: str | None = None
+    ) -> tuple[str, list[list]]:
+        logger.info(f"Fetching trades from Binance, {from_id=}, {since=}, {until=}")
+        if not self._config["exchange"].get("only_from_ccxt", False):
+            if from_id is None or not since:
+                trades = await self._api_async.fetch_trades(
+                    pair,
+                    params={
+                        self._trades_pagination_arg: "0",
+                    },
+                    limit=5,
+                )
+                listing_date: int = trades[0]["timestamp"]
+                since = max(since, listing_date)
+
+            _, res = await download_archive_trades(
+                CandleType.SPOT,
+                pair,
+                since_ms=since,
+                until_ms=until,
+                markets=self.markets,
+            )
+
+            if not res:
+                end_time = since
+                end_id = from_id
+            else:
+                end_time = res[-1][0]
+                end_id = res[-1][1]
+
+            if end_time and end_time >= until:
+                return pair, res
+            else:
+                _, res2 = await super()._async_get_trade_history_id(
+                    pair, until=until, since=end_time, from_id=end_id
+                )
+                res.extend(res2)
+                return pair, res
+        return await super()._async_get_trade_history_id(
+            pair, until=until, since=since, from_id=from_id
+        )

File diff suppressed because it is too large.

View File

@@ -1,5 +1,6 @@
 """
 Fetch daily-archived OHLCV data from https://data.binance.vision/
+Documentation can be found in https://github.com/binance/binance-public-data
 """
 
 import asyncio
@@ -10,9 +11,11 @@ from io import BytesIO
 from typing import Any
 
 import aiohttp
+import numpy as np
 import pandas as pd
 from pandas import DataFrame
 
+from freqtrade.constants import DEFAULT_TRADES_COLUMNS
 from freqtrade.enums import CandleType
 from freqtrade.misc import chunks
 from freqtrade.util.datetime_helpers import dt_from_ts, dt_now
@@ -157,8 +160,8 @@ async def _download_archive_ohlcv(
                     return concat_safe(dfs)
                 else:
                     dfs.append(None)
-            except BaseException as e:
-                logger.warning(f"An exception raised: : {e}")
+            except Exception as e:
+                logger.warning(f"An exception raised: {e}")
                 # Directly return the existing data, do not allow the gap within the data
                 await cancel_and_await_tasks(tasks[tasks.index(task) + 1 :])
                 return concat_safe(dfs)
@@ -212,6 +215,20 @@ def binance_vision_ohlcv_zip_url(
     return url
 
 
+def binance_vision_trades_zip_url(symbol: str, candle_type: CandleType, date: date) -> str:
+    """
+    example urls:
+    https://data.binance.vision/data/spot/daily/aggTrades/BTCUSDT/BTCUSDT-aggTrades-2023-10-27.zip
+    https://data.binance.vision/data/futures/um/daily/aggTrades/BTCUSDT/BTCUSDT-aggTrades-2023-10-27.zip
+    """
+    asset_type_url_segment = candle_type_to_url_segment(candle_type)
+    url = (
+        f"https://data.binance.vision/data/{asset_type_url_segment}/daily/aggTrades/{symbol}"
+        f"/{symbol}-aggTrades-{date.strftime('%Y-%m-%d')}.zip"
+    )
+    return url
+
+
 async def get_daily_ohlcv(
     symbol: str,
     timeframe: str,
@@ -268,7 +285,11 @@ async def get_daily_ohlcv(
                             names=["date", "open", "high", "low", "close", "volume"],
                             header=header,
                         )
-                        df["date"] = pd.to_datetime(df["date"], unit="ms", utc=True)
+                        df["date"] = pd.to_datetime(
+                            np.where(df["date"] > 1e13, df["date"] // 1000, df["date"]),
+                            unit="ms",
+                            utc=True,
+                        )
                         return df
                 elif resp.status == 404:
                     logger.debug(f"Failed to download {url}")
@@ -280,3 +301,203 @@ async def get_daily_ohlcv(
             if isinstance(e, Http404) or retry > retry_count:
                 logger.debug(f"Failed to get data from {url}: {e}")
                 raise
async def download_archive_trades(
candle_type: CandleType,
pair: str,
*,
since_ms: int,
until_ms: int | None,
markets: dict[str, Any],
stop_on_404: bool = True,
) -> tuple[str, list[list]]:
try:
symbol = markets[pair]["id"]
last_available_date = dt_now() - timedelta(days=2)
start = dt_from_ts(since_ms)
end = dt_from_ts(until_ms) if until_ms else dt_now()
end = min(end, last_available_date)
if start >= end:
return pair, []
result_list = await _download_archive_trades(
symbol, pair, candle_type, start, end, stop_on_404
)
return pair, result_list
except Exception as e:
logger.warning(
"An exception occurred during fast trades download from Binance, falling back to "
"the slower REST API, this can take a lot more time.",
exc_info=e,
)
return pair, []
def parse_trades_from_zip(csvf):
# https://github.com/binance/binance-public-data/issues/283
first_byte = csvf.read(1)[0]
if chr(first_byte).isdigit():
# spot
header = None
names = [
"id",
"price",
"amount",
"first_trade_id",
"last_trade_id",
"timestamp",
"is_buyer_maker",
"is_best_match",
]
else:
# futures
header = 0
names = [
"id",
"price",
"amount",
"first_trade_id",
"last_trade_id",
"timestamp",
"is_buyer_maker",
]
csvf.seek(0)
df = pd.read_csv(
csvf,
names=names,
header=header,
)
df.loc[:, "cost"] = df["price"] * df["amount"]
# Side is reversed intentionally
# based on ccxt parseTrade logic.
df.loc[:, "side"] = np.where(df["is_buyer_maker"], "sell", "buy")
df.loc[:, "type"] = None
# Convert timestamp to ms
df.loc[:, "timestamp"] = np.where(
df["timestamp"] > 1e13,
df["timestamp"] // 1000,
df["timestamp"],
)
return df.loc[:, DEFAULT_TRADES_COLUMNS].to_records(index=False).tolist()
async def get_daily_trades(
symbol: str,
candle_type: CandleType,
date: date,
session: aiohttp.ClientSession,
retry_count: int = 3,
retry_delay: float = 0.0,
) -> list[list]:
"""
Get daily aggregated trades from https://data.binance.vision
See https://github.com/binance/binance-public-data
:symbol: binance symbol name, e.g. BTCUSDT
:candle_type: SPOT or FUTURES
:date: the returned list will cover the entire day of `date` in UTC
:session: an aiohttp.ClientSession instance
:retry_count: times to retry before returning the exceptions
:retry_delay: the time to wait before every retry
:return: a list containing trades in DEFAULT_TRADES_COLUMNS format
"""
url = binance_vision_trades_zip_url(symbol, candle_type, date)
logger.debug(f"download trades data from binance: {url}")
retry = 0
while True:
if retry > 0:
sleep_secs = retry * retry_delay
logger.debug(
f"[{retry}/{retry_count}] retry to download {url} after {sleep_secs} seconds"
)
await asyncio.sleep(sleep_secs)
try:
async with session.get(url) as resp:
if resp.status == 200:
content = await resp.read()
logger.debug(f"Successfully downloaded {url}")
with zipfile.ZipFile(BytesIO(content)) as zipf:
with zipf.open(zipf.namelist()[0]) as csvf:
return parse_trades_from_zip(csvf)
elif resp.status == 404:
logger.debug(f"Failed to download {url}")
raise Http404(f"404: {url}", date, url)
else:
raise BadHttpStatus(f"{resp.status} - {resp.reason}")
except Exception as e:
logger.info("download Daily_trades raised: %s", e)
retry += 1
if isinstance(e, Http404) or retry > retry_count:
logger.debug(f"Failed to get data from {url}: {e}")
raise
async def _download_archive_trades(
symbol: str,
pair: str,
candle_type: CandleType,
start: date,
end: date,
stop_on_404: bool,
) -> list[list]:
# trade rows accumulated from the daily archives; on errors, whatever was collected so far is returned
results: list[list] = []
# the current day being processed, starting at 1.
current_day = 0
connector = aiohttp.TCPConnector(limit=100)
async with aiohttp.ClientSession(connector=connector, trust_env=True) as session:
# the HTTP connections are throttled by the TCPConnector
for dates in chunks(list(date_range(start, end)), 30):
tasks = [
asyncio.create_task(get_daily_trades(symbol, candle_type, date, session))
for date in dates
]
for task in tasks:
current_day += 1
try:
result = await task
except Http404 as e:
if stop_on_404:
logger.debug(f"Failed to download {e.url} due to 404.")
# A 404 error on the first day indicates missing data
# on https://data.binance.vision, we provide the warning and the advice.
# https://github.com/freqtrade/freqtrade/blob/acc53065e5fa7ab5197073276306dc9dc3adbfa3/tests/exchange_online/test_binance_compare_ohlcv.py#L7
if current_day == 1:
logger.warning(
f"Fast download is unavailable due to missing data: "
f"{e.url}. Falling back to the slower REST API, "
"which may take more time."
)
if pair in ["BTC/USDT:USDT", "ETH/USDT:USDT", "BCH/USDT:USDT"]:
logger.warning(
f"To avoid the delay, you can first download {pair} using "
"`--timerange <start date>-20200101`, and then download the "
"remaining data with `--timerange 20200101-<end date>`."
)
else:
logger.warning(
f"Binance fast download for {pair} stopped at {e.date} due to "
f"missing data: {e.url}, falling back to rest API for the "
"remaining data, this can take more time."
)
await cancel_and_await_tasks(tasks[tasks.index(task) + 1 :])
return results
except Exception as e:
logger.warning(f"An exception raised: {e}")
# Directly return the existing data, do not allow the gap within the data
await cancel_and_await_tasks(tasks[tasks.index(task) + 1 :])
return results
else:
# Happy case
results.extend(result)
return results

View File

@@ -2351,6 +2351,7 @@ class Exchange:
                     since_ms=since_ms,
                     until_ms=until_ms,
                     candle_type=candle_type,
+                    raise_=True,
                 )
             )
             logger.debug(f"Downloaded data for {pair} from ccxt with length {len(data)}.")
@@ -2391,7 +2392,7 @@ class Exchange:
             if isinstance(res, BaseException):
                 logger.warning(f"Async code raised an exception: {repr(res)}")
                 if raise_:
-                    raise
+                    raise res
                 continue
             else:
                 # Deconstruct tuple if it's not an exception

View File

@@ -4,7 +4,7 @@ Exchange support utils
 """
 import inspect
 from datetime import datetime, timedelta, timezone
-from math import ceil, floor
+from math import ceil, floor, isnan
 from typing import Any
 
 import ccxt
@@ -305,7 +305,7 @@ def price_to_precision(
     :param rounding_mode: rounding mode to use. Defaults to ROUND
     :return: price rounded up to the precision the Exchange accepts
     """
-    if price_precision is not None and precisionMode is not None:
+    if price_precision is not None and precisionMode is not None and not isnan(price):
         if rounding_mode not in (ROUND_UP, ROUND_DOWN):
             # Use CCXT code where possible.
             return float(

View File

@@ -46,19 +46,20 @@ class BaseEnvironment(gym.Env):
     def __init__(
         self,
-        df: DataFrame = DataFrame(),
-        prices: DataFrame = DataFrame(),
-        reward_kwargs: dict = {},
+        *,
+        df: DataFrame,
+        prices: DataFrame,
+        reward_kwargs: dict,
         window_size=10,
         starting_point=True,
         id: str = "baseenv-1",  # noqa: A002
         seed: int = 1,
-        config: dict = {},
+        config: dict,
         live: bool = False,
         fee: float = 0.0015,
         can_short: bool = False,
         pair: str = "",
-        df_raw: DataFrame = DataFrame(),
+        df_raw: DataFrame,
     ):
         """
         Initializes the training/eval environment.

View File

@@ -488,7 +488,7 @@ def make_env(
     seed: int,
     train_df: DataFrame,
     price: DataFrame,
-    env_info: dict[str, Any] = {},
+    env_info: dict[str, Any],
 ) -> Callable:
     """
     Utility function for multiprocessed env.

View File

@@ -33,6 +33,8 @@ LABEL_PIPELINE = "label_pipeline"
TRAINDF = "trained_df" TRAINDF = "trained_df"
METADATA = "metadata" METADATA = "metadata"
METADATA_NUMBER_MODE = rapidjson.NM_NATIVE | rapidjson.NM_NAN
class pair_info(TypedDict): class pair_info(TypedDict):
model_filename: str model_filename: str
@@ -495,7 +497,7 @@ class FreqaiDataDrawer:
dk.data["label_list"] = dk.label_list dk.data["label_list"] = dk.label_list
with (save_path / f"{dk.model_filename}_{METADATA}.json").open("w") as fp: with (save_path / f"{dk.model_filename}_{METADATA}.json").open("w") as fp:
rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE) rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=METADATA_NUMBER_MODE)
return return
@@ -526,7 +528,7 @@ class FreqaiDataDrawer:
dk.data["label_list"] = dk.label_list dk.data["label_list"] = dk.label_list
# store the metadata # store the metadata
with (save_path / f"{dk.model_filename}_{METADATA}.json").open("w") as fp: with (save_path / f"{dk.model_filename}_{METADATA}.json").open("w") as fp:
rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE) rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=METADATA_NUMBER_MODE)
# save the pipelines to pickle files # save the pipelines to pickle files
with (save_path / f"{dk.model_filename}_{FEATURE_PIPELINE}.pkl").open("wb") as fp: with (save_path / f"{dk.model_filename}_{FEATURE_PIPELINE}.pkl").open("wb") as fp:
@@ -563,7 +565,7 @@ class FreqaiDataDrawer:
presaved backtesting (prediction file loading). presaved backtesting (prediction file loading).
""" """
with (dk.data_path / f"{dk.model_filename}_{METADATA}.json").open("r") as fp: with (dk.data_path / f"{dk.model_filename}_{METADATA}.json").open("r") as fp:
dk.data = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE) dk.data = rapidjson.load(fp, number_mode=METADATA_NUMBER_MODE)
dk.training_features_list = dk.data["training_features_list"] dk.training_features_list = dk.data["training_features_list"]
dk.label_list = dk.data["label_list"] dk.label_list = dk.data["label_list"]
@@ -587,7 +589,7 @@ class FreqaiDataDrawer:
dk.label_pipeline = self.meta_data_dictionary[coin][LABEL_PIPELINE] dk.label_pipeline = self.meta_data_dictionary[coin][LABEL_PIPELINE]
else: else:
with (dk.data_path / f"{dk.model_filename}_{METADATA}.json").open("r") as fp: with (dk.data_path / f"{dk.model_filename}_{METADATA}.json").open("r") as fp:
dk.data = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE) dk.data = rapidjson.load(fp, number_mode=METADATA_NUMBER_MODE)
with (dk.data_path / f"{dk.model_filename}_{FEATURE_PIPELINE}.pkl").open("rb") as fp: with (dk.data_path / f"{dk.model_filename}_{FEATURE_PIPELINE}.pkl").open("rb") as fp:
dk.feature_pipeline = cloudpickle.load(fp) dk.feature_pipeline = cloudpickle.load(fp)

View File

@@ -214,7 +214,7 @@ class FreqaiDataKitchen:
         self,
         unfiltered_df: DataFrame,
         training_feature_list: list,
-        label_list: list = list(),
+        label_list: list | None = None,
         training_filter: bool = True,
     ) -> tuple[DataFrame, DataFrame]:
         """
@@ -244,7 +244,7 @@ class FreqaiDataKitchen:
             # we don't care about total row number (total no. datapoints) in training, we only care
             # about removing any row with NaNs
             # if labels has multiple columns (user wants to train multiple modelEs), we detect here
-            labels = unfiltered_df.filter(label_list, axis=1)
+            labels = unfiltered_df.filter(label_list or [], axis=1)
             drop_index_labels = pd.isnull(labels).any(axis=1)
             drop_index_labels = (
                 drop_index_labels.replace(True, 1).replace(False, 0).infer_objects(copy=False)
@@ -654,8 +654,8 @@ class FreqaiDataKitchen:
         pair: str,
         tf: str,
         strategy: IStrategy,
-        corr_dataframes: dict = {},
-        base_dataframes: dict = {},
+        corr_dataframes: dict,
+        base_dataframes: dict,
         is_corr_pairs: bool = False,
     ) -> DataFrame:
         """
@@ -773,10 +773,10 @@ class FreqaiDataKitchen:
     def use_strategy_to_populate_indicators(  # noqa: C901
         self,
         strategy: IStrategy,
-        corr_dataframes: dict = {},
-        base_dataframes: dict = {},
+        corr_dataframes: dict[str, DataFrame] | None = None,
+        base_dataframes: dict[str, dict[str, DataFrame]] | None = None,
         pair: str = "",
-        prediction_dataframe: DataFrame = pd.DataFrame(),
+        prediction_dataframe: DataFrame | None = None,
         do_corr_pairs: bool = True,
     ) -> DataFrame:
         """
@@ -793,6 +793,10 @@ class FreqaiDataKitchen:
         :return:
         dataframe: DataFrame = dataframe containing populated indicators
         """
+        if not corr_dataframes:
+            corr_dataframes = {}
+        if not base_dataframes:
+            base_dataframes = {}
         # check if the user is using the deprecated populate_any_indicators function
         new_version = inspect.getsource(strategy.populate_any_indicators) == (
@@ -822,7 +826,7 @@ class FreqaiDataKitchen:
                 if tf not in corr_dataframes[p]:
                     corr_dataframes[p][tf] = pd.DataFrame()
 
-        if not prediction_dataframe.empty:
+        if prediction_dataframe is not None and not prediction_dataframe.empty:
             dataframe = prediction_dataframe.copy()
             base_dataframes[self.config["timeframe"]] = dataframe.copy()
         else:

View File

@@ -618,7 +618,7 @@ class IFreqaiModel(ABC):
             )
 
             unfiltered_dataframe = dk.use_strategy_to_populate_indicators(
-                strategy, corr_dataframes, base_dataframes, pair
+                strategy, corr_dataframes=corr_dataframes, base_dataframes=base_dataframes, pair=pair
             )
 
             trained_timestamp = new_trained_timerange.stopts

View File

@@ -25,7 +25,7 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
         criterion: nn.Module,
         device: str,
         data_convertor: PyTorchDataConvertor,
-        model_meta_data: dict[str, Any] = {},
+        model_meta_data: dict[str, Any] | None = None,
         window_size: int = 1,
         tb_logger: Any = None,
         **kwargs,
@@ -45,6 +45,8 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
         :param n_epochs: The maximum number batches to use for evaluation.
         :param batch_size: The size of the batches to use during training.
         """
+        if model_meta_data is None:
+            model_meta_data = {}
         self.model = model
         self.optimizer = optimizer
         self.criterion = criterion

View File

@@ -64,7 +64,7 @@ from freqtrade.rpc.rpc_types import (
) )
from freqtrade.strategy.interface import IStrategy from freqtrade.strategy.interface import IStrategy
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
from freqtrade.util import FtPrecise, MeasureTime, dt_from_ts from freqtrade.util import FtPrecise, MeasureTime, PeriodicCache, dt_from_ts, dt_now
from freqtrade.util.migrations.binance_mig import migrate_binance_futures_names from freqtrade.util.migrations.binance_mig import migrate_binance_futures_names
from freqtrade.wallets import Wallets from freqtrade.wallets import Wallets
@@ -145,7 +145,11 @@ class FreqtradeBot(LoggingMixin):
else None else None
) )
self.active_pair_whitelist = self._refresh_active_whitelist() logger.info("Starting initial pairlist refresh")
with MeasureTime(
lambda duration, _: logger.info(f"Initial Pairlist refresh took {duration:.2f}s"), 0
):
self.active_pair_whitelist = self._refresh_active_whitelist()
# Set initial bot state from config # Set initial bot state from config
initial_state = self.config.get("initial_state") initial_state = self.config.get("initial_state")
@@ -154,6 +158,7 @@ class FreqtradeBot(LoggingMixin):
# Protect exit-logic from forcesell and vice versa # Protect exit-logic from forcesell and vice versa
self._exit_lock = Lock() self._exit_lock = Lock()
timeframe_secs = timeframe_to_seconds(self.strategy.timeframe) timeframe_secs = timeframe_to_seconds(self.strategy.timeframe)
self._exit_reason_cache = PeriodicCache(100, ttl=timeframe_secs)
LoggingMixin.__init__(self, logger, timeframe_secs) LoggingMixin.__init__(self, logger, timeframe_secs)
self._schedule = Scheduler() self._schedule = Scheduler()
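The initial pairlist refresh is now timed by MeasureTime, a context manager that reports the elapsed duration to a callback. A rough, illustrative equivalent (not freqtrade's exact implementation, which also passes a message argument to the callback):

import logging
import time
from contextlib import contextmanager

logger = logging.getLogger(__name__)

@contextmanager
def measure_time(callback, min_secs: float = 0.0):
    # Run the wrapped block, then report how long it took if above the threshold.
    start = time.perf_counter()
    try:
        yield
    finally:
        duration = time.perf_counter() - start
        if duration >= min_secs:
            callback(duration)

with measure_time(lambda d: logger.info(f"Initial Pairlist refresh took {d:.2f}s"), 0):
    pass  # _refresh_active_whitelist() would run here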
@@ -898,14 +903,14 @@ class FreqtradeBot(LoggingMixin):
msg = ( msg = (
f"Position adjust: about to create a new order for {pair} with stake_amount: " f"Position adjust: about to create a new order for {pair} with stake_amount: "
f"{stake_amount} for {trade}" f"{stake_amount} and price: {enter_limit_requested} for {trade}"
if mode == "pos_adjust" if mode == "pos_adjust"
else ( else (
f"Replacing {side} order: about create a new order for {pair} with stake_amount: " f"Replacing {side} order: about create a new order for {pair} with stake_amount: "
f"{stake_amount} ..." f"{stake_amount} and price: {enter_limit_requested} ..."
if mode == "replace" if mode == "replace"
else f"{name} signal found: about create a new trade for {pair} with stake_amount: " else f"{name} signal found: about create a new trade for {pair} with stake_amount: "
f"{stake_amount} ..." f"{stake_amount} and price: {enter_limit_requested} ..."
) )
) )
logger.info(msg) logger.info(msg)
@@ -1374,6 +1379,15 @@ class FreqtradeBot(LoggingMixin):
for should_exit in exits: for should_exit in exits:
if should_exit.exit_flag: if should_exit.exit_flag:
exit_tag1 = exit_tag if should_exit.exit_type == ExitType.EXIT_SIGNAL else None exit_tag1 = exit_tag if should_exit.exit_type == ExitType.EXIT_SIGNAL else None
if trade.has_open_orders:
if prev_eval := self._exit_reason_cache.get(
f"{trade.pair}_{trade.id}_{exit_tag1 or should_exit.exit_reason}", None
):
logger.debug(
f"Exit reason already seen this candle, first seen at {prev_eval}"
)
continue
logger.info( logger.info(
f"Exit for {trade.pair} detected. Reason: {should_exit.exit_type}" f"Exit for {trade.pair} detected. Reason: {should_exit.exit_type}"
f"{f' Tag: {exit_tag1}' if exit_tag1 is not None else ''}" f"{f' Tag: {exit_tag1}' if exit_tag1 is not None else ''}"
@@ -1593,27 +1607,29 @@ class FreqtradeBot(LoggingMixin):
self.replace_order(order, open_order, trade) self.replace_order(order, open_order, trade)
def handle_cancel_order( def handle_cancel_order(
self, order: CcxtOrder, order_obj: Order, trade: Trade, reason: str self, order: CcxtOrder, order_obj: Order, trade: Trade, reason: str, replacing: bool = False
) -> None: ) -> bool:
""" """
Check if current analyzed order timed out and cancel if necessary. Check if current analyzed order timed out and cancel if necessary.
:param order: Order dict grabbed with exchange.fetch_order() :param order: Order dict grabbed with exchange.fetch_order()
:param order_obj: Order object from the database. :param order_obj: Order object from the database.
:param trade: Trade object. :param trade: Trade object.
:return: None :return: True if the order was canceled, False otherwise.
""" """
if order["side"] == trade.entry_side: if order["side"] == trade.entry_side:
self.handle_cancel_enter(trade, order, order_obj, reason) return self.handle_cancel_enter(trade, order, order_obj, reason, replacing)
else: else:
canceled = self.handle_cancel_exit(trade, order, order_obj, reason) canceled = self.handle_cancel_exit(trade, order, order_obj, reason)
canceled_count = trade.get_canceled_exit_order_count() if not replacing:
max_timeouts = self.config.get("unfilledtimeout", {}).get("exit_timeout_count", 0) canceled_count = trade.get_canceled_exit_order_count()
if canceled and max_timeouts > 0 and canceled_count >= max_timeouts: max_timeouts = self.config.get("unfilledtimeout", {}).get("exit_timeout_count", 0)
logger.warning( if canceled and max_timeouts > 0 and canceled_count >= max_timeouts:
f"Emergency exiting trade {trade}, as the exit order " logger.warning(
f"timed out {max_timeouts} times. force selling {order['amount']}." f"Emergency exiting trade {trade}, as the exit order "
) f"timed out {max_timeouts} times. force selling {order['amount']}."
self.emergency_exit(trade, order["price"], order["amount"]) )
self.emergency_exit(trade, order["price"], order["amount"])
return canceled
def emergency_exit( def emergency_exit(
self, trade: Trade, price: float, sub_trade_amt: float | None = None self, trade: Trade, price: float, sub_trade_amt: float | None = None
@@ -1649,9 +1665,9 @@ class FreqtradeBot(LoggingMixin):
def replace_order(self, order: CcxtOrder, order_obj: Order | None, trade: Trade) -> None: def replace_order(self, order: CcxtOrder, order_obj: Order | None, trade: Trade) -> None:
""" """
Check if current analyzed entry order should be replaced or simply cancelled. Check if current analyzed entry order should be replaced or simply cancelled.
To simply cancel the existing order(no replacement) adjust_entry_price() should return None To simply cancel the existing order(no replacement) adjust_order_price() should return None
To maintain existing order adjust_entry_price() should return order_obj.price To maintain existing order adjust_order_price() should return order_obj.price
To replace existing order adjust_entry_price() should return desired price for limit order To replace existing order adjust_order_price() should return desired price for limit order
:param order: Order dict grabbed with exchange.fetch_order() :param order: Order dict grabbed with exchange.fetch_order()
:param order_obj: Order object. :param order_obj: Order object.
:param trade: Trade object. :param trade: Trade object.
@@ -1665,17 +1681,17 @@ class FreqtradeBot(LoggingMixin):
self.strategy.timeframe, latest_candle_open_date self.strategy.timeframe, latest_candle_open_date
) )
# Check if new candle # Check if new candle
if ( if order_obj and latest_candle_close_date > order_obj.order_date_utc:
order_obj is_entry = order_obj.side == trade.entry_side
and order_obj.side == trade.entry_side
and latest_candle_close_date > order_obj.order_date_utc
):
# New candle # New candle
proposed_rate = self.exchange.get_rate( proposed_rate = self.exchange.get_rate(
trade.pair, side="entry", is_short=trade.is_short, refresh=True trade.pair,
side="entry" if is_entry else "exit",
is_short=trade.is_short,
refresh=True,
) )
adjusted_entry_price = strategy_safe_wrapper( adjusted_price = strategy_safe_wrapper(
self.strategy.adjust_entry_price, default_retval=order_obj.safe_placement_price self.strategy.adjust_order_price, default_retval=order_obj.safe_placement_price
)( )(
trade=trade, trade=trade,
order=order_obj, order=order_obj,
@@ -1685,36 +1701,51 @@ class FreqtradeBot(LoggingMixin):
current_order_rate=order_obj.safe_placement_price, current_order_rate=order_obj.safe_placement_price,
entry_tag=trade.enter_tag, entry_tag=trade.enter_tag,
side=trade.trade_direction, side=trade.trade_direction,
is_entry=is_entry,
) )
replacing = True replacing = True
cancel_reason = constants.CANCEL_REASON["REPLACE"] cancel_reason = constants.CANCEL_REASON["REPLACE"]
if not adjusted_entry_price: if not adjusted_price:
replacing = False replacing = False
cancel_reason = constants.CANCEL_REASON["USER_CANCEL"] cancel_reason = constants.CANCEL_REASON["USER_CANCEL"]
if order_obj.safe_placement_price != adjusted_entry_price:
if order_obj.safe_placement_price != adjusted_price:
# cancel existing order if new price is supplied or None # cancel existing order if new price is supplied or None
res = self.handle_cancel_enter( res = self.handle_cancel_order(
trade, order, order_obj, cancel_reason, replacing=replacing order, order_obj, trade, cancel_reason, replacing=replacing
) )
if not res: if not res:
self.replace_order_failed( self.replace_order_failed(
trade, f"Could not fully cancel order for {trade}, therefore not replacing." trade, f"Could not fully cancel order for {trade}, therefore not replacing."
) )
return return
if adjusted_entry_price: if adjusted_price:
# place new order only if new price is supplied # place new order only if new price is supplied
try: try:
if not self.execute_entry( if is_entry:
pair=trade.pair, succeeded = self.execute_entry(
stake_amount=( pair=trade.pair,
order_obj.safe_remaining * order_obj.safe_price / trade.leverage stake_amount=(
), order_obj.safe_remaining * order_obj.safe_price / trade.leverage
price=adjusted_entry_price, ),
trade=trade, price=adjusted_price,
is_short=trade.is_short, trade=trade,
mode="replace", is_short=trade.is_short,
): mode="replace",
)
else:
succeeded = self.execute_trade_exit(
trade,
adjusted_price,
exit_check=ExitCheckTuple(
exit_type=ExitType.CUSTOM_EXIT,
exit_reason=order_obj.ft_order_tag or "order_replaced",
),
ordertype="limit",
sub_trade_amt=order_obj.safe_remaining,
)
if not succeeded:
self.replace_order_failed( self.replace_order_failed(
trade, f"Could not replace order for {trade}." trade, f"Could not replace order for {trade}."
) )
@@ -1774,7 +1805,7 @@ class FreqtradeBot(LoggingMixin):
if trade.has_open_orders: if trade.has_open_orders:
oo = trade.select_order(side, True) oo = trade.select_order(side, True)
if oo is not None: if oo is not None:
if (price == oo.price) and (side == oo.side) and (amount == oo.amount): if price == oo.price and side == oo.side and amount == oo.amount:
logger.info( logger.info(
f"A similar open order was found for {trade.pair}. " f"A similar open order was found for {trade.pair}. "
f"Keeping existing {trade.exit_side} order. {price=}, {amount=}" f"Keeping existing {trade.exit_side} order. {price=}, {amount=}"
@@ -1870,7 +1901,10 @@ class FreqtradeBot(LoggingMixin):
# to the trade object # to the trade object
self.update_trade_state(trade, order_id, corder) self.update_trade_state(trade, order_id, corder)
logger.info(f"Partial {trade.entry_side} order timeout for {trade}.") logger.info(
f"Partial {trade.entry_side} order timeout for {trade}. Filled: {filled_amount}, "
f"total: {order_obj.ft_amount}"
)
order_obj.ft_cancel_reason += f", {constants.CANCEL_REASON['PARTIALLY_FILLED']}" order_obj.ft_cancel_reason += f", {constants.CANCEL_REASON['PARTIALLY_FILLED']}"
self.wallets.update() self.wallets.update()
@@ -2092,6 +2126,7 @@ class FreqtradeBot(LoggingMixin):
self.handle_insufficient_funds(trade) self.handle_insufficient_funds(trade)
return False return False
self._exit_reason_cache[f"{trade.pair}_{trade.id}_{exit_reason}"] = dt_now()
order_obj = Order.parse_from_ccxt_object(order, trade.pair, trade.exit_side, amount, limit) order_obj = Order.parse_from_ccxt_object(order, trade.pair, trade.exit_side, amount, limit)
order_obj.ft_order_tag = exit_reason order_obj.ft_order_tag = exit_reason
trade.orders.append(order_obj) trade.orders.append(order_obj)
@@ -2555,4 +2590,15 @@ class FreqtradeBot(LoggingMixin):
max_custom_price_allowed = proposed_price + (proposed_price * cust_p_max_dist_r) max_custom_price_allowed = proposed_price + (proposed_price * cust_p_max_dist_r)
# Bracket between min_custom_price_allowed and max_custom_price_allowed # Bracket between min_custom_price_allowed and max_custom_price_allowed
return max(min(valid_custom_price, max_custom_price_allowed), min_custom_price_allowed) final_price = max(
min(valid_custom_price, max_custom_price_allowed), min_custom_price_allowed
)
# Log when the custom price was adjusted by clamping.
if final_price != valid_custom_price:
logger.info(
f"Custom price adjusted from {valid_custom_price} to {final_price} based on "
"custom_price_max_distance_ratio of {cust_p_max_dist_r}."
)
return final_price
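The bracketing itself is unchanged; only the logging is new. As a quick worked example of the clamp, assuming a proposed price of 100 and a custom_price_max_distance_ratio of 0.02:

proposed_price = 100.0
cust_p_max_dist_r = 0.02
min_allowed = proposed_price - proposed_price * cust_p_max_dist_r  # 98.0
max_allowed = proposed_price + proposed_price * cust_p_max_dist_r  # 102.0

# A custom price of 105 is clamped to 102 (and now logged); 101 passes through untouched.
assert max(min(105.0, max_allowed), min_allowed) == 102.0
assert max(min(101.0, max_allowed), min_allowed) == 101.0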

View File

@@ -75,8 +75,7 @@ def setup_logging(config: Config) -> None:
# config['logfilename']), which defaults to '/dev/log', applicable for most # config['logfilename']), which defaults to '/dev/log', applicable for most
# of the systems. # of the systems.
address = (s[1], int(s[2])) if len(s) > 2 else s[1] if len(s) > 1 else "/dev/log" address = (s[1], int(s[2])) if len(s) > 2 else s[1] if len(s) > 1 else "/dev/log"
handler_sl = get_existing_handlers(SysLogHandler) if handler_sl := get_existing_handlers(SysLogHandler):
if handler_sl:
logging.root.removeHandler(handler_sl) logging.root.removeHandler(handler_sl)
handler_sl = SysLogHandler(address=address) handler_sl = SysLogHandler(address=address)
# No datetime field for logging into syslog, to allow syslog # No datetime field for logging into syslog, to allow syslog
@@ -92,8 +91,7 @@ def setup_logging(config: Config) -> None:
"You need the cysystemd python package be installed in " "You need the cysystemd python package be installed in "
"order to use logging to journald." "order to use logging to journald."
) )
handler_jd = get_existing_handlers(JournaldLogHandler) if handler_jd := get_existing_handlers(JournaldLogHandler):
if handler_jd:
logging.root.removeHandler(handler_jd) logging.root.removeHandler(handler_jd)
handler_jd = JournaldLogHandler() handler_jd = JournaldLogHandler()
# No datetime field for logging into journald, to allow syslog # No datetime field for logging into journald, to allow syslog
@@ -102,8 +100,7 @@ def setup_logging(config: Config) -> None:
handler_jd.setFormatter(Formatter("%(name)s - %(levelname)s - %(message)s")) handler_jd.setFormatter(Formatter("%(name)s - %(levelname)s - %(message)s"))
logging.root.addHandler(handler_jd) logging.root.addHandler(handler_jd)
else: else:
handler_rf = get_existing_handlers(RotatingFileHandler) if handler_rf := get_existing_handlers(RotatingFileHandler):
if handler_rf:
logging.root.removeHandler(handler_rf) logging.root.removeHandler(handler_rf)
try: try:
logfile_path = Path(logfile) logfile_path = Path(logfile)
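All three handler branches fold the lookup-then-test pair into a single assignment expression. The pattern in isolation (generic sketch; this `get_existing_handlers` is only a stand-in for freqtrade's helper):

import logging
from logging.handlers import RotatingFileHandler

def get_existing_handlers(handler_type):
    # Return the first root handler of the given type, if any (illustrative helper).
    return next((h for h in logging.root.handlers if isinstance(h, handler_type)), None)

if handler_rf := get_existing_handlers(RotatingFileHandler):
    logging.root.removeHandler(handler_rf)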

View File

@@ -396,6 +396,8 @@ class Backtesting:
self.canceled_trade_entries = 0 self.canceled_trade_entries = 0
self.canceled_entry_orders = 0 self.canceled_entry_orders = 0
self.replaced_entry_orders = 0 self.replaced_entry_orders = 0
self.canceled_exit_orders = 0
self.replaced_exit_orders = 0
self.dataprovider.clear_cache() self.dataprovider.clear_cache()
if enable_protections: if enable_protections:
self._load_protections(self.strategy) self._load_protections(self.strategy)
@@ -601,7 +603,7 @@ class Backtesting:
# This should not be reached... # This should not be reached...
return row[OPEN_IDX] return row[OPEN_IDX]
def _get_adjust_trade_entry_for_candle( def _check_adjust_trade_for_candle(
self, trade: LocalTrade, row: tuple, current_time: datetime self, trade: LocalTrade, row: tuple, current_time: datetime
) -> LocalTrade: ) -> LocalTrade:
current_rate: float = row[OPEN_IDX] current_rate: float = row[OPEN_IDX]
@@ -869,7 +871,7 @@ class Backtesting:
# Check if we need to adjust our current positions # Check if we need to adjust our current positions
if self.strategy.position_adjustment_enable: if self.strategy.position_adjustment_enable:
trade = self._get_adjust_trade_entry_for_candle(trade, row, current_time) trade = self._check_adjust_trade_for_candle(trade, row, current_time)
if trade.is_open: if trade.is_open:
enter = row[SHORT_IDX] if trade.is_short else row[LONG_IDX] enter = row[SHORT_IDX] if trade.is_short else row[LONG_IDX]
@@ -1234,8 +1236,8 @@ class Backtesting:
for order in [o for o in trade.orders if o.ft_is_open]: for order in [o for o in trade.orders if o.ft_is_open]:
if order.side == trade.entry_side: if order.side == trade.entry_side:
self.canceled_entry_orders += 1 self.canceled_entry_orders += 1
# elif order.side == trade.exit_side: elif order.side == trade.exit_side:
# self.canceled_exit_orders += 1 self.canceled_exit_orders += 1
# canceled orders are removed from the trade # canceled orders are removed from the trade
del trade.orders[trade.orders.index(order)] del trade.orders[trade.orders.index(order)]
@@ -1299,9 +1301,10 @@ class Backtesting:
Returns True if the trade should be deleted. Returns True if the trade should be deleted.
""" """
# only check on new candles for open entry orders # only check on new candles for open entry orders
if order.side == trade.entry_side and current_time > order.order_date_utc: if current_time > order.order_date_utc:
is_entry = order.side == trade.entry_side
requested_rate = strategy_safe_wrapper( requested_rate = strategy_safe_wrapper(
self.strategy.adjust_entry_price, default_retval=order.ft_price self.strategy.adjust_order_price, default_retval=order.ft_price
)( )(
trade=trade, # type: ignore[arg-type] trade=trade, # type: ignore[arg-type]
order=order, order=order,
@@ -1311,6 +1314,7 @@ class Backtesting:
current_order_rate=order.ft_price, current_order_rate=order.ft_price,
entry_tag=trade.enter_tag, entry_tag=trade.enter_tag,
side=trade.trade_direction, side=trade.trade_direction,
is_entry=is_entry,
) # default value is current order price ) # default value is current order price
# cancel existing order whenever a new rate is requested (or None) # cancel existing order whenever a new rate is requested (or None)
@@ -1319,22 +1323,35 @@ class Backtesting:
return False return False
else: else:
del trade.orders[trade.orders.index(order)] del trade.orders[trade.orders.index(order)]
self.canceled_entry_orders += 1 if is_entry:
self.canceled_entry_orders += 1
else:
self.canceled_exit_orders += 1
# place new order if result was not None # place new order if result was not None
if requested_rate: if requested_rate:
self._enter_trade( if is_entry:
pair=trade.pair, self._enter_trade(
row=row, pair=trade.pair,
trade=trade, row=row,
requested_rate=requested_rate, trade=trade,
requested_stake=(order.safe_remaining * order.ft_price / trade.leverage), requested_rate=requested_rate,
direction="short" if trade.is_short else "long", requested_stake=(order.safe_remaining * order.ft_price / trade.leverage),
) direction="short" if trade.is_short else "long",
)
self.replaced_entry_orders += 1
else:
self._exit_trade(
trade=trade,
sell_row=row,
close_rate=requested_rate,
amount=order.safe_remaining,
exit_reason=order.ft_order_tag,
)
self.replaced_exit_orders += 1
# Delete trade if no successful entries happened (if placing the new order failed) # Delete trade if no successful entries happened (if placing the new order failed)
if not trade.has_open_orders and trade.nr_of_successful_entries == 0: if not trade.has_open_orders and is_entry and trade.nr_of_successful_entries == 0:
return True return True
self.replaced_entry_orders += 1
else: else:
# assumption: there can't be multiple open entry orders at any given time # assumption: there can't be multiple open entry orders at any given time
return trade.nr_of_successful_entries == 0 return trade.nr_of_successful_entries == 0

View File

@@ -312,6 +312,7 @@ def text_table_add_metrics(strat_results: dict) -> None:
("Sortino", f"{strat_results['sortino']:.2f}" if "sortino" in strat_results else "N/A"), ("Sortino", f"{strat_results['sortino']:.2f}" if "sortino" in strat_results else "N/A"),
("Sharpe", f"{strat_results['sharpe']:.2f}" if "sharpe" in strat_results else "N/A"), ("Sharpe", f"{strat_results['sharpe']:.2f}" if "sharpe" in strat_results else "N/A"),
("Calmar", f"{strat_results['calmar']:.2f}" if "calmar" in strat_results else "N/A"), ("Calmar", f"{strat_results['calmar']:.2f}" if "calmar" in strat_results else "N/A"),
("SQN", f"{strat_results['sqn']:.2f}" if "sqn" in strat_results else "N/A"),
( (
"Profit factor", "Profit factor",
( (

View File

@@ -16,6 +16,7 @@ from freqtrade.data.metrics import (
calculate_max_drawdown, calculate_max_drawdown,
calculate_sharpe, calculate_sharpe,
calculate_sortino, calculate_sortino,
calculate_sqn,
) )
from freqtrade.ft_types import BacktestResultType from freqtrade.ft_types import BacktestResultType
from freqtrade.util import decimals_per_coin, fmt_coin, get_dry_run_wallet from freqtrade.util import decimals_per_coin, fmt_coin, get_dry_run_wallet
@@ -468,6 +469,7 @@ def generate_strategy_stats(
"sortino": calculate_sortino(results, min_date, max_date, start_balance), "sortino": calculate_sortino(results, min_date, max_date, start_balance),
"sharpe": calculate_sharpe(results, min_date, max_date, start_balance), "sharpe": calculate_sharpe(results, min_date, max_date, start_balance),
"calmar": calculate_calmar(results, min_date, max_date, start_balance), "calmar": calculate_calmar(results, min_date, max_date, start_balance),
"sqn": calculate_sqn(results, start_balance),
"profit_factor": profit_factor, "profit_factor": profit_factor,
"backtest_start": min_date.strftime(DATETIME_PRINT_FORMAT), "backtest_start": min_date.strftime(DATETIME_PRINT_FORMAT),
"backtest_start_ts": int(min_date.timestamp() * 1000), "backtest_start_ts": int(min_date.timestamp() * 1000),

View File

@@ -124,6 +124,7 @@ def migrate_trades_and_orders_table(
funding_fees = get_column_def(cols, "funding_fees", "0.0") funding_fees = get_column_def(cols, "funding_fees", "0.0")
funding_fee_running = get_column_def(cols, "funding_fee_running", "null") funding_fee_running = get_column_def(cols, "funding_fee_running", "null")
max_stake_amount = get_column_def(cols, "max_stake_amount", "stake_amount") max_stake_amount = get_column_def(cols, "max_stake_amount", "stake_amount")
record_version = get_column_def(cols, "record_version", "1")
# If ticker-interval existed use that, else null. # If ticker-interval existed use that, else null.
if has_column(cols, "ticker_interval"): if has_column(cols, "ticker_interval"):
@@ -180,7 +181,7 @@ def migrate_trades_and_orders_table(
trading_mode, leverage, liquidation_price, is_short, trading_mode, leverage, liquidation_price, is_short,
interest_rate, funding_fees, funding_fee_running, realized_profit, interest_rate, funding_fees, funding_fee_running, realized_profit,
amount_precision, price_precision, precision_mode, precision_mode_price, contract_size, amount_precision, price_precision, precision_mode, precision_mode_price, contract_size,
max_stake_amount max_stake_amount, record_version
) )
select id, lower(exchange), pair, {base_currency} base_currency, select id, lower(exchange), pair, {base_currency} base_currency,
{stake_currency} stake_currency, {stake_currency} stake_currency,
@@ -210,7 +211,8 @@ def migrate_trades_and_orders_table(
{realized_profit} realized_profit, {realized_profit} realized_profit,
{amount_precision} amount_precision, {price_precision} price_precision, {amount_precision} amount_precision, {price_precision} price_precision,
{precision_mode} precision_mode, {precision_mode_price} precision_mode_price, {precision_mode} precision_mode, {precision_mode_price} precision_mode_price,
{contract_size} contract_size, {max_stake_amount} max_stake_amount {contract_size} contract_size, {max_stake_amount} max_stake_amount,
{record_version} record_version
from {trade_back_name} from {trade_back_name}
""" """
) )
@@ -329,6 +331,25 @@ def fix_old_dry_orders(engine):
connection.execute(stmt) connection.execute(stmt)
def fix_wrong_max_stake_amount(engine):
"""
Fix max_stake_amount for leveraged closed trades
This caused record_version to be bumped to 2.
"""
with engine.begin() as connection:
stmt = (
update(Trade)
.where(
Trade.record_version < 2,
Trade.leverage > 1,
Trade.is_open.is_(False),
Trade.max_stake_amount != 0,
)
.values(max_stake_amount=Trade.max_stake_amount / Trade.leverage, record_version=2)
)
connection.execute(stmt)
def check_migrate(engine, decl_base, previous_tables) -> None: def check_migrate(engine, decl_base, previous_tables) -> None:
""" """
Checks if migration is necessary and migrates if necessary Checks if migration is necessary and migrates if necessary
@@ -350,7 +371,7 @@ def check_migrate(engine, decl_base, previous_tables) -> None:
# if ('orders' not in previous_tables # if ('orders' not in previous_tables
# or not has_column(cols_orders, 'funding_fee')): # or not has_column(cols_orders, 'funding_fee')):
migrating = False migrating = False
if not has_column(cols_trades, "precision_mode_price"): if not has_column(cols_trades, "record_version"):
# if not has_column(cols_orders, "ft_order_tag"): # if not has_column(cols_orders, "ft_order_tag"):
migrating = True migrating = True
logger.info( logger.info(
@@ -383,6 +404,7 @@ def check_migrate(engine, decl_base, previous_tables) -> None:
set_sqlite_to_wal(engine) set_sqlite_to_wal(engine)
fix_old_dry_orders(engine) fix_old_dry_orders(engine)
fix_wrong_max_stake_amount(engine)
if migrating: if migrating:
logger.info("Database migration finished.") logger.info("Database migration finished.")

View File

@@ -464,6 +464,8 @@ class LocalTrade:
# Used to keep running funding fees - between the last filled order and now # Used to keep running funding fees - between the last filled order and now
# Shall not be used for calculations! # Shall not be used for calculations!
funding_fee_running: float | None = None funding_fee_running: float | None = None
# v 2 -> correct max_stake_amount calculation for leveraged trades
record_version: int = 2
@property @property
def stoploss_or_liquidation(self) -> float: def stoploss_or_liquidation(self) -> float:
@@ -1243,7 +1245,7 @@ class LocalTrade:
total_stake += self._calc_open_trade_value(tmp_amount, price) total_stake += self._calc_open_trade_value(tmp_amount, price)
max_stake_amount += tmp_amount * price max_stake_amount += tmp_amount * price
self.funding_fees = funding_fees self.funding_fees = funding_fees
self.max_stake_amount = float(max_stake_amount) self.max_stake_amount = float(max_stake_amount) / (self.leverage or 1.0)
if close_profit: if close_profit:
self.close_profit = close_profit self.close_profit = close_profit
@@ -1535,45 +1537,47 @@ class LocalTrade:
:param json_str: json string to parse :param json_str: json string to parse
:return: Trade instance :return: Trade instance
""" """
from uuid import uuid4
import rapidjson import rapidjson
data = rapidjson.loads(json_str) data = rapidjson.loads(json_str)
trade = cls( trade = cls(
__FROM_JSON=True, __FROM_JSON=True,
id=data["trade_id"], id=data.get("trade_id"),
pair=data["pair"], pair=data["pair"],
base_currency=data["base_currency"], base_currency=data.get("base_currency"),
stake_currency=data["quote_currency"], stake_currency=data.get("quote_currency"),
is_open=data["is_open"], is_open=data["is_open"],
exchange=data["exchange"], exchange=data.get("exchange", "import"),
amount=data["amount"], amount=data["amount"],
amount_requested=data["amount_requested"], amount_requested=data.get("amount_requested", data["amount"]),
stake_amount=data["stake_amount"], stake_amount=data["stake_amount"],
strategy=data["strategy"], strategy=data.get("strategy"),
enter_tag=data["enter_tag"], enter_tag=data["enter_tag"],
timeframe=data["timeframe"], timeframe=data.get("timeframe"),
fee_open=data["fee_open"], fee_open=data["fee_open"],
fee_open_cost=data["fee_open_cost"], fee_open_cost=data.get("fee_open_cost"),
fee_open_currency=data["fee_open_currency"], fee_open_currency=data.get("fee_open_currency"),
fee_close=data["fee_close"], fee_close=data["fee_close"],
fee_close_cost=data["fee_close_cost"], fee_close_cost=data.get("fee_close_cost"),
fee_close_currency=data["fee_close_currency"], fee_close_currency=data.get("fee_close_currency"),
open_date=datetime.fromtimestamp(data["open_timestamp"] // 1000, tz=timezone.utc), open_date=datetime.fromtimestamp(data["open_timestamp"] // 1000, tz=timezone.utc),
open_rate=data["open_rate"], open_rate=data["open_rate"],
open_rate_requested=data["open_rate_requested"], open_rate_requested=data.get("open_rate_requested", data["open_rate"]),
open_trade_value=data["open_trade_value"], open_trade_value=data.get("open_trade_value"),
close_date=( close_date=(
datetime.fromtimestamp(data["close_timestamp"] // 1000, tz=timezone.utc) datetime.fromtimestamp(data["close_timestamp"] // 1000, tz=timezone.utc)
if data["close_timestamp"] if data["close_timestamp"]
else None else None
), ),
realized_profit=data["realized_profit"], realized_profit=data.get("realized_profit", 0),
close_rate=data["close_rate"], close_rate=data["close_rate"],
close_rate_requested=data["close_rate_requested"], close_rate_requested=data.get("close_rate_requested", data["close_rate"]),
close_profit=data["close_profit"], close_profit=data.get("close_profit", data.get("profit_ratio")),
close_profit_abs=data["close_profit_abs"], close_profit_abs=data.get("close_profit_abs", data.get("profit_abs")),
exit_reason=data["exit_reason"], exit_reason=data["exit_reason"],
exit_order_status=data["exit_order_status"], exit_order_status=data.get("exit_order_status"),
stop_loss=data["stop_loss_abs"], stop_loss=data["stop_loss_abs"],
stop_loss_pct=data["stop_loss_ratio"], stop_loss_pct=data["stop_loss_ratio"],
initial_stop_loss=data["initial_stop_loss_abs"], initial_stop_loss=data["initial_stop_loss_abs"],
@@ -1581,11 +1585,11 @@ class LocalTrade:
min_rate=data["min_rate"], min_rate=data["min_rate"],
max_rate=data["max_rate"], max_rate=data["max_rate"],
leverage=data["leverage"], leverage=data["leverage"],
interest_rate=data["interest_rate"], interest_rate=data.get("interest_rate"),
liquidation_price=data["liquidation_price"], liquidation_price=data.get("liquidation_price"),
is_short=data["is_short"], is_short=data["is_short"],
trading_mode=data["trading_mode"], trading_mode=data.get("trading_mode"),
funding_fees=data["funding_fees"], funding_fees=data.get("funding_fees"),
amount_precision=data.get("amount_precision", None), amount_precision=data.get("amount_precision", None),
price_precision=data.get("price_precision", None), price_precision=data.get("price_precision", None),
precision_mode=data.get("precision_mode", None), precision_mode=data.get("precision_mode", None),
@@ -1597,23 +1601,25 @@ class LocalTrade:
amount=order["amount"], amount=order["amount"],
ft_amount=order["amount"], ft_amount=order["amount"],
ft_order_side=order["ft_order_side"], ft_order_side=order["ft_order_side"],
ft_pair=order["pair"], ft_pair=order.get("pair", data["pair"]),
ft_is_open=order["is_open"], ft_is_open=order.get("is_open", False),
order_id=order["order_id"], order_id=order.get("order_id", uuid4().hex),
status=order["status"], status=order.get("status"),
average=order["average"], average=order.get("average", order.get("safe_price")),
cost=order["cost"], cost=order["cost"],
filled=order["filled"], filled=order.get("filled", order["amount"]),
order_date=datetime.strptime(order["order_date"], DATETIME_PRINT_FORMAT), order_date=datetime.strptime(order["order_date"], DATETIME_PRINT_FORMAT)
if order.get("order_date")
else None,
order_filled_date=( order_filled_date=(
datetime.fromtimestamp(order["order_filled_timestamp"] // 1000, tz=timezone.utc) datetime.fromtimestamp(order["order_filled_timestamp"] // 1000, tz=timezone.utc)
if order["order_filled_timestamp"] if order["order_filled_timestamp"]
else None else None
), ),
order_type=order["order_type"], order_type=order.get("order_type"),
price=order["price"], price=order.get("price", order.get("safe_price")),
ft_price=order["price"], ft_price=order.get("price", order.get("safe_price")),
remaining=order["remaining"], remaining=order.get("remaining", 0.0),
funding_fee=order.get("funding_fee", None), funding_fee=order.get("funding_fee", None),
ft_order_tag=order.get("ft_order_tag", None), ft_order_tag=order.get("ft_order_tag", None),
) )
@@ -1748,6 +1754,8 @@ class Trade(ModelBase, LocalTrade):
Float(), nullable=True, default=None Float(), nullable=True, default=None
) )
record_version: Mapped[int] = mapped_column(Integer, nullable=False, default=2) # type: ignore
def __init__(self, **kwargs): def __init__(self, **kwargs):
from_json = kwargs.pop("__FROM_JSON", None) from_json = kwargs.pop("__FROM_JSON", None)
super().__init__(**kwargs) super().__init__(**kwargs)

View File

@@ -242,6 +242,14 @@ class StrategyResolver(IResolver):
if has_after_fill: if has_after_fill:
strategy._ft_stop_uses_after_fill = True strategy._ft_stop_uses_after_fill = True
if check_override(strategy, IStrategy, "adjust_order_price") and (
check_override(strategy, IStrategy, "adjust_entry_price")
or check_override(strategy, IStrategy, "adjust_exit_price")
):
raise OperationalException(
"If you implement `adjust_order_price`, `adjust_entry_price` and "
"`adjust_exit_price` will not be used. Please pick one approach for your strategy."
)
return strategy return strategy
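The resolver now refuses strategies that mix the new combined callback with the legacy pair, since `adjust_entry_price`/`adjust_exit_price` would otherwise be silently ignored. In practice a strategy implements one or the other; a minimal hypothetical sketch (required strategy attributes omitted):

from freqtrade.strategy import IStrategy

# Either the combined callback ...
class MyStrategy(IStrategy):
    def adjust_order_price(self, trade, order, pair, current_time, proposed_rate,
                           current_order_rate, entry_tag, side, is_entry, **kwargs):
        return current_order_rate

# ... or the legacy entry-only callback - but not both, otherwise the resolver
# raises OperationalException when the strategy is loaded.
class MyLegacyStrategy(IStrategy):
    def adjust_entry_price(self, trade, order, pair, current_time, proposed_rate,
                           current_order_rate, entry_tag, side, **kwargs):
        return current_order_rate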
@staticmethod @staticmethod

View File

@@ -110,13 +110,17 @@ def handleExchangePayload(payload: ExchangeModePayloadMixin, config_loc: Config)
Handle exchange and trading mode payload. Handle exchange and trading mode payload.
Updates the configuration with the payload values. Updates the configuration with the payload values.
""" """
from freqtrade.configuration.directory_operations import create_datadir
if payload.exchange: if payload.exchange:
config_loc["exchange"]["name"] = payload.exchange config_loc["exchange"]["name"] = payload.exchange
config_loc.update({"datadir": create_datadir(config_loc, None)})
if payload.trading_mode: if payload.trading_mode:
config_loc["trading_mode"] = payload.trading_mode config_loc["trading_mode"] = payload.trading_mode
config_loc["candle_type_def"] = CandleType.get_default( config_loc["candle_type_def"] = CandleType.get_default(
config_loc.get("trading_mode", "spot") or "spot" config_loc.get("trading_mode", "spot") or "spot"
) )
if payload.margin_mode: if payload.margin_mode:
config_loc["margin_mode"] = payload.margin_mode config_loc["margin_mode"] = payload.margin_mode

View File

@@ -6,13 +6,12 @@ import logging
from abc import abstractmethod from abc import abstractmethod
from collections.abc import Generator, Sequence from collections.abc import Generator, Sequence
from datetime import date, datetime, timedelta, timezone from datetime import date, datetime, timedelta, timezone
from math import isnan
from typing import TYPE_CHECKING, Any from typing import TYPE_CHECKING, Any
import psutil import psutil
from dateutil.relativedelta import relativedelta from dateutil.relativedelta import relativedelta
from dateutil.tz import tzlocal from dateutil.tz import tzlocal
from numpy import inf, int64, mean, nan from numpy import inf, int64, isnan, mean, nan
from pandas import DataFrame, NaT from pandas import DataFrame, NaT
from sqlalchemy import func, select from sqlalchemy import func, select

View File

@@ -690,6 +690,104 @@ class IStrategy(ABC, HyperStrategyMixin):
""" """
return current_order_rate return current_order_rate
def adjust_exit_price(
self,
trade: Trade,
order: Order | None,
pair: str,
current_time: datetime,
proposed_rate: float,
current_order_rate: float,
entry_tag: str | None,
side: str,
**kwargs,
) -> float:
"""
Exit price re-adjustment logic, returning the user desired limit price.
This only executes when an order was already placed, still open (unfilled fully or partially)
and not timed out on subsequent candles after entry trigger.
For full documentation please go to https://www.freqtrade.io/en/latest/strategy-callbacks/
When not implemented by a strategy, returns current_order_rate as default.
If current_order_rate is returned then the existing order is maintained.
If None is returned then order gets canceled but not replaced by a new one.
:param pair: Pair that's currently analyzed
:param trade: Trade object.
:param order: Order object
:param current_time: datetime object, containing the current datetime
:param proposed_rate: Rate, calculated based on pricing settings in entry_pricing.
:param current_order_rate: Rate of the existing order in place.
:param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
:param side: 'long' or 'short' - indicating the direction of the proposed trade
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
:return float: New exit price value if provided
"""
return current_order_rate
def adjust_order_price(
self,
trade: Trade,
order: Order | None,
pair: str,
current_time: datetime,
proposed_rate: float,
current_order_rate: float,
entry_tag: str | None,
side: str,
is_entry: bool,
**kwargs,
) -> float:
"""
Exit and entry order price re-adjustment logic, returning the user desired limit price.
This only executes when an order was already placed, still open (unfilled fully or partially)
and not timed out on subsequent candles after entry trigger.
For full documentation please go to https://www.freqtrade.io/en/latest/strategy-callbacks/
When not implemented by a strategy, returns current_order_rate as default.
If current_order_rate is returned then the existing order is maintained.
If None is returned then order gets canceled but not replaced by a new one.
:param pair: Pair that's currently analyzed
:param trade: Trade object.
:param order: Order object
:param current_time: datetime object, containing the current datetime
:param proposed_rate: Rate, calculated based on pricing settings in entry_pricing.
:param current_order_rate: Rate of the existing order in place.
:param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
:param side: 'long' or 'short' - indicating the direction of the proposed trade
:param is_entry: True if the order is an entry order, False if it's an exit order.
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
:return float: New order price value if provided
"""
if is_entry:
return self.adjust_entry_price(
trade=trade,
order=order,
pair=pair,
current_time=current_time,
proposed_rate=proposed_rate,
current_order_rate=current_order_rate,
entry_tag=entry_tag,
side=side,
**kwargs,
)
else:
return self.adjust_exit_price(
trade=trade,
order=order,
pair=pair,
current_time=current_time,
proposed_rate=proposed_rate,
current_order_rate=current_order_rate,
entry_tag=entry_tag,
side=side,
**kwargs,
)
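A hedged example of how a strategy might use the new combined callback: chase an unfilled order toward the freshly proposed rate on each new candle, and cancel stale entry orders without replacing them. The thresholds and behaviour here are purely illustrative, not a recommended setup:

from datetime import datetime, timedelta

def adjust_order_price(self, trade, order, pair, current_time: datetime,
                       proposed_rate: float, current_order_rate: float,
                       entry_tag, side: str, is_entry: bool, **kwargs) -> float | None:
    if order is None:
        return current_order_rate
    # Cancel stale entry orders outright (returning None cancels without replacing).
    if is_entry and current_time - order.order_date_utc > timedelta(minutes=30):
        return None
    # Otherwise move the order halfway toward the freshly proposed rate.
    return (current_order_rate + proposed_rate) / 2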
def leverage( def leverage(
self, self,
pair: str, pair: str,

View File

@@ -40,7 +40,7 @@ def custom_entry_price(
""" """
return proposed_rate return proposed_rate
def adjust_entry_price( def adjust_order_price(
self, self,
trade: Trade, trade: Trade,
order: Order | None, order: Order | None,
@@ -50,10 +50,11 @@ def adjust_entry_price(
current_order_rate: float, current_order_rate: float,
entry_tag: str | None, entry_tag: str | None,
side: str, side: str,
is_entry: bool,
**kwargs, **kwargs,
) -> float: ) -> float:
""" """
Entry price re-adjustment logic, returning the user desired limit price. Exit and entry order price re-adjustment logic, returning the user desired limit price.
This only executes when an order was already placed, still open (unfilled fully or partially) This only executes when an order was already placed, still open (unfilled fully or partially)
and not timed out on subsequent candles after entry trigger. and not timed out on subsequent candles after entry trigger.
@@ -71,6 +72,7 @@ def adjust_entry_price(
:param current_order_rate: Rate of the existing order in place. :param current_order_rate: Rate of the existing order in place.
:param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal. :param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
:param side: 'long' or 'short' - indicating the direction of the proposed trade :param side: 'long' or 'short' - indicating the direction of the proposed trade
:param is_entry: True if the order is an entry order, False if it's an exit order.
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy. :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
:return float: New entry price value if provided :return float: New entry price value if provided

View File

@@ -197,7 +197,7 @@ class Wallets:
# Position is not open ... # Position is not open ...
continue continue
size = self._exchange._contracts_to_amount(symbol, position["contracts"]) size = self._exchange._contracts_to_amount(symbol, position["contracts"])
collateral = safe_value_fallback(position, "collateral", "initialMargin", 0.0) collateral = safe_value_fallback(position, "initialMargin", "collateral", 0.0)
leverage = position.get("leverage") leverage = position.get("leverage")
_parsed_positions[symbol] = PositionWallet( _parsed_positions[symbol] = PositionWallet(
symbol, symbol,

View File

@@ -1,7 +1,7 @@
from freqtrade_client.ft_rest_client import FtRestClient from freqtrade_client.ft_rest_client import FtRestClient
__version__ = "2025.2-dev" __version__ = "2025.3-dev"
if "dev" in __version__: if "dev" in __version__:
from pathlib import Path from pathlib import Path

View File

@@ -287,8 +287,6 @@ max-complexity = 12
[tool.ruff.lint.per-file-ignores] [tool.ruff.lint.per-file-ignores]
"freqtrade/freqai/**/*.py" = [ "freqtrade/freqai/**/*.py" = [
"S311", # Standard pseudo-random generators are not suitable for cryptographic purposes "S311", # Standard pseudo-random generators are not suitable for cryptographic purposes
"B006", # Bugbear - mutable default argument
"B008", # bugbear - Do not perform function calls in argument defaults
] ]
"tests/**/*.py" = [ "tests/**/*.py" = [
"S101", # allow assert in tests "S101", # allow assert in tests

View File

@@ -7,17 +7,17 @@
-r docs/requirements-docs.txt -r docs/requirements-docs.txt
coveralls==4.0.1 coveralls==4.0.1
ruff==0.9.5 ruff==0.9.9
mypy==1.15.0 mypy==1.15.0
pre-commit==4.1.0 pre-commit==4.1.0
pytest==8.3.4 pytest==8.3.5
pytest-asyncio==0.25.3 pytest-asyncio==0.25.3
pytest-cov==6.0.0 pytest-cov==6.0.0
pytest-mock==3.14.0 pytest-mock==3.14.0
pytest-random-order==1.1.1 pytest-random-order==1.1.1
pytest-timeout==2.3.1 pytest-timeout==2.3.1
pytest-xdist==3.6.1 pytest-xdist==3.6.1
isort==6.0.0 isort==6.0.1
# For datetime mocking # For datetime mocking
time-machine==2.16.0 time-machine==2.16.0
@@ -27,6 +27,6 @@ nbconvert==7.16.6
# mypy types # mypy types
types-cachetools==5.5.0.20240820 types-cachetools==5.5.0.20240820
types-filelock==3.2.7 types-filelock==3.2.7
types-requests==2.32.0.20241016 types-requests==2.32.0.20250301
types-tabulate==0.9.0.20241207 types-tabulate==0.9.0.20241207
types-python-dateutil==2.9.0.20241206 types-python-dateutil==2.9.0.20241206

View File

@@ -6,7 +6,7 @@
scikit-learn==1.6.1 scikit-learn==1.6.1
joblib==1.4.2 joblib==1.4.2
catboost==1.2.7; 'arm' not in platform_machine catboost==1.2.7; 'arm' not in platform_machine
lightgbm==4.5.0 lightgbm==4.6.0
xgboost==2.1.4 xgboost==2.1.4
tensorboard==2.18.0 tensorboard==2.19.0
datasieve==0.1.7 datasieve==0.1.7

View File

@@ -2,7 +2,7 @@
-r requirements.txt -r requirements.txt
# Required for hyperopt # Required for hyperopt
scipy==1.15.1 scipy==1.15.2
scikit-learn==1.6.1 scikit-learn==1.6.1
ft-scikit-optimize==0.9.2 ft-scikit-optimize==0.9.2
filelock==3.17.0 filelock==3.17.0

View File

@@ -4,16 +4,15 @@ bottleneck==1.4.2
numexpr==2.10.2 numexpr==2.10.2
pandas-ta==0.3.14b pandas-ta==0.3.14b
ccxt==4.4.58 ccxt==4.4.64
cryptography==42.0.8; platform_machine == 'armv7l' cryptography==44.0.2
cryptography==44.0.1; platform_machine != 'armv7l'
aiohttp==3.9.5 aiohttp==3.9.5
SQLAlchemy==2.0.38 SQLAlchemy==2.0.38
python-telegram-bot==21.10 python-telegram-bot==21.11.1
# can't be hard-pinned due to telegram-bot pinning httpx with ~ # can't be hard-pinned due to telegram-bot pinning httpx with ~
httpx>=0.24.1 httpx>=0.24.1
humanize==4.11.0 humanize==4.12.1
cachetools==5.5.1 cachetools==5.5.2
requests==2.32.3 requests==2.32.3
urllib3==2.3.0 urllib3==2.3.0
jsonschema==4.23.0 jsonschema==4.23.0
@@ -21,10 +20,10 @@ TA-Lib==0.4.38
technical==1.5.0 technical==1.5.0
tabulate==0.9.0 tabulate==0.9.0
pycoingecko==3.2.0 pycoingecko==3.2.0
jinja2==3.1.5 jinja2==3.1.6
joblib==1.4.2 joblib==1.4.2
rich==13.9.4 rich==13.9.4
pyarrow==19.0.0; platform_machine != 'armv7l' pyarrow==19.0.1; platform_machine != 'armv7l'
# find first, C search in arrays # find first, C search in arrays
py_find_1st==1.1.7 py_find_1st==1.1.7
@@ -38,12 +37,12 @@ orjson==3.10.15
sdnotify==0.3.2 sdnotify==0.3.2
# API Server # API Server
fastapi==0.115.8 fastapi==0.115.11
pydantic==2.10.6 pydantic==2.10.6
uvicorn==0.34.0 uvicorn==0.34.0
pyjwt==2.10.1 pyjwt==2.10.1
aiofiles==24.1.0 aiofiles==24.1.0
psutil==6.1.1 psutil==7.0.0
# Building config files interactively # Building config files interactively
questionary==2.1.0 questionary==2.1.0
@@ -56,7 +55,7 @@ pytz==2025.1
schedule==1.2.2 schedule==1.2.2
#WS Messages #WS Messages
websockets==14.2 websockets==15.0
janus==2.0.0 janus==2.0.0
ast-comments==1.2.2 ast-comments==1.2.2

View File

@@ -1,8 +1,10 @@
import subprocess import subprocess
import time import time
from tests.conftest import is_arm, is_mac
MAXIMUM_STARTUP_TIME = 0.5
MAXIMUM_STARTUP_TIME = 0.7 if is_mac() and not is_arm() else 0.5
def test_startup_time(): def test_startup_time():
@@ -14,4 +16,5 @@ def test_startup_time():
elapsed = time.time() - start elapsed = time.time() - start
assert elapsed < MAXIMUM_STARTUP_TIME, ( assert elapsed < MAXIMUM_STARTUP_TIME, (
"The startup time is too long, try to use lazy import in the command entry function" "The startup time is too long, try to use lazy import in the command entry function"
f" (maximum {MAXIMUM_STARTUP_TIME}s, got {elapsed}s)"
) )

View File

@@ -30,6 +30,7 @@ from freqtrade.data.metrics import (
calculate_max_drawdown, calculate_max_drawdown,
calculate_sharpe, calculate_sharpe,
calculate_sortino, calculate_sortino,
calculate_sqn,
calculate_underwater, calculate_underwater,
combine_dataframes_with_mean, combine_dataframes_with_mean,
combined_dataframes_with_rel_mean, combined_dataframes_with_rel_mean,
@@ -457,6 +458,42 @@ def test_calculate_calmar(testdatadir):
assert pytest.approx(calmar) == 559.040508 assert pytest.approx(calmar) == 559.040508
def test_calculate_sqn(testdatadir):
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
sqn = calculate_sqn(DataFrame(), 0)
assert sqn == 0.0
sqn = calculate_sqn(
bt_data,
0.01,
)
assert isinstance(sqn, float)
assert pytest.approx(sqn) == 3.2991
@pytest.mark.parametrize(
"profits,starting_balance,expected_sqn,description",
[
([1.0, -0.5, 2.0, -1.0, 0.5, 1.5, -0.5, 1.0], 100, 1.3229, "Mixed profits/losses"),
([], 100, 0.0, "Empty dataframe"),
([1.0, 0.5, 2.0, 1.5, 0.8], 100, 4.3657, "All winning trades"),
([-1.0, -0.5, -2.0, -1.5, -0.8], 100, -4.3657, "All losing trades"),
([1.0], 100, -100, "Single trade"),
],
)
def test_calculate_sqn_cases(profits, starting_balance, expected_sqn, description):
"""
Test SQN calculation with various scenarios:
"""
trades = DataFrame({"profit_abs": profits})
sqn = calculate_sqn(trades, starting_balance=starting_balance)
assert isinstance(sqn, float)
assert pytest.approx(sqn, rel=1e-4) == expected_sqn
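The all-winning case can be checked by hand: profits (1.0, 0.5, 2.0, 1.5, 0.8) on a 100 starting balance give return ratios with mean 0.0116 and sample standard deviation of about 0.00594, so sqrt(5) * 0.0116 / 0.00594 is approximately 4.366, matching the 4.3657 expected above. A two-line numpy check:

import numpy as np
r = np.array([1.0, 0.5, 2.0, 1.5, 0.8]) / 100
print(np.sqrt(len(r)) * r.mean() / r.std(ddof=1))  # ~4.3657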
@pytest.mark.parametrize( @pytest.mark.parametrize(
"start,end,days, expected", "start,end,days, expected",
[ [

View File

@@ -6,6 +6,7 @@ import ccxt
import pandas as pd import pandas as pd
import pytest import pytest
from freqtrade.data.converter.trade_converter import trades_dict_to_list
from freqtrade.enums import CandleType, MarginMode, TradingMode from freqtrade.enums import CandleType, MarginMode, TradingMode
from freqtrade.exceptions import DependencyException, InvalidOrderException, OperationalException from freqtrade.exceptions import DependencyException, InvalidOrderException, OperationalException
from freqtrade.exchange.exchange_utils_timeframe import timeframe_to_seconds from freqtrade.exchange.exchange_utils_timeframe import timeframe_to_seconds
@@ -1002,6 +1003,7 @@ def test_get_maintenance_ratio_and_amt_binance(
async def test__async_get_trade_history_id_binance(default_conf_usdt, mocker, fetch_trades_result): async def test__async_get_trade_history_id_binance(default_conf_usdt, mocker, fetch_trades_result):
default_conf_usdt["exchange"]["only_from_ccxt"] = True
exchange = get_patched_exchange(mocker, default_conf_usdt, exchange="binance") exchange = get_patched_exchange(mocker, default_conf_usdt, exchange="binance")
async def mock_get_trade_hist(pair, *args, **kwargs): async def mock_get_trade_hist(pair, *args, **kwargs):
@@ -1056,3 +1058,53 @@ async def test__async_get_trade_history_id_binance(default_conf_usdt, mocker, fe
# Clean up event loop to avoid warnings # Clean up event loop to avoid warnings
exchange.close() exchange.close()
async def test__async_get_trade_history_id_binance_fast(
default_conf_usdt, mocker, fetch_trades_result
):
default_conf_usdt["exchange"]["only_from_ccxt"] = False
exchange = get_patched_exchange(mocker, default_conf_usdt, exchange="binance")
async def mock_get_trade_hist(pair, *args, **kwargs):
if "since" in kwargs:
pass
# older than initial call
# if kwargs["since"] < 1565798399752:
# return []
# else:
# # Don't expect to get here
# raise ValueError("Unexpected call")
# # return fetch_trades_result[:-2]
elif kwargs.get("params", {}).get(exchange._trades_pagination_arg) == "0":
# Return first 3
return fetch_trades_result[:-2]
# elif kwargs.get("params", {}).get(exchange._trades_pagination_arg) in (
# fetch_trades_result[-3]["id"],
# 1565798399752,
# ):
# # Return 2
# return fetch_trades_result[-3:-1]
# else:
# # Return last 2
# return fetch_trades_result[-2:]
pair = "ETH/BTC"
mocker.patch(
"freqtrade.exchange.binance.download_archive_trades",
return_value=(pair, trades_dict_to_list(fetch_trades_result[-2:])),
)
exchange._api_async.fetch_trades = MagicMock(side_effect=mock_get_trade_hist)
ret = await exchange._async_get_trade_history(
pair,
since=fetch_trades_result[0]["timestamp"],
until=fetch_trades_result[-1]["timestamp"] - 1,
)
assert ret[0] == pair
assert isinstance(ret[1], list)
# Clean up event loop to avoid warnings
exchange.close()

View File

@@ -14,11 +14,15 @@ from freqtrade.enums import CandleType
from freqtrade.exchange.binance_public_data import ( from freqtrade.exchange.binance_public_data import (
BadHttpStatus, BadHttpStatus,
Http404, Http404,
binance_vision_trades_zip_url,
binance_vision_zip_name, binance_vision_zip_name,
download_archive_ohlcv, download_archive_ohlcv,
download_archive_trades,
get_daily_ohlcv, get_daily_ohlcv,
get_daily_trades,
) )
from freqtrade.util.datetime_helpers import dt_ts, dt_utc from freqtrade.util.datetime_helpers import dt_ts, dt_utc
from ft_client.test_client.test_rest_client import log_has_re
@pytest.fixture(scope="module") @pytest.fixture(scope="module")
@@ -337,3 +341,156 @@ async def test_get_daily_ohlcv(mocker, testdatadir):
with pytest.raises(zipfile.BadZipFile): with pytest.raises(zipfile.BadZipFile):
df = await get_daily_ohlcv(symbol, timeframe, CandleType.SPOT, date, session) df = await get_daily_ohlcv(symbol, timeframe, CandleType.SPOT, date, session)
assert get.call_count == 4 # 1 + 3 default retries assert get.call_count == 4 # 1 + 3 default retries
async def test_download_archive_trades(mocker, caplog):
pair = "BTC/USDT"
since_ms = dt_ts(dt_utc(2020, 1, 1))
until_ms = dt_ts(dt_utc(2020, 1, 2))
markets = {"BTC/USDT": {"id": "BTCUSDT"}, "BTC/USDT:USDT": {"id": "BTCUSDT"}}
mocker.patch("freqtrade.exchange.binance_public_data.get_daily_trades", return_value=[[2, 3]])
pair1, res = await download_archive_trades(
CandleType.SPOT, pair, since_ms=since_ms, until_ms=until_ms, markets=markets
)
assert pair1 == pair
assert res == [[2, 3], [2, 3]]
mocker.patch(
"freqtrade.exchange.binance_public_data.get_daily_trades",
side_effect=Http404("xxx", dt_utc(2020, 1, 1), "http://example.com/something"),
)
pair1, res = await download_archive_trades(
CandleType.SPOT, pair, since_ms=since_ms, until_ms=until_ms, markets=markets
)
assert pair1 == pair
assert res == []
# exit on day 1
assert log_has_re("Fast download is unavailable", caplog)
# Test fail on day 2
caplog.clear()
mocker.patch(
"freqtrade.exchange.binance_public_data.get_daily_trades",
side_effect=[
[[2, 3]],
[[2, 3]],
Http404("xxx", dt_utc(2020, 1, 2), "http://example.com/something"),
[[2, 3]],
],
)
# Download 3 days
until_ms = dt_ts(dt_utc(2020, 1, 3))
pair1, res = await download_archive_trades(
CandleType.SPOT, pair, since_ms=since_ms, until_ms=until_ms, markets=markets
)
assert pair1 == pair
assert res == [[2, 3], [2, 3]]
assert log_has_re(r"Binance fast download .*stopped", caplog)
async def test_download_archive_trades_exception(mocker, caplog):
pair = "BTC/USDT"
since_ms = dt_ts(dt_utc(2020, 1, 1))
until_ms = dt_ts(dt_utc(2020, 1, 2))
markets = {"BTC/USDT": {"id": "BTCUSDT"}, "BTC/USDT:USDT": {"id": "BTCUSDT"}}
mocker.patch(
"freqtrade.exchange.binance_public_data.aiohttp.ClientSession.get", side_effect=RuntimeError
)
pair1, res = await download_archive_trades(
CandleType.SPOT, pair, since_ms=since_ms, until_ms=until_ms, markets=markets
)
assert pair1 == pair
assert res == []
mocker.patch(
"freqtrade.exchange.binance_public_data._download_archive_trades", side_effect=RuntimeError
)
await download_archive_trades(
CandleType.SPOT, pair, since_ms=since_ms, until_ms=until_ms, markets=markets
)
assert pair1 == pair
assert res == []
assert log_has_re("An exception occurred during fast trades download", caplog)
async def test_binance_vision_trades_zip_url():
url = binance_vision_trades_zip_url("BTCUSDT", CandleType.SPOT, dt_utc(2023, 10, 27))
assert (
url == "https://data.binance.vision/data/spot/daily/aggTrades/"
"BTCUSDT/BTCUSDT-aggTrades-2023-10-27.zip"
)
url = binance_vision_trades_zip_url("BTCUSDT", CandleType.FUTURES, dt_utc(2023, 10, 28))
assert (
url == "https://data.binance.vision/data/futures/um/daily/aggTrades/"
"BTCUSDT/BTCUSDT-aggTrades-2023-10-28.zip"
)
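The asserted URLs encode the data.binance.vision layout: spot archives live under data/spot/daily/aggTrades/... and USDT-margined futures under data/futures/um/daily/aggTrades/... A sketch of a URL builder consistent with these two assertions (the real helper may differ in signature and detail):

from datetime import date

def trades_zip_url_sketch(symbol: str, futures: bool, day: date) -> str:
    # Mirrors the two assertions above; only spot and USDT-M futures are covered here.
    market = "futures/um" if futures else "spot"
    return (
        f"https://data.binance.vision/data/{market}/daily/aggTrades/"
        f"{symbol}/{symbol}-aggTrades-{day.isoformat()}.zip"
    )

assert trades_zip_url_sketch("BTCUSDT", False, date(2023, 10, 27)).endswith(
    "spot/daily/aggTrades/BTCUSDT/BTCUSDT-aggTrades-2023-10-27.zip"
)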
async def test_get_daily_trades(mocker, testdatadir):
symbol = "PEPEUSDT"
symbol_futures = "APEUSDT"
date = dt_utc(2024, 10, 28).date()
first_date = 1729987202368
last_date = 1730073596350
async with aiohttp.ClientSession() as session:
spot_path = (
testdatadir / "binance/binance_public_data/spot-PEPEUSDT-aggTrades-2024-10-27.zip"
)
get = mocker.patch(
"freqtrade.exchange.binance_public_data.aiohttp.ClientSession.get",
return_value=MockResponse(spot_path.read_bytes(), 200),
)
res = await get_daily_trades(symbol, CandleType.SPOT, date, session)
assert get.call_count == 1
assert res[0][0] == first_date
assert res[-1][0] == last_date
futures_path = (
testdatadir / "binance/binance_public_data/futures-APEUSDT-aggTrades-2024-10-18.zip"
)
get = mocker.patch(
"freqtrade.exchange.binance_public_data.aiohttp.ClientSession.get",
return_value=MockResponse(futures_path.read_bytes(), 200),
)
res_fut = await get_daily_trades(symbol_futures, CandleType.FUTURES, date, session)
assert get.call_count == 1
assert res_fut[0][0] == 1729209603958
assert res_fut[-1][0] == 1729295981272
get = mocker.patch(
"freqtrade.exchange.binance_public_data.aiohttp.ClientSession.get",
return_value=MockResponse(b"", 404),
)
with pytest.raises(Http404):
await get_daily_trades(symbol, CandleType.SPOT, date, session, retry_delay=0)
assert get.call_count == 1
get = mocker.patch(
"freqtrade.exchange.binance_public_data.aiohttp.ClientSession.get",
return_value=MockResponse(b"", 500),
)
mocker.patch("asyncio.sleep")
with pytest.raises(BadHttpStatus):
await get_daily_trades(symbol, CandleType.SPOT, date, session)
assert get.call_count == 4 # 1 + 3 default retries
get = mocker.patch(
"freqtrade.exchange.binance_public_data.aiohttp.ClientSession.get",
return_value=MockResponse(b"nop", 200),
)
with pytest.raises(zipfile.BadZipFile):
await get_daily_trades(symbol, CandleType.SPOT, date, session)
assert get.call_count == 4 # 1 + 3 default retries

View File

@@ -2177,13 +2177,11 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_
caplog.clear() caplog.clear()
async def mock_get_candle_hist_error(pair, *args, **kwargs): exchange._async_get_candle_history = get_mock_coro(side_effect=TimeoutError())
raise TimeoutError() with pytest.raises(TimeoutError):
exchange.get_historic_ohlcv(
exchange._async_get_candle_history = MagicMock(side_effect=mock_get_candle_hist_error) pair, "5m", dt_ts(dt_now() - timedelta(seconds=since)), candle_type=candle_type
ret = exchange.get_historic_ohlcv( )
pair, "5m", dt_ts(dt_now() - timedelta(seconds=since)), candle_type=candle_type
)
assert log_has_re(r"Async code raised an exception: .*", caplog) assert log_has_re(r"Async code raised an exception: .*", caplog)
@@ -2373,6 +2371,8 @@ def test_refresh_latest_trades(
caplog.set_level(logging.DEBUG) caplog.set_level(logging.DEBUG)
use_trades_conf = default_conf use_trades_conf = default_conf
use_trades_conf["exchange"]["use_public_trades"] = True use_trades_conf["exchange"]["use_public_trades"] = True
use_trades_conf["exchange"]["only_from_ccxt"] = True
use_trades_conf["datadir"] = tmp_path use_trades_conf["datadir"] = tmp_path
use_trades_conf["orderflow"] = {"max_candles": 1500} use_trades_conf["orderflow"] = {"max_candles": 1500}
exchange = get_patched_exchange(mocker, use_trades_conf) exchange = get_patched_exchange(mocker, use_trades_conf)
@@ -3365,6 +3365,7 @@ async def test__async_fetch_trades_contract_size(
async def test__async_get_trade_history_id(
    default_conf, mocker, exchange_name, fetch_trades_result
):
    default_conf["exchange"]["only_from_ccxt"] = True
    exchange = get_patched_exchange(mocker, default_conf, exchange=exchange_name)
    if exchange._trades_pagination != "id":
        exchange.close()

View File

@@ -1,5 +1,6 @@
# pragma pylint: disable=missing-docstring, protected-access, invalid-name
from datetime import datetime, timedelta, timezone
from math import isnan, nan

import pytest

from ccxt import (
@@ -321,6 +322,7 @@ def test_amount_to_precision(
        (2.9977, TICK_SIZE, 0.005, 3.0, ROUND),
        (234.24, TICK_SIZE, 0.5, 234.0, ROUND),
        (234.26, TICK_SIZE, 0.5, 234.5, ROUND),
        (nan, TICK_SIZE, 3, nan, ROUND),
        # Tests for TRUNCATTE
        (2.34559, DECIMAL_PLACES, 4, 2.3455, TRUNCATE),
        (2.34559, DECIMAL_PLACES, 5, 2.34559, TRUNCATE),
@@ -359,10 +361,11 @@ def test_amount_to_precision(
     ],
 )
 def test_price_to_precision(price, precision_mode, precision, expected, rounding_mode):
-    assert (
-        price_to_precision(price, precision, precision_mode, rounding_mode=rounding_mode)
-        == expected
-    )
+    result = price_to_precision(price, precision, precision_mode, rounding_mode=rounding_mode)
+    if not isnan(expected):
+        assert result == expected
+    else:
+        assert isnan(result)


 @pytest.mark.parametrize(
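
The `isnan` branch exists because `nan` never compares equal to anything, including itself, so a plain `assert result == expected` could never pass for the new `(nan, TICK_SIZE, 3, nan, ROUND)` case:

```python
from math import isnan, nan

assert nan != nan        # every comparison involving NaN is False...
assert not (nan == nan)  # ...including equality with itself
assert isnan(nan)        # hence the explicit isnan() check in the test above
```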

View File

@@ -150,7 +150,9 @@ def test_get_pair_data_for_features_with_prealoaded_data(mocker, freqai_conf):
     freqai.dd.load_all_pair_histories(timerange, freqai.dk)
     _, base_df = freqai.dd.get_base_and_corr_dataframes(timerange, "LTC/BTC", freqai.dk)
-    df = freqai.dk.get_pair_data_for_features("LTC/BTC", "5m", strategy, base_dataframes=base_df)
+    df = freqai.dk.get_pair_data_for_features(
+        "LTC/BTC", "5m", strategy, {}, base_dataframes=base_df
+    )

     assert df is base_df["5m"]
     assert not df.empty
@@ -170,7 +172,9 @@ def test_get_pair_data_for_features_without_preloaded_data(mocker, freqai_conf):
     freqai.dd.load_all_pair_histories(timerange, freqai.dk)
     base_df = {"5m": pd.DataFrame()}
-    df = freqai.dk.get_pair_data_for_features("LTC/BTC", "5m", strategy, base_dataframes=base_df)
+    df = freqai.dk.get_pair_data_for_features(
+        "LTC/BTC", "5m", strategy, {}, base_dataframes=base_df
+    )

     assert df is not base_df["5m"]
     assert not df.empty

View File

@@ -701,9 +701,9 @@ def test_process_trade_creation(
     assert pytest.approx(trade.amount) == 0
     assert pytest.approx(trade.amount_requested) == 60 / ticker_usdt.return_value[ticker_side]
-    assert log_has(
+    assert log_has_re(
         f"{'Short' if is_short else 'Long'} signal found: about create a new trade for ETH/USDT "
-        "with stake_amount: 60.0 ...",
+        r"with stake_amount: 60.0 and price: .*",
         caplog,
     )

     mocker.patch("freqtrade.freqtradebot.FreqtradeBot._check_and_execute_exit")
@@ -3743,8 +3743,9 @@ def test_trailing_stop_loss_positive(
 @pytest.mark.parametrize("is_short", [False, True])
 def test_disable_ignore_roi_if_entry_signal(
-    default_conf_usdt, limit_order, limit_order_open, is_short, fee, mocker
+    default_conf_usdt, limit_order, limit_order_open, is_short, fee, mocker, time_machine
 ) -> None:
+    time_machine.move_to("2025-01-10 08:00:16 +00:00")
     patch_RPCManager(mocker)
     patch_exchange(mocker)
     eside = entry_side(is_short)
@@ -3773,6 +3774,13 @@ def test_disable_ignore_roi_if_entry_signal(
    patch_get_signal(freqtrade, enter_long=not is_short, enter_short=is_short, exit_short=is_short)
    assert freqtrade.handle_trade(trade) is True

    # Test if entry-signal is absent
    patch_get_signal(freqtrade)
    # Signal was evaluated already - no action.
    assert freqtrade.handle_trade(trade) is False

    # Move to after the candle expired
    time_machine.shift(timedelta(hours=5))

    # Test if entry-signal is absent
    patch_get_signal(freqtrade)
    assert freqtrade.handle_trade(trade) is True
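
The `time_machine` fixture (from the `time-machine` pytest plugin) is what makes the two absent-signal checks behave differently: the first `handle_trade` call happens inside the candle whose signal was already consumed, the second only after the clock has been shifted past it. A minimal, self-contained illustration of the fixture API used here (the test name is made up):

```python
from datetime import datetime, timedelta, timezone


def test_time_machine_demo(time_machine):
    time_machine.move_to("2025-01-10 08:00:16 +00:00")  # pin "now" to a known point
    assert datetime.now(timezone.utc).hour == 8
    time_machine.shift(timedelta(hours=5))              # jump forward without sleeping
    assert datetime.now(timezone.utc).hour == 13
```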

View File

@@ -436,6 +436,7 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
    # Replace new order with diff. order at a lower price
    freqtrade.strategy.adjust_entry_price = MagicMock(return_value=1.95)
    freqtrade.strategy.adjust_exit_price = MagicMock(side_effect=ValueError)
    freqtrade.strategy.adjust_trade_position = MagicMock(return_value=None)
    freqtrade.process()
    trade = Trade.get_trades().first()
@@ -445,6 +446,8 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
    assert pytest.approx(trade.stake_amount) == 60
    assert trade.orders[-1].price == 1.95
    assert pytest.approx(trade.orders[-1].cost) == 120 * leverage
    assert freqtrade.strategy.adjust_entry_price.call_count == 1
    assert freqtrade.strategy.adjust_exit_price.call_count == 0

    # Fill DCA order
    freqtrade.strategy.adjust_trade_position = MagicMock(return_value=None)
@@ -469,6 +472,7 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
    mocker.patch(f"{EXMS}._dry_is_price_crossed", return_value=False)
    freqtrade.strategy.custom_exit = MagicMock(return_value="Exit now")
    freqtrade.strategy.adjust_entry_price = MagicMock(return_value=2.02)
    freqtrade.strategy.adjust_exit_price = MagicMock(side_effect=ValueError)
    freqtrade.process()
    trade = Trade.get_trades().first()
    assert len(trade.orders) == 5
@@ -478,8 +482,9 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
     assert pytest.approx(trade.amount) == 91.689215 * leverage
     assert pytest.approx(trade.orders[-1].amount) == 91.689215 * leverage
     assert freqtrade.strategy.adjust_entry_price.call_count == 0
+    assert freqtrade.strategy.adjust_exit_price.call_count == 0

-    # Process again, should not adjust entry price
+    # Process again, should not adjust price
     freqtrade.process()
     trade = Trade.get_trades().first()
@@ -490,6 +495,21 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
    assert trade.orders[-1].price == 2.02
    # Adjust entry price cannot be called - this is an exit order
    assert freqtrade.strategy.adjust_entry_price.call_count == 0
    assert freqtrade.strategy.adjust_exit_price.call_count == 1

    freqtrade.strategy.adjust_exit_price = MagicMock(return_value=2.03)
    # Process again, should adjust exit price
    freqtrade.process()
    trade = Trade.get_trades().first()
    assert trade.orders[-2].status == "canceled"
    assert len(trade.orders) == 6
    assert trade.orders[-1].side == trade.exit_side
    assert trade.orders[-1].status == "open"
    assert trade.orders[-1].price == 2.03
    assert freqtrade.strategy.adjust_entry_price.call_count == 0
    assert freqtrade.strategy.adjust_exit_price.call_count == 1


@pytest.mark.parametrize("leverage", [1, 2])

View File

@@ -45,6 +45,7 @@ class BTContainer(NamedTuple):
    leverage: float = 1.0
    timeout: int | None = None
    adjust_entry_price: float | None = None
    adjust_exit_price: float | None = None
    adjust_trade_position: list[float] | None = None

View File

@@ -1217,6 +1217,46 @@ tc57 = BTContainer(
    ],
)
# Test 58: Custom-exit-price short - below all candles
tc58 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
[0, 5000, 5050, 4950, 5000, 6172, 0, 0, 1, 0],
[1, 5000, 5200, 4951, 5000, 6172, 0, 0, 0, 0], # enter trade (signal on last candle)
[2, 4900, 5250, 4900, 5100, 6172, 0, 0, 0, 1], # Exit - delayed
[3, 5100, 5100, 4650, 4750, 6172, 0, 0, 0, 0], #
[4, 4750, 5100, 4350, 4750, 6172, 0, 0, 0, 0],
],
stop_loss=-0.10,
roi={"0": 1.00},
profit_perc=-0.01,
use_exit_signal=True,
timeout=1000,
custom_exit_price=4300,
adjust_exit_price=5050,
trades=[BTrade(exit_reason=ExitType.EXIT_SIGNAL, open_tick=1, close_tick=4, is_short=True)],
)
# Test 59: Custom-exit-price above all candles - readjust order
tc59 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
[0, 5000, 5050, 4950, 5000, 6172, 1, 0],
[1, 5000, 5500, 4951, 5000, 6172, 0, 0],
[2, 4900, 5250, 4500, 5100, 6172, 0, 1], # exit
[3, 5100, 5100, 4650, 4750, 6172, 0, 0], # order readjust
[4, 4750, 4950, 4350, 4750, 6172, 0, 0],
],
stop_loss=-0.2,
roi={"0": 0.10},
profit_perc=-0.02,
use_exit_signal=True,
timeout=1000,
custom_exit_price=5300,
adjust_exit_price=4900,
trades=[BTrade(exit_reason=ExitType.EXIT_SIGNAL, open_tick=1, close_tick=4, is_short=False)],
)
TESTS = [
    tc0,
@@ -1277,6 +1317,8 @@ TESTS = [
    tc55,
    tc56,
    tc57,
    tc58,
    tc59,
]
@@ -1330,6 +1372,8 @@ def test_backtest_results(default_conf, mocker, caplog, data: BTContainer) -> No
    )
    if data.adjust_entry_price:
        backtesting.strategy.adjust_entry_price = MagicMock(return_value=data.adjust_entry_price)
    if data.adjust_exit_price:
        backtesting.strategy.adjust_exit_price = MagicMock(return_value=data.adjust_exit_price)

    backtesting.strategy.use_custom_stoploss = data.use_custom_stoploss
    backtesting.strategy.leverage = lambda **kwargs: data.leverage

View File

@@ -162,7 +162,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
     backtesting.strategy.adjust_trade_position = MagicMock(return_value=None)
     assert pytest.approx(trade.liquidation_price) == (0.10278333 if leverage == 1 else 1.2122249)
-    trade = backtesting._get_adjust_trade_entry_for_candle(trade, row_enter, current_time)
+    trade = backtesting._check_adjust_trade_for_candle(trade, row_enter, current_time)
     assert trade
     assert pytest.approx(trade.stake_amount) == 100.0
     assert pytest.approx(trade.amount) == 47.61904762 * leverage
@@ -170,7 +170,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
     # Increase position by 100
     backtesting.strategy.adjust_trade_position = MagicMock(return_value=(100, "PartIncrease"))
-    trade = backtesting._get_adjust_trade_entry_for_candle(trade, row_enter, current_time)
+    trade = backtesting._check_adjust_trade_for_candle(trade, row_enter, current_time)
     liq_price = 0.1038916 if leverage == 1 else 1.2127791
     assert trade
@@ -184,7 +184,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
     backtesting.strategy.adjust_trade_position = MagicMock(return_value=-500)
     current_time = row_exit[0].to_pydatetime()
-    trade = backtesting._get_adjust_trade_entry_for_candle(trade, row_exit, current_time)
+    trade = backtesting._check_adjust_trade_for_candle(trade, row_exit, current_time)
     assert trade
     assert pytest.approx(trade.stake_amount) == 200.0
@@ -195,7 +195,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
     # Reduce position by 50
     backtesting.strategy.adjust_trade_position = MagicMock(return_value=(-100, "partDecrease"))
-    trade = backtesting._get_adjust_trade_entry_for_candle(trade, row_exit, current_time)
+    trade = backtesting._check_adjust_trade_for_candle(trade, row_exit, current_time)
     assert trade
     assert pytest.approx(trade.stake_amount) == 100.0
@@ -208,7 +208,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
     # Adjust below minimum
     backtesting.strategy.adjust_trade_position = MagicMock(return_value=-99)
-    trade = backtesting._get_adjust_trade_entry_for_candle(trade, row_exit, current_time)
+    trade = backtesting._check_adjust_trade_for_candle(trade, row_exit, current_time)
     assert trade
     assert pytest.approx(trade.stake_amount) == 100.0
@@ -220,5 +220,5 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
     # Adjust to close trade
     backtesting.strategy.adjust_trade_position = MagicMock(return_value=-trade.stake_amount)
-    trade = backtesting._get_adjust_trade_entry_for_candle(trade, row_exit, current_time)
+    trade = backtesting._check_adjust_trade_for_candle(trade, row_exit, current_time)
     assert trade.is_open is False

View File

@@ -276,3 +276,67 @@ def test_trade_serialize_load_back(fee):
    trade3 = LocalTrade.from_json(trade_string)
    assert len(trade3.orders) == len(t.orders)

@pytest.mark.usefixtures("init_persistence")
def test_trade_fromjson_backtesting():
"""
trade.from_json should be able to load a trade output via backtesting.
"""
trade_string = """
{
"pair":"XRP/USDT:USDT",
"stake_amount":3015.294001,
"max_stake_amount":60305.88002,
"amount":94612.3,
"open_date":"2024-03-02 10:40:00+00:00",
"close_date":"2024-03-02 11:10:00+00:00",
"open_rate":0.6374,
"close_rate":0.6399,
"fee_open":0.0005,
"fee_close":0.0005,
"trade_duration":30,
"profit_ratio":-0.09853216535933962,
"profit_abs":-296.95489539,
"exit_reason":"trailing_stop_loss",
"initial_stop_loss_abs":0.6689,
"initial_stop_loss_ratio":-0.99,
"stop_loss_abs":0.6399,
"stop_loss_ratio":-0.3368287257705749,
"min_rate":0.6294,
"max_rate":0.6421,
"is_open":false,
"enter_tag":"[0.6373, 0.5993, 0.64]",
"leverage":20,
"is_short":true,
"open_timestamp":1709376000000,
"close_timestamp":1709377800000,
"orders":[
{
"amount":94612.3,
"safe_price":0.6374,
"ft_order_side":"sell",
"order_filled_timestamp":1709376000000,
"ft_is_entry":true,
"ft_order_tag":"[0.6373, 0.5993, 0.64]",
"cost":60336.032960009994
},
{
"amount":94612.3,
"safe_price":0.6399,
"ft_order_side":"buy",
"order_filled_timestamp":1709377800000,
"ft_is_entry":false,
"ft_order_tag":"trailing_stop_loss",
"cost":60572.681975385
}
]
}
"""
trade = Trade.from_json(trade_string)
Trade.session.add(trade)
Trade.commit()
# Trade-id not given - use first available
assert trade.id == 1
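
As a usage sketch only (the file name and surrounding setup are assumptions, and the persistence layer is expected to be initialised the way the `init_persistence` fixture does), the same mechanism lets exported backtest trades be replayed into a database:

```python
# Hypothetical example - "backtest_trades.json" is an invented file name.
import json

from freqtrade.persistence import Trade

with open("backtest_trades.json") as fh:
    for record in json.load(fh):                       # one dict per exported trade
        Trade.session.add(Trade.from_json(json.dumps(record)))
Trade.commit()
```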

View File

@@ -577,7 +577,7 @@ def test_rpc_balance_handle(default_conf_usdt, mocker, tickers, proxy_coin, marg
     "symbol": "ETH/USDT:USDT",
     "timestamp": None,
     "datetime": None,
-    "initialMargin": 0.0,
+    "initialMargin": 20,
     "initialMarginPercentage": None,
     "maintenanceMargin": 0.0,
     "maintenanceMarginPercentage": 0.005,
@@ -590,8 +590,9 @@
     "marginRatio": None,
     "liquidationPrice": 0.0,
     "markPrice": 2896.41,
-    "collateral": 20,
-    "marginType": "isolated",
+    # Collateral is in USDT - and can be higher than position size in cross mode
+    "collateral": 50,
+    "marginType": "cross",
     "side": "short",
     "percentage": None,
 }
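
The fixture now describes a cross-margin position: initial margin stays at roughly notional / leverage, but the reported collateral is the account's whole free margin balance, which is why it can exceed the position's own margin (50 vs. 20 above). A toy calculation, with made-up position numbers:

```python
# Toy numbers - only the 20/50 relationship mirrors the fixture above.
notional = 200.0                       # hypothetical position size in USDT
leverage = 10
initial_margin = notional / leverage   # 20.0 USDT locked for this position
free_margin = 50.0                     # in cross mode the whole balance backs the position
collateral = free_margin
assert collateral > initial_margin
```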

View File

@@ -21,10 +21,12 @@ class TestStrategyNoImplementSell(TestStrategyNoImplements):
         return super().populate_entry_trend(dataframe, metadata)


-class TestStrategyImplementCustomSell(TestStrategyNoImplementSell):
+class TestStrategyImplementEmptyWorking(TestStrategyNoImplementSell):
     def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
         return super().populate_exit_trend(dataframe, metadata)


+class TestStrategyImplementCustomSell(TestStrategyImplementEmptyWorking):
     def custom_sell(
         self,
         pair: str,
@@ -55,3 +57,34 @@ class TestStrategyImplementSellTimeout(TestStrategyNoImplementSell):
        self, pair: str, trade, order: Order, current_time: datetime, **kwargs
    ) -> bool:
        return False


class TestStrategyAdjustOrderPrice(TestStrategyImplementEmptyWorking):
    def adjust_entry_price(
        self,
        trade,
        order,
        pair,
        current_time,
        proposed_rate,
        current_order_rate,
        entry_tag,
        side,
        **kwargs,
    ):
        return proposed_rate

    def adjust_order_price(
        self,
        trade,
        order,
        pair,
        current_time,
        proposed_rate,
        current_order_rate,
        entry_tag,
        side,
        is_entry,
        **kwargs,
    ):
        return proposed_rate
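
For reference, a user strategy would typically implement only the combined callback, reusing the signature declared by the helper class above. This is a hedged sketch, not canonical documentation; `MyStrategy` and the 1% threshold are invented for illustration.

```python
from freqtrade.strategy import IStrategy


class MyStrategy(IStrategy):
    # Hypothetical example built on the signature shown in TestStrategyAdjustOrderPrice above.
    def adjust_order_price(
        self, trade, order, pair, current_time,
        proposed_rate, current_order_rate, entry_tag, side, is_entry, **kwargs
    ):
        if is_entry:
            return proposed_rate  # re-peg open entry orders to the freshly proposed rate
        # Only move an open exit order if the price has drifted by more than 1%.
        if abs(proposed_rate - current_order_rate) / current_order_rate > 0.01:
            return proposed_rate
        return current_order_rate  # returning the current rate keeps the order untouched
```

Note that only the combined callback is defined here: implementing `adjust_order_price` together with `adjust_entry_price` is rejected by the resolver, as the test below asserts.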

View File

@@ -460,6 +460,10 @@ def test_missing_implements(default_conf, caplog):
    ):
        StrategyResolver.load_strategy(default_conf)

    default_conf["strategy"] = "TestStrategyAdjustOrderPrice"
    with pytest.raises(OperationalException, match=r"If you implement `adjust_order_price`.*"):
        StrategyResolver.load_strategy(default_conf)


def test_call_deprecated_function(default_conf):
    default_location = Path(__file__).parent / "strats/broken_strats/"