Mirror of https://github.com/freqtrade/freqtrade.git (synced 2025-12-17 13:21:15 +00:00)

Merge branch 'develop' into api-server-list-custom-data
@@ -16,6 +16,8 @@ jobs:
name: develop
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- uses: actions/setup-python@v5
with:
@@ -42,6 +44,7 @@ jobs:
branch: update/binance-leverage-tiers
title: Update Binance Leverage Tiers
commit-message: "chore: update pre-commit hooks"
committer: Freqtrade Bot <noreply@github.com>
committer: Freqtrade Bot <154552126+freqtrade-bot@users.noreply.github.com>
author: Freqtrade Bot <154552126+freqtrade-bot@users.noreply.github.com>
body: Update binance leverage tiers.
delete-branch: true

.github/workflows/ci.yml (vendored, 28 changes)
@@ -29,6 +29,8 @@ jobs:

steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Set up Python
uses: actions/setup-python@v5
@@ -157,6 +159,8 @@ jobs:

steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Set up Python
uses: actions/setup-python@v5
@@ -281,6 +285,8 @@ jobs:

steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Set up Python
uses: actions/setup-python@v5
@@ -366,6 +372,8 @@ jobs:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Set up Python
uses: actions/setup-python@v5
@@ -381,6 +389,8 @@ jobs:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- uses: actions/setup-python@v5
with:
@@ -391,6 +401,8 @@ jobs:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Documentation syntax
run: |
@@ -420,6 +432,8 @@ jobs:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Set up Python
uses: actions/setup-python@v5
@@ -506,6 +520,8 @@ jobs:

steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Set up Python
uses: actions/setup-python@v5
@@ -551,6 +567,8 @@ jobs:

steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Download artifact 📦
uses: actions/download-artifact@v4
@@ -578,6 +596,8 @@ jobs:

steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Download artifact 📦
uses: actions/download-artifact@v4
@@ -598,6 +618,8 @@ jobs:

steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Set up Python
uses: actions/setup-python@v5
@@ -633,7 +655,9 @@ jobs:
uses: docker/setup-buildx-action@v3

- name: Available platforms
run: echo ${{ steps.buildx.outputs.platforms }}
run: echo ${PLATFORMS}
env:
PLATFORMS: ${{ steps.buildx.outputs.platforms }}

- name: Build and test and push docker images
env:
@@ -652,6 +676,8 @@ jobs:

steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Extract branch name
id: extract-branch

.github/workflows/deploy-docs.yml (vendored, 10 changes)
@@ -20,6 +20,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
persist-credentials: true

- name: Set up Python
uses: actions/setup-python@v5
@@ -43,12 +45,16 @@ jobs:
- name: Build and push Mike
if: ${{ github.event_name == 'push' }}
run: |
mike deploy ${{ github.ref_name }} latest --push --update-aliases
mike deploy ${REF_NAME} latest --push --update-aliases
env:
REF_NAME: ${{ github.ref_name }}

- name: Build and push Mike - Release
if: ${{ github.event_name == 'release' }}
run: |
mike deploy ${{ github.ref_name }} stable --push --update-aliases
mike deploy ${REF_NAME} stable --push --update-aliases
env:
REF_NAME: ${{ github.ref_name }}

- name: Show mike versions
run: |

.github/workflows/devcontainer-build.yml (vendored, 39 changes)
@@ -17,29 +17,26 @@ concurrency:
group: "${{ github.workflow }}"
cancel-in-progress: true

permissions:
packages: write

jobs:
build-and-push:
permissions:
packages: write
runs-on: ubuntu-latest
steps:
-
name: Checkout
id: checkout
uses: actions/checkout@v4
-
name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
-
name: Pre-build dev container image
uses: devcontainers/ci@v0.3
with:
subFolder: .github
imageName: ghcr.io/${{ github.repository }}-devcontainer
cacheFrom: ghcr.io/${{ github.repository }}-devcontainer
push: always
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Pre-build dev container image
uses: devcontainers/ci@v0.3
with:
subFolder: .github
imageName: ghcr.io/${{ github.repository }}-devcontainer
cacheFrom: ghcr.io/${{ github.repository }}-devcontainer
push: always

.github/workflows/docker-update-readme.yml (vendored, 5 changes)
@@ -4,11 +4,16 @@ on:
branches:
- stable

# disable permissions for all of the available permissions
permissions: {}

jobs:
dockerHubDescription:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Docker Hub Description
uses: peter-evans/dockerhub-description@v4

.github/workflows/pre-commit-update.yml (vendored, 5 changes)
@@ -14,6 +14,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false

- uses: actions/setup-python@v5
with:
@@ -36,6 +38,7 @@ jobs:
branch: update/pre-commit-hooks
title: Update pre-commit hooks
commit-message: "chore: update pre-commit hooks"
committer: Freqtrade Bot <noreply@github.com>
committer: Freqtrade Bot <154552126+freqtrade-bot@users.noreply.github.com>
author: Freqtrade Bot <154552126+freqtrade-bot@users.noreply.github.com>
body: Update versions of pre-commit hooks to latest version.
delete-branch: true

@@ -2,7 +2,7 @@
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pycqa/flake8
rev: "7.1.1"
rev: "7.1.2"
hooks:
- id: flake8
additional_dependencies: [Flake8-pyproject]
@@ -16,14 +16,14 @@ repos:
additional_dependencies:
- types-cachetools==5.5.0.20240820
- types-filelock==3.2.7
- types-requests==2.32.0.20241016
- types-requests==2.32.0.20250301
- types-tabulate==0.9.0.20241207
- types-python-dateutil==2.9.0.20241206
- SQLAlchemy==2.0.38
# stages: [push]

- repo: https://github.com/pycqa/isort
rev: "6.0.0"
rev: "6.0.1"
hooks:
- id: isort
name: isort (python)
@@ -31,7 +31,7 @@ repos:

- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.9.6'
rev: 'v0.9.9'
hooks:
- id: ruff
- id: ruff-format
@@ -57,7 +57,7 @@ repos:
)$

- repo: https://github.com/stefmolin/exif-stripper
rev: 0.6.1
rev: 0.6.2
hooks:
- id: strip-exif

@@ -67,3 +67,9 @@ repos:
- id: codespell
additional_dependencies:
- tomli

# Ensure github actions remain safe
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.4.1
hooks:
- id: zizmor

@@ -1,4 +1,4 @@
FROM python:3.12.8-slim-bookworm as base
FROM python:3.12.9-slim-bookworm as base

# Setup env
ENV LANG C.UTF-8

Binary file not shown.
@@ -34,7 +34,7 @@ COPY build_helpers/* /tmp/
# Install dependencies
COPY --chown=ftuser:ftuser requirements.txt /freqtrade/
USER ftuser
RUN pip install --user --no-cache-dir numpy \
RUN pip install --user --no-cache-dir "numpy<2" \
  && pip install --user --no-index --find-links /tmp/ pyarrow TA-Lib \
  && pip install --user --no-cache-dir -r requirements.txt

@@ -315,6 +315,7 @@ It contains some useful key metrics about performance of your strategy on backte
| Sortino | 1.88 |
| Sharpe | 2.97 |
| Calmar | 6.29 |
| SQN | 2.45 |
| Profit factor | 1.11 |
| Expectancy (Ratio) | -0.15 (-0.05) |
| Avg. stake amount | 0.001 BTC |
@@ -368,6 +369,7 @@ It contains some useful key metrics about performance of your strategy on backte
- `Sortino`: Annualized Sortino ratio.
- `Sharpe`: Annualized Sharpe ratio.
- `Calmar`: Annualized Calmar ratio.
- `SQN`: System Quality Number (SQN) - by Van Tharp.
- `Profit factor`: profit / loss.
- `Avg. stake amount`: Average stake amount, either `stake_amount` or the average when using dynamic stake amount.
- `Total trade volume`: Volume generated on the exchange to reach the above profit.

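Reviewer note: the SQN metric added above is derived from per-trade profits relative to the starting balance; a minimal sketch of the formula, mirroring the `calculate_sqn()` helper added later in this commit (assumes at least two trades so the standard deviation is defined):

```python
import math
import pandas as pd

def sqn(profit_abs: pd.Series, starting_balance: float) -> float:
    # SQN = sqrt(number of trades) * mean(relative profit) / std(relative profit)
    rel = profit_abs / starting_balance
    return math.sqrt(len(rel)) * rel.mean() / rel.std()
```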
@@ -54,11 +54,13 @@ By default, the bot loop runs every few seconds (`internals.process_throttle_sec
* Check timeouts for open orders.
* Calls `check_entry_timeout()` strategy callback for open entry orders.
* Calls `check_exit_timeout()` strategy callback for open exit orders.
* Calls `adjust_entry_price()` strategy callback for open entry orders.
* Calls `adjust_order_price()` strategy callback for open orders.
* Calls `adjust_entry_price()` strategy callback for open entry orders. *only called when `adjust_order_price()` is not implemented*
* Calls `adjust_exit_price()` strategy callback for open exit orders. *only called when `adjust_order_price()` is not implemented*
* Verifies existing positions and eventually places exit orders.
* Considers stoploss, ROI and exit-signal, `custom_exit()` and `custom_stoploss()`.
* Determine exit-price based on `exit_pricing` configuration setting or by using the `custom_exit_price()` callback.
* Before a exit order is placed, `confirm_trade_exit()` strategy callback is called.
* Before an exit order is placed, `confirm_trade_exit()` strategy callback is called.
* Check position adjustments for open trades if enabled by calling `adjust_trade_position()` and place additional order if required.
* Check if trade-slots are still available (if `max_open_trades` is reached).
* Verifies entry signal trying to enter new positions.
@@ -80,7 +82,9 @@ This loop will be repeated again and again until the bot is stopped.
* Loops per candle simulating entry and exit points.
* Calls `bot_loop_start()` strategy callback.
* Check for Order timeouts, either via the `unfilledtimeout` configuration, or via `check_entry_timeout()` / `check_exit_timeout()` strategy callbacks.
* Calls `adjust_entry_price()` strategy callback for open entry orders.
* Calls `adjust_order_price()` strategy callback for open orders.
* Calls `adjust_entry_price()` strategy callback for open entry orders. *only called when `adjust_order_price()` is not implemented!*
* Calls `adjust_exit_price()` strategy callback for open exit orders. *only called when `adjust_order_price()` is not implemented!*
* Check for trade entry signals (`enter_long` / `enter_short` columns).
* Confirm trade entry / exits (calls `confirm_trade_entry()` and `confirm_trade_exit()` if implemented in the strategy).
* Call `custom_entry_price()` (if implemented in the strategy) to determine entry price (Prices are moved to be within the opening candle).

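Reviewer note: the timeout callbacks referenced in the lists above return `True` to cancel the open order. A minimal hedged sketch (signature taken from the strategy-callbacks documentation; treat the exact parameters as an assumption here):

```python
from datetime import datetime

from freqtrade.persistence import Order, Trade
from freqtrade.strategy import IStrategy


class TimeoutExampleStrategy(IStrategy):
    timeframe = "5m"
    stoploss = -0.10

    def check_entry_timeout(
        self, pair: str, trade: Trade, order: Order, current_time: datetime, **kwargs
    ) -> bool:
        # Cancel entry orders that have been open for more than 10 minutes.
        return (current_time - order.order_date_utc).total_seconds() > 600
```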
@@ -377,6 +377,9 @@ If an incorrect category string is chosen, the plugin will print the available c
!!! Warning "Many categories"
    Each added category corresponds to one API call to CoinGecko. The more categories you add, the longer the pairlist generation will take, potentially causing rate limit issues.

!!! Danger "Duplicate symbols in coingecko"
    Coingecko often has duplicate symbols, where the same symbol is used for different coins. Freqtrade will use the symbol as is and try to search for it on the exchange. If the symbol exists - it will be used. Freqtrade will however not check if the _intended_ symbol is the one coingecko meant. This can sometimes lead to unexpected results, especially on low volume coins or with meme coin categories.

#### AgeFilter

Removes pairs that have been listed on the exchange for less than `min_days_listed` days (defaults to `10`) or more than `max_days_listed` days (defaults to `None`, meaning no upper limit).

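Reviewer note: a minimal sketch of how AgeFilter is typically combined with a generating pairlist, shown here as a Python literal for illustration (in the bot configuration this is the equivalent JSON; the surrounding VolumePairList entry and its values are assumptions):

```python
pairlists = [
    {"method": "VolumePairList", "number_assets": 20, "sort_key": "quoteVolume"},
    # Drop pairs listed for fewer than 10 days or longer than 500 days.
    {"method": "AgeFilter", "min_days_listed": 10, "max_days_listed": 500},
]
```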
@@ -1,7 +1,7 @@
markdown==3.7
mkdocs==1.6.1
mkdocs-material==9.6.3
mkdocs-material==9.6.7
mdx_truly_sane_lists==1.3
pymdown-extensions==10.14.3
jinja2==3.1.5
jinja2==3.1.6
mike==2.1.3

@@ -154,10 +154,10 @@ For example, simplified math:

In summary: The stoploss will be adjusted to always be -10% of the highest observed price.

### Trailing stop loss, custom positive loss
### Trailing stop loss, different positive loss

You could also have a default stop loss when you are in the red with your buy (buy - fee), but once you hit a positive result (or an offset you define) the system will utilize a new stop loss, which can have a different value.
For example, your default stop loss is -10%, but once you have more than 0% profit (example 0.1%) a different trailing stoploss will be used.
You could also have a default stop loss when you are in the red with your buy (buy - fee), but once you hit a positive result (or an offset you define) the system will utilize a new stop loss, with a different value.
For example, your default stop loss is -10%, but once you have reached profitability (example 0.1%) a different trailing stoploss will be used.

!!! Note
    If you want the stoploss to only be changed when you break even or make a profit (what most users want) please refer to the next section with [offset enabled](#trailing-stop-loss-only-once-the-trade-has-reached-a-certain-offset).
@@ -208,7 +208,9 @@ Before this, `stoploss` is used for the trailing stoploss.

You can also keep a static stoploss until the offset is reached, and then trail the trade to take profits once the market turns.

If `trailing_only_offset_is_reached = True` then the trailing stoploss is only activated once the offset is reached. Until then, the stoploss remains at the configured `stoploss`.
If `trailing_only_offset_is_reached = True` then the trailing stoploss is only activated once the offset is reached. Until then, the stoploss remains at the configured `stoploss` and is not trailing.
Leaving this value as `trailing_only_offset_is_reached=False` will allow the trailing stoploss to start trailing as soon as the asset price increases above the initial entry price.

This option can be used with or without `trailing_stop_positive`, but uses `trailing_stop_positive_offset` as offset.

Configuration (offset is buy-price + 3%):

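Reviewer note: a minimal sketch of such a configuration as strategy attributes, with values chosen to match the 3% offset described above; treat this as an illustration rather than the exact block from the docs (which is not shown in this hunk):

```python
# Hedged illustration of a trailing stoploss that only starts trailing at +3%.
trailing_stop = True
trailing_stop_positive = 0.01              # trail 1% below the peak once active
trailing_stop_positive_offset = 0.03       # activate only after +3% profit
trailing_only_offset_is_reached = True
stoploss = -0.10                           # static stoploss until the offset is hit
```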
@@ -79,6 +79,8 @@ import talib.abstract as ta

class MyStrategy(IStrategy):

    timeframe = '15m'

    # set the initial stoploss to -10%
    stoploss = -0.10

@@ -876,6 +876,9 @@ class DigDeeperStrategy(IStrategy):
        Return None for no action.
        Optionally, return a tuple with a 2nd element with an order reason
        """
        if trade.has_open_orders:
            # Only act if no orders are open
            return

        if current_profit > 0.05 and trade.nr_of_successful_exits == 0:
            # Take half of the profit at +5%
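Reviewer note: the docstring above mentions that `adjust_trade_position()` may optionally return a tuple whose second element is an order reason. A hedged sketch of how the partial-exit branch shown here could use that; the tag string and the abbreviated parameter list are assumptions:

```python
def adjust_trade_position(self, trade, current_time, current_rate,
                          current_profit, min_stake, max_stake, **kwargs):
    if trade.has_open_orders:
        # Only act if no orders are open
        return None
    if current_profit > 0.05 and trade.nr_of_successful_exits == 0:
        # Negative stake reduces the position; the second tuple element becomes the order reason.
        return -(trade.stake_amount / 2), "half_profit_5%"
    return None
```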
@@ -934,28 +937,25 @@ class DigDeeperStrategy(IStrategy):

The total profit for this trade was 950$ on a 3350$ investment (`100@8$ + 100@9$ + 150@11$`). As such - the final relative profit is 28.35% (`950 / 3350`).

## Adjust Entry Price
## Adjust order Price

The `adjust_entry_price()` callback may be used by strategy developer to refresh/replace limit orders upon arrival of new candles.
The `adjust_order_price()` callback may be used by strategy developer to refresh/replace limit orders upon arrival of new candles.
This callback is called once every iteration unless the order has been (re)placed within the current candle - limiting the maximum (re)placement of each order to once per candle.
This also means that the first call will be at the start of the next candle after the initial order was placed.

Be aware that `custom_entry_price()` is still the one dictating initial entry limit order price target at the time of entry trigger.
Be aware that `custom_entry_price()`/`custom_exit_price()` is still the one dictating initial limit order price target at the time of the signal.

Orders can be cancelled out of this callback by returning `None`.

Returning `current_order_rate` will keep the order on the exchange "as is".
Returning any other price will cancel the existing order, and replace it with a new order.

The trade open-date (`trade.open_date_utc`) will remain at the time of the very first order placed.
Please make sure to be aware of this - and eventually adjust your logic in other callbacks to account for this, and use the date of the first filled order instead.

If the cancellation of the original order fails, then the order will not be replaced - though the order will most likely have been canceled on exchange. Having this happen on initial entries will result in the deletion of the order, while on position adjustment orders, it'll result in the trade size remaining as is.
If the order has been partially filled, the order will not be replaced. You can however use [`adjust_trade_position()`](#adjust-trade-position) to adjust the trade size to the full, expected position size, should this be necessary / desired.
If the order has been partially filled, the order will not be replaced. You can however use [`adjust_trade_position()`](#adjust-trade-position) to adjust the trade size to the expected position size, should this be necessary / desired.

!!! Warning "Regular timeout"
    Entry `unfilledtimeout` mechanism (as well as `check_entry_timeout()`) takes precedence over this.
    Entry Orders that are cancelled via the above methods will not have this callback called. Be sure to update timeout values to match your expectations.
    Entry `unfilledtimeout` mechanism (as well as `check_entry_timeout()`/`check_exit_timeout()`) takes precedence over this callback.
    Orders that are cancelled via the above methods will not have this callback called. Be sure to update timeout values to match your expectations.

```python
# Default imports
@@ -964,14 +964,26 @@ class AwesomeStrategy(IStrategy):

    # ... populate_* methods

    def adjust_entry_price(self, trade: Trade, order: Order | None, pair: str,
                           current_time: datetime, proposed_rate: float, current_order_rate: float,
                           entry_tag: str | None, side: str, **kwargs) -> float:
    def adjust_order_price(
        self,
        trade: Trade,
        order: Order | None,
        pair: str,
        current_time: datetime,
        proposed_rate: float,
        current_order_rate: float,
        entry_tag: str | None,
        side: str,
        is_entry: bool,
        **kwargs,
    ) -> float:
        """
        Entry price re-adjustment logic, returning the user desired limit price.
        Exit and entry order price re-adjustment logic, returning the user desired limit price.
        This only executes when an order was already placed, still open (unfilled fully or partially)
        and not timed out on subsequent candles after entry trigger.

        For full documentation please go to https://www.freqtrade.io/en/latest/strategy-callbacks/

        When not implemented by a strategy, returns current_order_rate as default.
        If current_order_rate is returned then the existing order is maintained.
        If None is returned then order gets canceled but not replaced by a new one.
@@ -983,14 +995,16 @@ class AwesomeStrategy(IStrategy):
        :param proposed_rate: Rate, calculated based on pricing settings in entry_pricing.
        :param current_order_rate: Rate of the existing order in place.
        :param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
        :param side: "long" or "short" - indicating the direction of the proposed trade
        :param side: 'long' or 'short' - indicating the direction of the proposed trade
        :param is_entry: True if the order is an entry order, False if it's an exit order.
        :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
        :return float: New entry price value if provided

        """
        # Limit orders to use and follow SMA200 as price target for the first 10 minutes since entry trigger for BTC/USDT pair.

        # Limit entry orders to use and follow SMA200 as price target for the first 10 minutes since entry trigger for BTC/USDT pair.
        if (
            pair == "BTC/USDT"
            is_entry
            and pair == "BTC/USDT"
            and entry_tag == "long_sma200"
            and side == "long"
            and (current_time - timedelta(minutes=10)) <= trade.open_date_utc
@@ -1007,6 +1021,26 @@ class AwesomeStrategy(IStrategy):
        return current_order_rate
```

!!! danger "Incompatibility with `adjust_*_price()`"
    If you have both `adjust_order_price()` and `adjust_entry_price()`/`adjust_exit_price()` implemented, only `adjust_order_price()` will be used.
    If you need to adjust entry/exit prices, you can either implement the logic in `adjust_order_price()`, or use the split `adjust_entry_price()` / `adjust_exit_price()` callbacks, but not both.
    Mixing these is not supported and will raise an error during bot startup.

### Adjust Entry Price

The `adjust_entry_price()` callback may be used by strategy developer to refresh/replace entry limit orders upon arrival of new candles.
It's a sub-set of `adjust_order_price()` and is called only for entry orders.
All remaining behavior is identical to `adjust_order_price()`.

The trade open-date (`trade.open_date_utc`) will remain at the time of the very first order placed.
Please make sure to be aware of this - and eventually adjust your logic in other callbacks to account for this, and use the date of the first filled order instead.

### Adjust Exit Price

The `adjust_exit_price()` callback may be used by strategy developer to refresh/replace exit limit orders upon arrival of new candles.
It's a sub-set of `adjust_order_price()` and is called only for exit orders.
All remaining behavior is identical to `adjust_order_price()`.

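Reviewer note: a minimal hedged sketch of the split exit-side callback. The parameter list is assumed to mirror `adjust_entry_price()` shown earlier (without `is_entry`), per the note that behaviour is otherwise identical:

```python
# Default imports

class AwesomeStrategy(IStrategy):

    def adjust_exit_price(self, trade: Trade, order: Order | None, pair: str,
                          current_time: datetime, proposed_rate: float,
                          current_order_rate: float, entry_tag: str | None,
                          side: str, **kwargs) -> float:
        # Keep exit orders pegged to the freshly proposed exit rate,
        # cancelling and replacing the order whenever the price target moves.
        return proposed_rate
```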
## Leverage Callback

When trading in markets that allow leverage, this method must return the desired Leverage (Defaults to 1 -> No leverage).

@@ -513,7 +513,7 @@ By default, freqtrade will attempt to load strategies from all `.py` files withi
Assuming your strategy is called `AwesomeStrategy`, stored in the file `user_data/strategies/AwesomeStrategy.py`, then you can start freqtrade in dry (or live, depending on your configuration) mode with:

```bash
freqtrade trade --strategy AwesomeStrategy`
freqtrade trade --strategy AwesomeStrategy
```

Note that we're using the class name, not the file name.
@@ -1122,6 +1122,7 @@ The following list contains some common patterns which should be avoided to prev
- don't use `.iloc[-1]` or any other absolute position in the dataframe within `populate_` functions, as this will be different between dry-run and backtesting. Absolute `iloc` indexing is safe to use in callbacks however - see [Strategy Callbacks](strategy-callbacks.md).
- don't use functions that use all dataframe or column values, e.g. `dataframe['mean_volume'] = dataframe['volume'].mean()`. As backtesting uses the full dataframe, at any point in the dataframe, the `'mean_volume'` series would include data from the future. Use rolling() calculations instead, e.g. `dataframe['volume'].rolling(<window>).mean()` (see the sketch after this list).
- don't use `.resample('1h')`. This uses the left border of the period interval, so moves data from an hour boundary to the start of the hour. Use `.resample('1h', label='right')` instead.
- don't use `.merge()` to combine longer timeframes onto shorter ones. Instead, use the [informative pair](#informative-pairs) helpers. (A plain merge can implicitly cause a lookahead bias as date refers to open date, not close date).

!!! Tip "Identifying problems"
    You should always use the two helper commands [lookahead-analysis](lookahead-analysis.md) and [recursive-analysis](recursive-analysis.md), which can each help you figure out problems with your strategy in different ways.

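Reviewer note: a small sketch of the rolling-window replacement described in the list above; column names follow the usual freqtrade dataframe layout and the window length is an arbitrary choice:

```python
import pandas as pd

def add_mean_volume(dataframe: pd.DataFrame) -> pd.DataFrame:
    # Lookahead bias: the whole column's mean (including future rows) is visible at every row.
    # dataframe["mean_volume"] = dataframe["volume"].mean()

    # Safe: each row only sees the previous 24 candles.
    dataframe["mean_volume"] = dataframe["volume"].rolling(24).mean()
    return dataframe
```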
@@ -35,6 +35,7 @@ The following attributes / properties are available for each individual trade -
| `trade_direction` | "long" / "short" | Trade direction in text - long or short. |
| `nr_of_successful_entries` | int | Number of successful (filled) entry orders. |
| `nr_of_successful_exits` | int | Number of successful (filled) exit orders. |
| `has_open_orders` | boolean | Has the trade open orders (excluding stoploss orders). |

## Class methods

@@ -1,6 +1,6 @@
"""Freqtrade bot"""

__version__ = "2025.2-dev"
__version__ = "2025.3-dev"

if "dev" in __version__:
    from pathlib import Path

@@ -375,3 +375,32 @@ def calculate_calmar(

    # print(expected_returns_mean, max_drawdown, calmar_ratio)
    return calmar_ratio


def calculate_sqn(trades: pd.DataFrame, starting_balance: float) -> float:
    """
    Calculate System Quality Number (SQN) - Van K. Tharp.
    SQN measures systematic trading quality and takes into account both
    the number of trades and their standard deviation.

    :param trades: DataFrame containing trades (requires column profit_abs)
    :param starting_balance: Starting balance of the trading system
    :return: SQN value
    """
    if len(trades) == 0:
        return 0.0

    total_profit = trades["profit_abs"] / starting_balance
    number_of_trades = len(trades)

    # Calculate average trade and standard deviation
    average_profits = total_profit.mean()
    profits_std = total_profit.std()

    if profits_std != 0 and not np.isnan(profits_std):
        sqn = math.sqrt(number_of_trades) * (average_profits / profits_std)
    else:
        # Define negative SQN to indicate this is NOT optimal
        sqn = -100.0

    return round(sqn, 4)

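Reviewer note: a quick usage sketch of the new helper; the import path is assumed from the file this hunk belongs to (`freqtrade/data/metrics.py`):

```python
import pandas as pd

from freqtrade.data.metrics import calculate_sqn

trades = pd.DataFrame({"profit_abs": [12.0, -5.0, 30.0, 8.0, -3.0]})
print(calculate_sqn(trades, starting_balance=1000))  # positive SQN for this small winning sample
```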
@@ -11,7 +11,11 @@ from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS
|
||||
from freqtrade.enums import CandleType, MarginMode, PriceType, TradingMode
|
||||
from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
|
||||
from freqtrade.exchange import Exchange
|
||||
from freqtrade.exchange.binance_public_data import concat_safe, download_archive_ohlcv
|
||||
from freqtrade.exchange.binance_public_data import (
|
||||
concat_safe,
|
||||
download_archive_ohlcv,
|
||||
download_archive_trades,
|
||||
)
|
||||
from freqtrade.exchange.common import retrier
|
||||
from freqtrade.exchange.exchange_types import FtHas, Tickers
|
||||
from freqtrade.exchange.exchange_utils_timeframe import timeframe_to_msecs
|
||||
@@ -379,3 +383,48 @@ class Binance(Exchange):
|
||||
if not t:
|
||||
return [], "0"
|
||||
return t, from_id
|
||||
|
||||
async def _async_get_trade_history_id(
|
||||
self, pair: str, until: int, since: int, from_id: str | None = None
|
||||
) -> tuple[str, list[list]]:
|
||||
logger.info(f"Fetching trades from Binance, {from_id=}, {since=}, {until=}")
|
||||
|
||||
if not self._config["exchange"].get("only_from_ccxt", False):
|
||||
if from_id is None or not since:
|
||||
trades = await self._api_async.fetch_trades(
|
||||
pair,
|
||||
params={
|
||||
self._trades_pagination_arg: "0",
|
||||
},
|
||||
limit=5,
|
||||
)
|
||||
listing_date: int = trades[0]["timestamp"]
|
||||
since = max(since, listing_date)
|
||||
|
||||
_, res = await download_archive_trades(
|
||||
CandleType.SPOT,
|
||||
pair,
|
||||
since_ms=since,
|
||||
until_ms=until,
|
||||
markets=self.markets,
|
||||
)
|
||||
|
||||
if not res:
|
||||
end_time = since
|
||||
end_id = from_id
|
||||
else:
|
||||
end_time = res[-1][0]
|
||||
end_id = res[-1][1]
|
||||
|
||||
if end_time and end_time >= until:
|
||||
return pair, res
|
||||
else:
|
||||
_, res2 = await super()._async_get_trade_history_id(
|
||||
pair, until=until, since=end_time, from_id=end_id
|
||||
)
|
||||
res.extend(res2)
|
||||
return pair, res
|
||||
|
||||
return await super()._async_get_trade_history_id(
|
||||
pair, until=until, since=since, from_id=from_id
|
||||
)
|
||||
|
||||
File diff suppressed because it is too large.
@@ -1,5 +1,6 @@
|
||||
"""
|
||||
Fetch daily-archived OHLCV data from https://data.binance.vision/
|
||||
Documentation can be found in https://github.com/binance/binance-public-data
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
@@ -10,9 +11,11 @@ from io import BytesIO
|
||||
from typing import Any
|
||||
|
||||
import aiohttp
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
from pandas import DataFrame
|
||||
|
||||
from freqtrade.constants import DEFAULT_TRADES_COLUMNS
|
||||
from freqtrade.enums import CandleType
|
||||
from freqtrade.misc import chunks
|
||||
from freqtrade.util.datetime_helpers import dt_from_ts, dt_now
|
||||
@@ -157,8 +160,8 @@ async def _download_archive_ohlcv(
|
||||
return concat_safe(dfs)
|
||||
else:
|
||||
dfs.append(None)
|
||||
except BaseException as e:
|
||||
logger.warning(f"An exception raised: : {e}")
|
||||
except Exception as e:
|
||||
logger.warning(f"An exception raised: {e}")
|
||||
# Directly return the existing data, do not allow the gap within the data
|
||||
await cancel_and_await_tasks(tasks[tasks.index(task) + 1 :])
|
||||
return concat_safe(dfs)
|
||||
@@ -212,6 +215,20 @@ def binance_vision_ohlcv_zip_url(
|
||||
return url
|
||||
|
||||
|
||||
def binance_vision_trades_zip_url(symbol: str, candle_type: CandleType, date: date) -> str:
|
||||
"""
|
||||
example urls:
|
||||
https://data.binance.vision/data/spot/daily/aggTrades/BTCUSDT/BTCUSDT-aggTrades-2023-10-27.zip
|
||||
https://data.binance.vision/data/futures/um/daily/aggTrades/BTCUSDT/BTCUSDT-aggTrades-2023-10-27.zip
|
||||
"""
|
||||
asset_type_url_segment = candle_type_to_url_segment(candle_type)
|
||||
url = (
|
||||
f"https://data.binance.vision/data/{asset_type_url_segment}/daily/aggTrades/{symbol}"
|
||||
f"/{symbol}-aggTrades-{date.strftime('%Y-%m-%d')}.zip"
|
||||
)
|
||||
return url
|
||||
|
||||
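Reviewer note: a small usage sketch of the URL helper above, with values chosen to reproduce the example URL in its docstring (assuming the helper is importable from the module shown in this diff):

```python
from datetime import date

from freqtrade.enums import CandleType
from freqtrade.exchange.binance_public_data import binance_vision_trades_zip_url

url = binance_vision_trades_zip_url("BTCUSDT", CandleType.SPOT, date(2023, 10, 27))
# -> https://data.binance.vision/data/spot/daily/aggTrades/BTCUSDT/BTCUSDT-aggTrades-2023-10-27.zip
```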
|
||||
async def get_daily_ohlcv(
|
||||
symbol: str,
|
||||
timeframe: str,
|
||||
@@ -268,7 +285,11 @@ async def get_daily_ohlcv(
|
||||
names=["date", "open", "high", "low", "close", "volume"],
|
||||
header=header,
|
||||
)
|
||||
df["date"] = pd.to_datetime(df["date"], unit="ms", utc=True)
|
||||
df["date"] = pd.to_datetime(
|
||||
np.where(df["date"] > 1e13, df["date"] // 1000, df["date"]),
|
||||
unit="ms",
|
||||
utc=True,
|
||||
)
|
||||
return df
|
||||
elif resp.status == 404:
|
||||
logger.debug(f"Failed to download {url}")
|
||||
@@ -280,3 +301,203 @@ async def get_daily_ohlcv(
|
||||
if isinstance(e, Http404) or retry > retry_count:
|
||||
logger.debug(f"Failed to get data from {url}: {e}")
|
||||
raise
|
||||
|
||||
|
||||
async def download_archive_trades(
|
||||
candle_type: CandleType,
|
||||
pair: str,
|
||||
*,
|
||||
since_ms: int,
|
||||
until_ms: int | None,
|
||||
markets: dict[str, Any],
|
||||
stop_on_404: bool = True,
|
||||
) -> tuple[str, list[list]]:
|
||||
try:
|
||||
symbol = markets[pair]["id"]
|
||||
|
||||
last_available_date = dt_now() - timedelta(days=2)
|
||||
|
||||
start = dt_from_ts(since_ms)
|
||||
end = dt_from_ts(until_ms) if until_ms else dt_now()
|
||||
end = min(end, last_available_date)
|
||||
if start >= end:
|
||||
return pair, []
|
||||
result_list = await _download_archive_trades(
|
||||
symbol, pair, candle_type, start, end, stop_on_404
|
||||
)
|
||||
return pair, result_list
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"An exception occurred during fast trades download from Binance, falling back to "
|
||||
"the slower REST API, this can take a lot more time.",
|
||||
exc_info=e,
|
||||
)
|
||||
return pair, []
|
||||
|
||||
|
||||
def parse_trades_from_zip(csvf):
|
||||
# https://github.com/binance/binance-public-data/issues/283
|
||||
first_byte = csvf.read(1)[0]
|
||||
if chr(first_byte).isdigit():
|
||||
# spot
|
||||
header = None
|
||||
names = [
|
||||
"id",
|
||||
"price",
|
||||
"amount",
|
||||
"first_trade_id",
|
||||
"last_trade_id",
|
||||
"timestamp",
|
||||
"is_buyer_maker",
|
||||
"is_best_match",
|
||||
]
|
||||
else:
|
||||
# futures
|
||||
header = 0
|
||||
names = [
|
||||
"id",
|
||||
"price",
|
||||
"amount",
|
||||
"first_trade_id",
|
||||
"last_trade_id",
|
||||
"timestamp",
|
||||
"is_buyer_maker",
|
||||
]
|
||||
csvf.seek(0)
|
||||
|
||||
df = pd.read_csv(
|
||||
csvf,
|
||||
names=names,
|
||||
header=header,
|
||||
)
|
||||
df.loc[:, "cost"] = df["price"] * df["amount"]
|
||||
# Side is reversed intentionally
|
||||
# based on ccxt parseTrade logic.
|
||||
df.loc[:, "side"] = np.where(df["is_buyer_maker"], "sell", "buy")
|
||||
df.loc[:, "type"] = None
|
||||
# Convert timestamp to ms
|
||||
df.loc[:, "timestamp"] = np.where(
|
||||
df["timestamp"] > 1e13,
|
||||
df["timestamp"] // 1000,
|
||||
df["timestamp"],
|
||||
)
|
||||
return df.loc[:, DEFAULT_TRADES_COLUMNS].to_records(index=False).tolist()
|
||||
|
||||
|
||||
async def get_daily_trades(
|
||||
symbol: str,
|
||||
candle_type: CandleType,
|
||||
date: date,
|
||||
session: aiohttp.ClientSession,
|
||||
retry_count: int = 3,
|
||||
retry_delay: float = 0.0,
|
||||
) -> list[list]:
|
||||
"""
|
||||
Get daily aggregated trades from https://data.binance.vision
|
||||
See https://github.com/binance/binance-public-data
|
||||
|
||||
:symbol: binance symbol name, e.g. BTCUSDT
|
||||
:candle_type: SPOT or FUTURES
|
||||
:date: the returned trades will cover the entire day of `date` in UTC
|
||||
:session: an aiohttp.ClientSession instance
|
||||
:retry_count: times to retry before returning the exceptions
|
||||
:retry_delay: the time to wait before every retry
|
||||
:return: a list containing trades in DEFAULT_TRADES_COLUMNS format
|
||||
"""
|
||||
|
||||
url = binance_vision_trades_zip_url(symbol, candle_type, date)
|
||||
|
||||
logger.debug(f"download trades data from binance: {url}")
|
||||
|
||||
retry = 0
|
||||
while True:
|
||||
if retry > 0:
|
||||
sleep_secs = retry * retry_delay
|
||||
logger.debug(
|
||||
f"[{retry}/{retry_count}] retry to download {url} after {sleep_secs} seconds"
|
||||
)
|
||||
await asyncio.sleep(sleep_secs)
|
||||
try:
|
||||
async with session.get(url) as resp:
|
||||
if resp.status == 200:
|
||||
content = await resp.read()
|
||||
logger.debug(f"Successfully downloaded {url}")
|
||||
with zipfile.ZipFile(BytesIO(content)) as zipf:
|
||||
with zipf.open(zipf.namelist()[0]) as csvf:
|
||||
return parse_trades_from_zip(csvf)
|
||||
elif resp.status == 404:
|
||||
logger.debug(f"Failed to download {url}")
|
||||
raise Http404(f"404: {url}", date, url)
|
||||
else:
|
||||
raise BadHttpStatus(f"{resp.status} - {resp.reason}")
|
||||
except Exception as e:
|
||||
logger.info("download Daily_trades raised: %s", e)
|
||||
retry += 1
|
||||
if isinstance(e, Http404) or retry > retry_count:
|
||||
logger.debug(f"Failed to get data from {url}: {e}")
|
||||
raise
|
||||
|
||||
|
||||
async def _download_archive_trades(
|
||||
symbol: str,
|
||||
pair: str,
|
||||
candle_type: CandleType,
|
||||
start: date,
|
||||
end: date,
|
||||
stop_on_404: bool,
|
||||
) -> list[list]:
|
||||
# daily trade lists; `None` indicates missing data in that day (when `stop_on_404` is False)
|
||||
results: list[list] = []
|
||||
# the current day being processed, starting at 1.
|
||||
current_day = 0
|
||||
|
||||
connector = aiohttp.TCPConnector(limit=100)
|
||||
async with aiohttp.ClientSession(connector=connector, trust_env=True) as session:
|
||||
# the HTTP connections are throttled by the TCPConnector
|
||||
for dates in chunks(list(date_range(start, end)), 30):
|
||||
tasks = [
|
||||
asyncio.create_task(get_daily_trades(symbol, candle_type, date, session))
|
||||
for date in dates
|
||||
]
|
||||
for task in tasks:
|
||||
current_day += 1
|
||||
try:
|
||||
result = await task
|
||||
except Http404 as e:
|
||||
if stop_on_404:
|
||||
logger.debug(f"Failed to download {e.url} due to 404.")
|
||||
|
||||
# A 404 error on the first day indicates missing data
|
||||
# on https://data.binance.vision, we provide the warning and the advice.
|
||||
# https://github.com/freqtrade/freqtrade/blob/acc53065e5fa7ab5197073276306dc9dc3adbfa3/tests/exchange_online/test_binance_compare_ohlcv.py#L7
|
||||
if current_day == 1:
|
||||
logger.warning(
|
||||
f"Fast download is unavailable due to missing data: "
|
||||
f"{e.url}. Falling back to the slower REST API, "
|
||||
"which may take more time."
|
||||
)
|
||||
if pair in ["BTC/USDT:USDT", "ETH/USDT:USDT", "BCH/USDT:USDT"]:
|
||||
logger.warning(
|
||||
f"To avoid the delay, you can first download {pair} using "
|
||||
"`--timerange <start date>-20200101`, and then download the "
|
||||
"remaining data with `--timerange 20200101-<end date>`."
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
f"Binance fast download for {pair} stopped at {e.date} due to "
|
||||
f"missing data: {e.url}, falling back to rest API for the "
|
||||
"remaining data, this can take more time."
|
||||
)
|
||||
await cancel_and_await_tasks(tasks[tasks.index(task) + 1 :])
|
||||
return results
|
||||
except Exception as e:
|
||||
logger.warning(f"An exception raised: {e}")
|
||||
# Directly return the existing data, do not allow the gap within the data
|
||||
await cancel_and_await_tasks(tasks[tasks.index(task) + 1 :])
|
||||
return results
|
||||
else:
|
||||
# Happy case
|
||||
results.extend(result)
|
||||
|
||||
return results
|
||||
|
||||
@@ -2351,6 +2351,7 @@ class Exchange:
|
||||
since_ms=since_ms,
|
||||
until_ms=until_ms,
|
||||
candle_type=candle_type,
|
||||
raise_=True,
|
||||
)
|
||||
)
|
||||
logger.debug(f"Downloaded data for {pair} from ccxt with length {len(data)}.")
|
||||
@@ -2391,7 +2392,7 @@ class Exchange:
|
||||
if isinstance(res, BaseException):
|
||||
logger.warning(f"Async code raised an exception: {repr(res)}")
|
||||
if raise_:
|
||||
raise
|
||||
raise res
|
||||
continue
|
||||
else:
|
||||
# Deconstruct tuple if it's not an exception
|
||||
|
||||
@@ -4,7 +4,7 @@ Exchange support utils
|
||||
|
||||
import inspect
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from math import ceil, floor
|
||||
from math import ceil, floor, isnan
|
||||
from typing import Any
|
||||
|
||||
import ccxt
|
||||
@@ -305,7 +305,7 @@ def price_to_precision(
|
||||
:param rounding_mode: rounding mode to use. Defaults to ROUND
|
||||
:return: price rounded up to the precision the Exchange accepts
|
||||
"""
|
||||
if price_precision is not None and precisionMode is not None:
|
||||
if price_precision is not None and precisionMode is not None and not isnan(price):
|
||||
if rounding_mode not in (ROUND_UP, ROUND_DOWN):
|
||||
# Use CCXT code where possible.
|
||||
return float(
|
||||
|
||||
@@ -46,19 +46,20 @@ class BaseEnvironment(gym.Env):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
df: DataFrame = DataFrame(),
|
||||
prices: DataFrame = DataFrame(),
|
||||
reward_kwargs: dict = {},
|
||||
*,
|
||||
df: DataFrame,
|
||||
prices: DataFrame,
|
||||
reward_kwargs: dict,
|
||||
window_size=10,
|
||||
starting_point=True,
|
||||
id: str = "baseenv-1", # noqa: A002
|
||||
seed: int = 1,
|
||||
config: dict = {},
|
||||
config: dict,
|
||||
live: bool = False,
|
||||
fee: float = 0.0015,
|
||||
can_short: bool = False,
|
||||
pair: str = "",
|
||||
df_raw: DataFrame = DataFrame(),
|
||||
df_raw: DataFrame,
|
||||
):
|
||||
"""
|
||||
Initializes the training/eval environment.
|
||||
|
||||
@@ -488,7 +488,7 @@ def make_env(
|
||||
seed: int,
|
||||
train_df: DataFrame,
|
||||
price: DataFrame,
|
||||
env_info: dict[str, Any] = {},
|
||||
env_info: dict[str, Any],
|
||||
) -> Callable:
|
||||
"""
|
||||
Utility function for multiprocessed env.
|
||||
|
||||
@@ -33,6 +33,8 @@ LABEL_PIPELINE = "label_pipeline"
|
||||
TRAINDF = "trained_df"
|
||||
METADATA = "metadata"
|
||||
|
||||
METADATA_NUMBER_MODE = rapidjson.NM_NATIVE | rapidjson.NM_NAN
|
||||
|
||||
|
||||
class pair_info(TypedDict):
|
||||
model_filename: str
|
||||
@@ -495,7 +497,7 @@ class FreqaiDataDrawer:
|
||||
dk.data["label_list"] = dk.label_list
|
||||
|
||||
with (save_path / f"{dk.model_filename}_{METADATA}.json").open("w") as fp:
|
||||
rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE)
|
||||
rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=METADATA_NUMBER_MODE)
|
||||
|
||||
return
|
||||
|
||||
@@ -526,7 +528,7 @@ class FreqaiDataDrawer:
|
||||
dk.data["label_list"] = dk.label_list
|
||||
# store the metadata
|
||||
with (save_path / f"{dk.model_filename}_{METADATA}.json").open("w") as fp:
|
||||
rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE)
|
||||
rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=METADATA_NUMBER_MODE)
|
||||
|
||||
# save the pipelines to pickle files
|
||||
with (save_path / f"{dk.model_filename}_{FEATURE_PIPELINE}.pkl").open("wb") as fp:
|
||||
@@ -563,7 +565,7 @@ class FreqaiDataDrawer:
|
||||
presaved backtesting (prediction file loading).
|
||||
"""
|
||||
with (dk.data_path / f"{dk.model_filename}_{METADATA}.json").open("r") as fp:
|
||||
dk.data = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
|
||||
dk.data = rapidjson.load(fp, number_mode=METADATA_NUMBER_MODE)
|
||||
dk.training_features_list = dk.data["training_features_list"]
|
||||
dk.label_list = dk.data["label_list"]
|
||||
|
||||
@@ -587,7 +589,7 @@ class FreqaiDataDrawer:
|
||||
dk.label_pipeline = self.meta_data_dictionary[coin][LABEL_PIPELINE]
|
||||
else:
|
||||
with (dk.data_path / f"{dk.model_filename}_{METADATA}.json").open("r") as fp:
|
||||
dk.data = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
|
||||
dk.data = rapidjson.load(fp, number_mode=METADATA_NUMBER_MODE)
|
||||
|
||||
with (dk.data_path / f"{dk.model_filename}_{FEATURE_PIPELINE}.pkl").open("rb") as fp:
|
||||
dk.feature_pipeline = cloudpickle.load(fp)
|
||||
|
||||
@@ -214,7 +214,7 @@ class FreqaiDataKitchen:
|
||||
self,
|
||||
unfiltered_df: DataFrame,
|
||||
training_feature_list: list,
|
||||
label_list: list = list(),
|
||||
label_list: list | None = None,
|
||||
training_filter: bool = True,
|
||||
) -> tuple[DataFrame, DataFrame]:
|
||||
"""
|
||||
@@ -244,7 +244,7 @@ class FreqaiDataKitchen:
|
||||
# we don't care about total row number (total no. datapoints) in training, we only care
|
||||
# about removing any row with NaNs
|
||||
# if labels has multiple columns (user wants to train multiple models), we detect here
|
||||
labels = unfiltered_df.filter(label_list, axis=1)
|
||||
labels = unfiltered_df.filter(label_list or [], axis=1)
|
||||
drop_index_labels = pd.isnull(labels).any(axis=1)
|
||||
drop_index_labels = (
|
||||
drop_index_labels.replace(True, 1).replace(False, 0).infer_objects(copy=False)
|
||||
@@ -654,8 +654,8 @@ class FreqaiDataKitchen:
|
||||
pair: str,
|
||||
tf: str,
|
||||
strategy: IStrategy,
|
||||
corr_dataframes: dict = {},
|
||||
base_dataframes: dict = {},
|
||||
corr_dataframes: dict,
|
||||
base_dataframes: dict,
|
||||
is_corr_pairs: bool = False,
|
||||
) -> DataFrame:
|
||||
"""
|
||||
@@ -773,10 +773,10 @@ class FreqaiDataKitchen:
|
||||
def use_strategy_to_populate_indicators( # noqa: C901
|
||||
self,
|
||||
strategy: IStrategy,
|
||||
corr_dataframes: dict = {},
|
||||
base_dataframes: dict = {},
|
||||
corr_dataframes: dict[str, DataFrame] | None = None,
|
||||
base_dataframes: dict[str, dict[str, DataFrame]] | None = None,
|
||||
pair: str = "",
|
||||
prediction_dataframe: DataFrame = pd.DataFrame(),
|
||||
prediction_dataframe: DataFrame | None = None,
|
||||
do_corr_pairs: bool = True,
|
||||
) -> DataFrame:
|
||||
"""
|
||||
@@ -793,6 +793,10 @@ class FreqaiDataKitchen:
|
||||
:return:
|
||||
dataframe: DataFrame = dataframe containing populated indicators
|
||||
"""
|
||||
if not corr_dataframes:
|
||||
corr_dataframes = {}
|
||||
if not base_dataframes:
|
||||
base_dataframes = {}
|
||||
|
||||
# check if the user is using the deprecated populate_any_indicators function
|
||||
new_version = inspect.getsource(strategy.populate_any_indicators) == (
|
||||
@@ -822,7 +826,7 @@ class FreqaiDataKitchen:
|
||||
if tf not in corr_dataframes[p]:
|
||||
corr_dataframes[p][tf] = pd.DataFrame()
|
||||
|
||||
if not prediction_dataframe.empty:
|
||||
if prediction_dataframe is not None and not prediction_dataframe.empty:
|
||||
dataframe = prediction_dataframe.copy()
|
||||
base_dataframes[self.config["timeframe"]] = dataframe.copy()
|
||||
else:
|
||||
|
||||
@@ -618,7 +618,7 @@ class IFreqaiModel(ABC):
|
||||
)
|
||||
|
||||
unfiltered_dataframe = dk.use_strategy_to_populate_indicators(
|
||||
strategy, corr_dataframes, base_dataframes, pair
|
||||
strategy, corr_dataframes=corr_dataframes, base_dataframes=base_dataframes, pair=pair
|
||||
)
|
||||
|
||||
trained_timestamp = new_trained_timerange.stopts
|
||||
|
||||
@@ -25,7 +25,7 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
|
||||
criterion: nn.Module,
|
||||
device: str,
|
||||
data_convertor: PyTorchDataConvertor,
|
||||
model_meta_data: dict[str, Any] = {},
|
||||
model_meta_data: dict[str, Any] | None = None,
|
||||
window_size: int = 1,
|
||||
tb_logger: Any = None,
|
||||
**kwargs,
|
||||
@@ -45,6 +45,8 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
|
||||
:param n_epochs: The maximum number batches to use for evaluation.
|
||||
:param batch_size: The size of the batches to use during training.
|
||||
"""
|
||||
if model_meta_data is None:
|
||||
model_meta_data = {}
|
||||
self.model = model
|
||||
self.optimizer = optimizer
|
||||
self.criterion = criterion
|
||||
|
||||
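Reviewer note: several of the signature changes above (in `BaseEnvironment.__init__`, `make_env`, the FreqAI data kitchen helpers, and `PyTorchModelTrainer`) replace mutable default arguments such as `{}` and `DataFrame()` with required parameters or `None`. A generic sketch of the pitfall being avoided (not freqtrade code):

```python
def bad(meta: dict = {}):
    # The same dict object is shared by every call that omits `meta`.
    meta.setdefault("calls", 0)
    meta["calls"] += 1
    return meta["calls"]

def good(meta: dict | None = None):
    if meta is None:
        meta = {}  # a fresh dict per call, as the updated signatures now do
    meta.setdefault("calls", 0)
    meta["calls"] += 1
    return meta["calls"]

print(bad(), bad())    # 1 2  - state leaks between calls
print(good(), good())  # 1 1
```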
@@ -64,7 +64,7 @@ from freqtrade.rpc.rpc_types import (
|
||||
)
|
||||
from freqtrade.strategy.interface import IStrategy
|
||||
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
|
||||
from freqtrade.util import FtPrecise, MeasureTime, dt_from_ts
|
||||
from freqtrade.util import FtPrecise, MeasureTime, PeriodicCache, dt_from_ts, dt_now
|
||||
from freqtrade.util.migrations.binance_mig import migrate_binance_futures_names
|
||||
from freqtrade.wallets import Wallets
|
||||
|
||||
@@ -145,7 +145,11 @@ class FreqtradeBot(LoggingMixin):
|
||||
else None
|
||||
)
|
||||
|
||||
self.active_pair_whitelist = self._refresh_active_whitelist()
|
||||
logger.info("Starting initial pairlist refresh")
|
||||
with MeasureTime(
|
||||
lambda duration, _: logger.info(f"Initial Pairlist refresh took {duration:.2f}s"), 0
|
||||
):
|
||||
self.active_pair_whitelist = self._refresh_active_whitelist()
|
||||
|
||||
# Set initial bot state from config
|
||||
initial_state = self.config.get("initial_state")
|
||||
@@ -154,6 +158,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
# Protect exit-logic from forcesell and vice versa
|
||||
self._exit_lock = Lock()
|
||||
timeframe_secs = timeframe_to_seconds(self.strategy.timeframe)
|
||||
self._exit_reason_cache = PeriodicCache(100, ttl=timeframe_secs)
|
||||
LoggingMixin.__init__(self, logger, timeframe_secs)
|
||||
|
||||
self._schedule = Scheduler()
|
||||
@@ -898,14 +903,14 @@ class FreqtradeBot(LoggingMixin):
|
||||
|
||||
msg = (
|
||||
f"Position adjust: about to create a new order for {pair} with stake_amount: "
|
||||
f"{stake_amount} for {trade}"
|
||||
f"{stake_amount} and price: {enter_limit_requested} for {trade}"
|
||||
if mode == "pos_adjust"
|
||||
else (
|
||||
f"Replacing {side} order: about create a new order for {pair} with stake_amount: "
|
||||
f"{stake_amount} ..."
|
||||
f"{stake_amount} and price: {enter_limit_requested} ..."
|
||||
if mode == "replace"
|
||||
else f"{name} signal found: about create a new trade for {pair} with stake_amount: "
|
||||
f"{stake_amount} ..."
|
||||
f"{stake_amount} and price: {enter_limit_requested} ..."
|
||||
)
|
||||
)
|
||||
logger.info(msg)
|
||||
@@ -1374,6 +1379,15 @@ class FreqtradeBot(LoggingMixin):
|
||||
for should_exit in exits:
|
||||
if should_exit.exit_flag:
|
||||
exit_tag1 = exit_tag if should_exit.exit_type == ExitType.EXIT_SIGNAL else None
|
||||
if trade.has_open_orders:
|
||||
if prev_eval := self._exit_reason_cache.get(
|
||||
f"{trade.pair}_{trade.id}_{exit_tag1 or should_exit.exit_reason}", None
|
||||
):
|
||||
logger.debug(
|
||||
f"Exit reason already seen this candle, first seen at {prev_eval}"
|
||||
)
|
||||
continue
|
||||
|
||||
logger.info(
|
||||
f"Exit for {trade.pair} detected. Reason: {should_exit.exit_type}"
|
||||
f"{f' Tag: {exit_tag1}' if exit_tag1 is not None else ''}"
|
||||
@@ -1593,27 +1607,29 @@ class FreqtradeBot(LoggingMixin):
|
||||
self.replace_order(order, open_order, trade)
|
||||
|
||||
def handle_cancel_order(
|
||||
self, order: CcxtOrder, order_obj: Order, trade: Trade, reason: str
|
||||
) -> None:
|
||||
self, order: CcxtOrder, order_obj: Order, trade: Trade, reason: str, replacing: bool = False
|
||||
) -> bool:
|
||||
"""
|
||||
Check if current analyzed order timed out and cancel if necessary.
|
||||
:param order: Order dict grabbed with exchange.fetch_order()
|
||||
:param order_obj: Order object from the database.
|
||||
:param trade: Trade object.
|
||||
:return: None
|
||||
:return: True if the order was canceled, False otherwise.
|
||||
"""
|
||||
if order["side"] == trade.entry_side:
|
||||
self.handle_cancel_enter(trade, order, order_obj, reason)
|
||||
return self.handle_cancel_enter(trade, order, order_obj, reason, replacing)
|
||||
else:
|
||||
canceled = self.handle_cancel_exit(trade, order, order_obj, reason)
|
||||
canceled_count = trade.get_canceled_exit_order_count()
|
||||
max_timeouts = self.config.get("unfilledtimeout", {}).get("exit_timeout_count", 0)
|
||||
if canceled and max_timeouts > 0 and canceled_count >= max_timeouts:
|
||||
logger.warning(
|
||||
f"Emergency exiting trade {trade}, as the exit order "
|
||||
f"timed out {max_timeouts} times. force selling {order['amount']}."
|
||||
)
|
||||
self.emergency_exit(trade, order["price"], order["amount"])
|
||||
if not replacing:
|
||||
canceled_count = trade.get_canceled_exit_order_count()
|
||||
max_timeouts = self.config.get("unfilledtimeout", {}).get("exit_timeout_count", 0)
|
||||
if canceled and max_timeouts > 0 and canceled_count >= max_timeouts:
|
||||
logger.warning(
|
||||
f"Emergency exiting trade {trade}, as the exit order "
|
||||
f"timed out {max_timeouts} times. force selling {order['amount']}."
|
||||
)
|
||||
self.emergency_exit(trade, order["price"], order["amount"])
|
||||
return canceled
|
||||
|
||||
def emergency_exit(
|
||||
self, trade: Trade, price: float, sub_trade_amt: float | None = None
|
||||
@@ -1649,9 +1665,9 @@ class FreqtradeBot(LoggingMixin):
|
||||
def replace_order(self, order: CcxtOrder, order_obj: Order | None, trade: Trade) -> None:
|
||||
"""
|
||||
Check if current analyzed entry order should be replaced or simply cancelled.
|
||||
To simply cancel the existing order(no replacement) adjust_entry_price() should return None
|
||||
To maintain existing order adjust_entry_price() should return order_obj.price
|
||||
To replace existing order adjust_entry_price() should return desired price for limit order
|
||||
To simply cancel the existing order(no replacement) adjust_order_price() should return None
|
||||
To maintain existing order adjust_order_price() should return order_obj.price
|
||||
To replace existing order adjust_order_price() should return desired price for limit order
|
||||
:param order: Order dict grabbed with exchange.fetch_order()
|
||||
:param order_obj: Order object.
|
||||
:param trade: Trade object.
|
||||
@@ -1665,17 +1681,17 @@ class FreqtradeBot(LoggingMixin):
|
||||
self.strategy.timeframe, latest_candle_open_date
|
||||
)
|
||||
# Check if new candle
|
||||
if (
|
||||
order_obj
|
||||
and order_obj.side == trade.entry_side
|
||||
and latest_candle_close_date > order_obj.order_date_utc
|
||||
):
|
||||
if order_obj and latest_candle_close_date > order_obj.order_date_utc:
|
||||
is_entry = order_obj.side == trade.entry_side
|
||||
# New candle
|
||||
proposed_rate = self.exchange.get_rate(
|
||||
trade.pair, side="entry", is_short=trade.is_short, refresh=True
|
||||
trade.pair,
|
||||
side="entry" if is_entry else "exit",
|
||||
is_short=trade.is_short,
|
||||
refresh=True,
|
||||
)
|
||||
adjusted_entry_price = strategy_safe_wrapper(
|
||||
self.strategy.adjust_entry_price, default_retval=order_obj.safe_placement_price
|
||||
adjusted_price = strategy_safe_wrapper(
|
||||
self.strategy.adjust_order_price, default_retval=order_obj.safe_placement_price
|
||||
)(
|
||||
trade=trade,
|
||||
order=order_obj,
|
||||
@@ -1685,36 +1701,51 @@ class FreqtradeBot(LoggingMixin):
|
||||
current_order_rate=order_obj.safe_placement_price,
|
||||
entry_tag=trade.enter_tag,
|
||||
side=trade.trade_direction,
|
||||
is_entry=is_entry,
|
||||
)
|
||||
|
||||
replacing = True
|
||||
cancel_reason = constants.CANCEL_REASON["REPLACE"]
|
||||
if not adjusted_entry_price:
|
||||
if not adjusted_price:
|
||||
replacing = False
|
||||
cancel_reason = constants.CANCEL_REASON["USER_CANCEL"]
|
||||
if order_obj.safe_placement_price != adjusted_entry_price:
|
||||
|
||||
if order_obj.safe_placement_price != adjusted_price:
|
||||
# cancel existing order if new price is supplied or None
|
||||
res = self.handle_cancel_enter(
|
||||
trade, order, order_obj, cancel_reason, replacing=replacing
|
||||
res = self.handle_cancel_order(
|
||||
order, order_obj, trade, cancel_reason, replacing=replacing
|
||||
)
|
||||
if not res:
|
||||
self.replace_order_failed(
|
||||
trade, f"Could not fully cancel order for {trade}, therefore not replacing."
|
||||
)
|
||||
return
|
||||
if adjusted_entry_price:
|
||||
if adjusted_price:
|
||||
# place new order only if new price is supplied
|
||||
try:
|
||||
if not self.execute_entry(
|
||||
pair=trade.pair,
|
||||
stake_amount=(
|
||||
order_obj.safe_remaining * order_obj.safe_price / trade.leverage
|
||||
),
|
||||
price=adjusted_entry_price,
|
||||
trade=trade,
|
||||
is_short=trade.is_short,
|
||||
mode="replace",
|
||||
):
|
||||
if is_entry:
|
||||
succeeded = self.execute_entry(
|
||||
pair=trade.pair,
|
||||
stake_amount=(
|
||||
order_obj.safe_remaining * order_obj.safe_price / trade.leverage
|
||||
),
|
||||
price=adjusted_price,
|
||||
trade=trade,
|
||||
is_short=trade.is_short,
|
||||
mode="replace",
|
||||
)
|
||||
else:
|
||||
succeeded = self.execute_trade_exit(
|
||||
trade,
|
||||
adjusted_price,
|
||||
exit_check=ExitCheckTuple(
|
||||
exit_type=ExitType.CUSTOM_EXIT,
|
||||
exit_reason=order_obj.ft_order_tag or "order_replaced",
|
||||
),
|
||||
ordertype="limit",
|
||||
sub_trade_amt=order_obj.safe_remaining,
|
||||
)
|
||||
if not succeeded:
|
||||
self.replace_order_failed(
|
||||
trade, f"Could not replace order for {trade}."
|
||||
)
|
||||
@@ -1774,7 +1805,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
if trade.has_open_orders:
|
||||
oo = trade.select_order(side, True)
|
||||
if oo is not None:
|
||||
if (price == oo.price) and (side == oo.side) and (amount == oo.amount):
|
||||
if price == oo.price and side == oo.side and amount == oo.amount:
|
||||
logger.info(
|
||||
f"A similar open order was found for {trade.pair}. "
|
||||
f"Keeping existing {trade.exit_side} order. {price=}, {amount=}"
|
||||
@@ -1870,7 +1901,10 @@ class FreqtradeBot(LoggingMixin):
|
||||
# to the trade object
|
||||
self.update_trade_state(trade, order_id, corder)
|
||||
|
||||
logger.info(f"Partial {trade.entry_side} order timeout for {trade}.")
|
||||
logger.info(
|
||||
f"Partial {trade.entry_side} order timeout for {trade}. Filled: {filled_amount}, "
|
||||
f"total: {order_obj.ft_amount}"
|
||||
)
|
||||
order_obj.ft_cancel_reason += f", {constants.CANCEL_REASON['PARTIALLY_FILLED']}"
|
||||
|
||||
self.wallets.update()
|
||||
@@ -2092,6 +2126,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
self.handle_insufficient_funds(trade)
|
||||
return False
|
||||
|
||||
self._exit_reason_cache[f"{trade.pair}_{trade.id}_{exit_reason}"] = dt_now()
|
||||
order_obj = Order.parse_from_ccxt_object(order, trade.pair, trade.exit_side, amount, limit)
|
||||
order_obj.ft_order_tag = exit_reason
|
||||
trade.orders.append(order_obj)
|
||||
@@ -2555,4 +2590,15 @@ class FreqtradeBot(LoggingMixin):
|
||||
max_custom_price_allowed = proposed_price + (proposed_price * cust_p_max_dist_r)
|
||||
|
||||
# Bracket between min_custom_price_allowed and max_custom_price_allowed
|
||||
return max(min(valid_custom_price, max_custom_price_allowed), min_custom_price_allowed)
|
||||
final_price = max(
|
||||
min(valid_custom_price, max_custom_price_allowed), min_custom_price_allowed
|
||||
)
|
||||
|
||||
# Log a warning if the custom price was adjusted by clamping.
|
||||
if final_price != valid_custom_price:
|
||||
logger.info(
|
||||
f"Custom price adjusted from {valid_custom_price} to {final_price} based on "
|
||||
"custom_price_max_distance_ratio of {cust_p_max_dist_r}."
|
||||
)
|
||||
|
||||
return final_price
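As a quick illustration of the bracketing above, here is a minimal sketch with assumed numbers (a 2% custom_price_max_distance_ratio and a strategy price 5% away from the proposed price; none of these values come from the diff):

proposed_price = 100.0
cust_p_max_dist_r = 0.02      # custom_price_max_distance_ratio
valid_custom_price = 105.0    # price requested by the strategy callback

min_custom_price_allowed = proposed_price - (proposed_price * cust_p_max_dist_r)  # 98.0
max_custom_price_allowed = proposed_price + (proposed_price * cust_p_max_dist_r)  # 102.0
final_price = max(min(valid_custom_price, max_custom_price_allowed), min_custom_price_allowed)
assert final_price == 102.0   # clamped to the upper bound, so the adjustment is logged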
|
||||
|
||||
@@ -75,8 +75,7 @@ def setup_logging(config: Config) -> None:
|
||||
# config['logfilename']), which defaults to '/dev/log', applicable for most
|
||||
# of the systems.
|
||||
address = (s[1], int(s[2])) if len(s) > 2 else s[1] if len(s) > 1 else "/dev/log"
|
||||
handler_sl = get_existing_handlers(SysLogHandler)
|
||||
if handler_sl:
|
||||
if handler_sl := get_existing_handlers(SysLogHandler):
|
||||
logging.root.removeHandler(handler_sl)
|
||||
handler_sl = SysLogHandler(address=address)
|
||||
# No datetime field for logging into syslog, to allow syslog
|
||||
@@ -92,8 +91,7 @@ def setup_logging(config: Config) -> None:
|
||||
"You need the cysystemd python package be installed in "
|
||||
"order to use logging to journald."
|
||||
)
|
||||
handler_jd = get_existing_handlers(JournaldLogHandler)
|
||||
if handler_jd:
|
||||
if handler_jd := get_existing_handlers(JournaldLogHandler):
|
||||
logging.root.removeHandler(handler_jd)
|
||||
handler_jd = JournaldLogHandler()
|
||||
# No datetime field for logging into journald, to allow syslog
|
||||
@@ -102,8 +100,7 @@ def setup_logging(config: Config) -> None:
|
||||
handler_jd.setFormatter(Formatter("%(name)s - %(levelname)s - %(message)s"))
|
||||
logging.root.addHandler(handler_jd)
|
||||
else:
|
||||
handler_rf = get_existing_handlers(RotatingFileHandler)
|
||||
if handler_rf:
|
||||
if handler_rf := get_existing_handlers(RotatingFileHandler):
|
||||
logging.root.removeHandler(handler_rf)
|
||||
try:
|
||||
logfile_path = Path(logfile)
|
||||
|
||||
@@ -396,6 +396,8 @@ class Backtesting:
|
||||
self.canceled_trade_entries = 0
|
||||
self.canceled_entry_orders = 0
|
||||
self.replaced_entry_orders = 0
|
||||
self.canceled_exit_orders = 0
|
||||
self.replaced_exit_orders = 0
|
||||
self.dataprovider.clear_cache()
|
||||
if enable_protections:
|
||||
self._load_protections(self.strategy)
|
||||
@@ -601,7 +603,7 @@ class Backtesting:
|
||||
# This should not be reached...
|
||||
return row[OPEN_IDX]
|
||||
|
||||
def _get_adjust_trade_entry_for_candle(
|
||||
def _check_adjust_trade_for_candle(
|
||||
self, trade: LocalTrade, row: tuple, current_time: datetime
|
||||
) -> LocalTrade:
|
||||
current_rate: float = row[OPEN_IDX]
|
||||
@@ -869,7 +871,7 @@ class Backtesting:
|
||||
|
||||
# Check if we need to adjust our current positions
|
||||
if self.strategy.position_adjustment_enable:
|
||||
trade = self._get_adjust_trade_entry_for_candle(trade, row, current_time)
|
||||
trade = self._check_adjust_trade_for_candle(trade, row, current_time)
|
||||
|
||||
if trade.is_open:
|
||||
enter = row[SHORT_IDX] if trade.is_short else row[LONG_IDX]
|
||||
@@ -1234,8 +1236,8 @@ class Backtesting:
|
||||
for order in [o for o in trade.orders if o.ft_is_open]:
|
||||
if order.side == trade.entry_side:
|
||||
self.canceled_entry_orders += 1
|
||||
# elif order.side == trade.exit_side:
|
||||
# self.canceled_exit_orders += 1
|
||||
elif order.side == trade.exit_side:
|
||||
self.canceled_exit_orders += 1
|
||||
# canceled orders are removed from the trade
|
||||
del trade.orders[trade.orders.index(order)]
|
||||
|
||||
@@ -1299,9 +1301,10 @@ class Backtesting:
|
||||
Returns True if the trade should be deleted.
|
||||
"""
|
||||
# only check on new candles for open entry orders
|
||||
if order.side == trade.entry_side and current_time > order.order_date_utc:
|
||||
if current_time > order.order_date_utc:
|
||||
is_entry = order.side == trade.entry_side
|
||||
requested_rate = strategy_safe_wrapper(
|
||||
self.strategy.adjust_entry_price, default_retval=order.ft_price
|
||||
self.strategy.adjust_order_price, default_retval=order.ft_price
|
||||
)(
|
||||
trade=trade, # type: ignore[arg-type]
|
||||
order=order,
|
||||
@@ -1311,6 +1314,7 @@ class Backtesting:
|
||||
current_order_rate=order.ft_price,
|
||||
entry_tag=trade.enter_tag,
|
||||
side=trade.trade_direction,
|
||||
is_entry=is_entry,
|
||||
) # default value is current order price
|
||||
|
||||
# cancel existing order whenever a new rate is requested (or None)
|
||||
@@ -1319,22 +1323,35 @@ class Backtesting:
|
||||
return False
|
||||
else:
|
||||
del trade.orders[trade.orders.index(order)]
|
||||
self.canceled_entry_orders += 1
|
||||
if is_entry:
|
||||
self.canceled_entry_orders += 1
|
||||
else:
|
||||
self.canceled_exit_orders += 1
|
||||
|
||||
# place new order if result was not None
|
||||
if requested_rate:
|
||||
self._enter_trade(
|
||||
pair=trade.pair,
|
||||
row=row,
|
||||
trade=trade,
|
||||
requested_rate=requested_rate,
|
||||
requested_stake=(order.safe_remaining * order.ft_price / trade.leverage),
|
||||
direction="short" if trade.is_short else "long",
|
||||
)
|
||||
if is_entry:
|
||||
self._enter_trade(
|
||||
pair=trade.pair,
|
||||
row=row,
|
||||
trade=trade,
|
||||
requested_rate=requested_rate,
|
||||
requested_stake=(order.safe_remaining * order.ft_price / trade.leverage),
|
||||
direction="short" if trade.is_short else "long",
|
||||
)
|
||||
self.replaced_entry_orders += 1
|
||||
else:
|
||||
self._exit_trade(
|
||||
trade=trade,
|
||||
sell_row=row,
|
||||
close_rate=requested_rate,
|
||||
amount=order.safe_remaining,
|
||||
exit_reason=order.ft_order_tag,
|
||||
)
|
||||
self.replaced_exit_orders += 1
|
||||
# Delete trade if no successful entries happened (if placing the new order failed)
|
||||
if not trade.has_open_orders and trade.nr_of_successful_entries == 0:
|
||||
if not trade.has_open_orders and is_entry and trade.nr_of_successful_entries == 0:
|
||||
return True
|
||||
self.replaced_entry_orders += 1
|
||||
else:
|
||||
# assumption: there can't be multiple open entry orders at any given time
|
||||
return trade.nr_of_successful_entries == 0
|
||||
|
||||
@@ -312,6 +312,7 @@ def text_table_add_metrics(strat_results: dict) -> None:
|
||||
("Sortino", f"{strat_results['sortino']:.2f}" if "sortino" in strat_results else "N/A"),
|
||||
("Sharpe", f"{strat_results['sharpe']:.2f}" if "sharpe" in strat_results else "N/A"),
|
||||
("Calmar", f"{strat_results['calmar']:.2f}" if "calmar" in strat_results else "N/A"),
|
||||
("SQN", f"{strat_results['sqn']:.2f}" if "sqn" in strat_results else "N/A"),
|
||||
(
|
||||
"Profit factor",
|
||||
(
|
||||
|
||||
@@ -16,6 +16,7 @@ from freqtrade.data.metrics import (
|
||||
calculate_max_drawdown,
|
||||
calculate_sharpe,
|
||||
calculate_sortino,
|
||||
calculate_sqn,
|
||||
)
|
||||
from freqtrade.ft_types import BacktestResultType
|
||||
from freqtrade.util import decimals_per_coin, fmt_coin, get_dry_run_wallet
|
||||
@@ -468,6 +469,7 @@ def generate_strategy_stats(
|
||||
"sortino": calculate_sortino(results, min_date, max_date, start_balance),
|
||||
"sharpe": calculate_sharpe(results, min_date, max_date, start_balance),
|
||||
"calmar": calculate_calmar(results, min_date, max_date, start_balance),
|
||||
"sqn": calculate_sqn(results, start_balance),
|
||||
"profit_factor": profit_factor,
|
||||
"backtest_start": min_date.strftime(DATETIME_PRINT_FORMAT),
|
||||
"backtest_start_ts": int(min_date.timestamp() * 1000),
|
||||
|
||||
@@ -124,6 +124,7 @@ def migrate_trades_and_orders_table(
|
||||
funding_fees = get_column_def(cols, "funding_fees", "0.0")
|
||||
funding_fee_running = get_column_def(cols, "funding_fee_running", "null")
|
||||
max_stake_amount = get_column_def(cols, "max_stake_amount", "stake_amount")
|
||||
record_version = get_column_def(cols, "record_version", "1")
|
||||
|
||||
# If ticker-interval existed use that, else null.
|
||||
if has_column(cols, "ticker_interval"):
|
||||
@@ -180,7 +181,7 @@ def migrate_trades_and_orders_table(
|
||||
trading_mode, leverage, liquidation_price, is_short,
|
||||
interest_rate, funding_fees, funding_fee_running, realized_profit,
|
||||
amount_precision, price_precision, precision_mode, precision_mode_price, contract_size,
|
||||
max_stake_amount
|
||||
max_stake_amount, record_version
|
||||
)
|
||||
select id, lower(exchange), pair, {base_currency} base_currency,
|
||||
{stake_currency} stake_currency,
|
||||
@@ -210,7 +211,8 @@ def migrate_trades_and_orders_table(
|
||||
{realized_profit} realized_profit,
|
||||
{amount_precision} amount_precision, {price_precision} price_precision,
|
||||
{precision_mode} precision_mode, {precision_mode_price} precision_mode_price,
|
||||
{contract_size} contract_size, {max_stake_amount} max_stake_amount
|
||||
{contract_size} contract_size, {max_stake_amount} max_stake_amount,
|
||||
{record_version} record_version
|
||||
from {trade_back_name}
|
||||
"""
|
||||
)
|
||||
@@ -329,6 +331,25 @@ def fix_old_dry_orders(engine):
|
||||
connection.execute(stmt)
|
||||
|
||||
|
||||
def fix_wrong_max_stake_amount(engine):
|
||||
"""
|
||||
Fix max_stake_amount for leveraged closed trades
|
||||
This caused record_version to be bumped to 2.
|
||||
"""
|
||||
with engine.begin() as connection:
|
||||
stmt = (
|
||||
update(Trade)
|
||||
.where(
|
||||
Trade.record_version < 2,
|
||||
Trade.leverage > 1,
|
||||
Trade.is_open.is_(False),
|
||||
Trade.max_stake_amount != 0,
|
||||
)
|
||||
.values(max_stake_amount=Trade.max_stake_amount / Trade.leverage, record_version=2)
|
||||
)
|
||||
connection.execute(stmt)
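A worked example of what this migration does to a single row, using assumed numbers rather than data from the diff: a closed trade opened at 5x leverage previously stored the full position value in max_stake_amount; the update rescales it to the margin actually committed and bumps record_version so the fix is applied only once.

leverage = 5
stored_max_stake_amount = 500.0   # pre-migration: full position value
corrected = stored_max_stake_amount / leverage
record_version = 2                # marks the row as already fixed
assert corrected == 100.0         # stake amount actually put up as margin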
|
||||
|
||||
|
||||
def check_migrate(engine, decl_base, previous_tables) -> None:
|
||||
"""
|
||||
Checks if migration is necessary and migrates if necessary
|
||||
@@ -350,7 +371,7 @@ def check_migrate(engine, decl_base, previous_tables) -> None:
|
||||
# if ('orders' not in previous_tables
|
||||
# or not has_column(cols_orders, 'funding_fee')):
|
||||
migrating = False
|
||||
if not has_column(cols_trades, "precision_mode_price"):
|
||||
if not has_column(cols_trades, "record_version"):
|
||||
# if not has_column(cols_orders, "ft_order_tag"):
|
||||
migrating = True
|
||||
logger.info(
|
||||
@@ -383,6 +404,7 @@ def check_migrate(engine, decl_base, previous_tables) -> None:
|
||||
|
||||
set_sqlite_to_wal(engine)
|
||||
fix_old_dry_orders(engine)
|
||||
fix_wrong_max_stake_amount(engine)
|
||||
|
||||
if migrating:
|
||||
logger.info("Database migration finished.")
|
||||
|
||||
@@ -464,6 +464,8 @@ class LocalTrade:
|
||||
# Used to keep running funding fees - between the last filled order and now
|
||||
# Shall not be used for calculations!
|
||||
funding_fee_running: float | None = None
|
||||
# v 2 -> correct max_stake_amount calculation for leveraged trades
|
||||
record_version: int = 2
|
||||
|
||||
@property
|
||||
def stoploss_or_liquidation(self) -> float:
|
||||
@@ -1243,7 +1245,7 @@ class LocalTrade:
|
||||
total_stake += self._calc_open_trade_value(tmp_amount, price)
|
||||
max_stake_amount += tmp_amount * price
|
||||
self.funding_fees = funding_fees
|
||||
self.max_stake_amount = float(max_stake_amount)
|
||||
self.max_stake_amount = float(max_stake_amount) / (self.leverage or 1.0)
|
||||
|
||||
if close_profit:
|
||||
self.close_profit = close_profit
|
||||
@@ -1535,45 +1537,47 @@ class LocalTrade:
|
||||
:param json_str: json string to parse
|
||||
:return: Trade instance
|
||||
"""
|
||||
from uuid import uuid4
|
||||
|
||||
import rapidjson
|
||||
|
||||
data = rapidjson.loads(json_str)
|
||||
trade = cls(
|
||||
__FROM_JSON=True,
|
||||
id=data["trade_id"],
|
||||
id=data.get("trade_id"),
|
||||
pair=data["pair"],
|
||||
base_currency=data["base_currency"],
|
||||
stake_currency=data["quote_currency"],
|
||||
base_currency=data.get("base_currency"),
|
||||
stake_currency=data.get("quote_currency"),
|
||||
is_open=data["is_open"],
|
||||
exchange=data["exchange"],
|
||||
exchange=data.get("exchange", "import"),
|
||||
amount=data["amount"],
|
||||
amount_requested=data["amount_requested"],
|
||||
amount_requested=data.get("amount_requested", data["amount"]),
|
||||
stake_amount=data["stake_amount"],
|
||||
strategy=data["strategy"],
|
||||
strategy=data.get("strategy"),
|
||||
enter_tag=data["enter_tag"],
|
||||
timeframe=data["timeframe"],
|
||||
timeframe=data.get("timeframe"),
|
||||
fee_open=data["fee_open"],
|
||||
fee_open_cost=data["fee_open_cost"],
|
||||
fee_open_currency=data["fee_open_currency"],
|
||||
fee_open_cost=data.get("fee_open_cost"),
|
||||
fee_open_currency=data.get("fee_open_currency"),
|
||||
fee_close=data["fee_close"],
|
||||
fee_close_cost=data["fee_close_cost"],
|
||||
fee_close_currency=data["fee_close_currency"],
|
||||
fee_close_cost=data.get("fee_close_cost"),
|
||||
fee_close_currency=data.get("fee_close_currency"),
|
||||
open_date=datetime.fromtimestamp(data["open_timestamp"] // 1000, tz=timezone.utc),
|
||||
open_rate=data["open_rate"],
|
||||
open_rate_requested=data["open_rate_requested"],
|
||||
open_trade_value=data["open_trade_value"],
|
||||
open_rate_requested=data.get("open_rate_requested", data["open_rate"]),
|
||||
open_trade_value=data.get("open_trade_value"),
|
||||
close_date=(
|
||||
datetime.fromtimestamp(data["close_timestamp"] // 1000, tz=timezone.utc)
|
||||
if data["close_timestamp"]
|
||||
else None
|
||||
),
|
||||
realized_profit=data["realized_profit"],
|
||||
realized_profit=data.get("realized_profit", 0),
|
||||
close_rate=data["close_rate"],
|
||||
close_rate_requested=data["close_rate_requested"],
|
||||
close_profit=data["close_profit"],
|
||||
close_profit_abs=data["close_profit_abs"],
|
||||
close_rate_requested=data.get("close_rate_requested", data["close_rate"]),
|
||||
close_profit=data.get("close_profit", data.get("profit_ratio")),
|
||||
close_profit_abs=data.get("close_profit_abs", data.get("profit_abs")),
|
||||
exit_reason=data["exit_reason"],
|
||||
exit_order_status=data["exit_order_status"],
|
||||
exit_order_status=data.get("exit_order_status"),
|
||||
stop_loss=data["stop_loss_abs"],
|
||||
stop_loss_pct=data["stop_loss_ratio"],
|
||||
initial_stop_loss=data["initial_stop_loss_abs"],
|
||||
@@ -1581,11 +1585,11 @@ class LocalTrade:
|
||||
min_rate=data["min_rate"],
|
||||
max_rate=data["max_rate"],
|
||||
leverage=data["leverage"],
|
||||
interest_rate=data["interest_rate"],
|
||||
liquidation_price=data["liquidation_price"],
|
||||
interest_rate=data.get("interest_rate"),
|
||||
liquidation_price=data.get("liquidation_price"),
|
||||
is_short=data["is_short"],
|
||||
trading_mode=data["trading_mode"],
|
||||
funding_fees=data["funding_fees"],
|
||||
trading_mode=data.get("trading_mode"),
|
||||
funding_fees=data.get("funding_fees"),
|
||||
amount_precision=data.get("amount_precision", None),
|
||||
price_precision=data.get("price_precision", None),
|
||||
precision_mode=data.get("precision_mode", None),
|
||||
@@ -1597,23 +1601,25 @@ class LocalTrade:
|
||||
amount=order["amount"],
|
||||
ft_amount=order["amount"],
|
||||
ft_order_side=order["ft_order_side"],
|
||||
ft_pair=order["pair"],
|
||||
ft_is_open=order["is_open"],
|
||||
order_id=order["order_id"],
|
||||
status=order["status"],
|
||||
average=order["average"],
|
||||
ft_pair=order.get("pair", data["pair"]),
|
||||
ft_is_open=order.get("is_open", False),
|
||||
order_id=order.get("order_id", uuid4().hex),
|
||||
status=order.get("status"),
|
||||
average=order.get("average", order.get("safe_price")),
|
||||
cost=order["cost"],
|
||||
filled=order["filled"],
|
||||
order_date=datetime.strptime(order["order_date"], DATETIME_PRINT_FORMAT),
|
||||
filled=order.get("filled", order["amount"]),
|
||||
order_date=datetime.strptime(order["order_date"], DATETIME_PRINT_FORMAT)
|
||||
if order.get("order_date")
|
||||
else None,
|
||||
order_filled_date=(
|
||||
datetime.fromtimestamp(order["order_filled_timestamp"] // 1000, tz=timezone.utc)
|
||||
if order["order_filled_timestamp"]
|
||||
else None
|
||||
),
|
||||
order_type=order["order_type"],
|
||||
price=order["price"],
|
||||
ft_price=order["price"],
|
||||
remaining=order["remaining"],
|
||||
order_type=order.get("order_type"),
|
||||
price=order.get("price", order.get("safe_price")),
|
||||
ft_price=order.get("price", order.get("safe_price")),
|
||||
remaining=order.get("remaining", 0.0),
|
||||
funding_fee=order.get("funding_fee", None),
|
||||
ft_order_tag=order.get("ft_order_tag", None),
|
||||
)
|
||||
@@ -1748,6 +1754,8 @@ class Trade(ModelBase, LocalTrade):
|
||||
Float(), nullable=True, default=None
|
||||
)
|
||||
|
||||
record_version: Mapped[int] = mapped_column(Integer, nullable=False, default=2) # type: ignore
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
from_json = kwargs.pop("__FROM_JSON", None)
|
||||
super().__init__(**kwargs)
|
||||
|
||||
@@ -242,6 +242,14 @@ class StrategyResolver(IResolver):
|
||||
if has_after_fill:
|
||||
strategy._ft_stop_uses_after_fill = True
|
||||
|
||||
if check_override(strategy, IStrategy, "adjust_order_price") and (
|
||||
check_override(strategy, IStrategy, "adjust_entry_price")
|
||||
or check_override(strategy, IStrategy, "adjust_exit_price")
|
||||
):
|
||||
raise OperationalException(
|
||||
"If you implement `adjust_order_price`, `adjust_entry_price` and "
|
||||
"`adjust_exit_price` will not be used. Please pick one approach for your strategy."
|
||||
)
|
||||
return strategy
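Put differently, the combined adjust_order_price callback and the older adjust_entry_price / adjust_exit_price pair are mutually exclusive. A hedged sketch of a strategy that would now be rejected at load time (the class name is illustrative; signal/indicator methods are omitted for brevity):

from freqtrade.strategy import IStrategy


class BothCallbacksStrategy(IStrategy):
    # Overriding the combined callback ...
    def adjust_order_price(self, *args, **kwargs) -> float:
        return kwargs["current_order_rate"]

    # ... together with one of the legacy callbacks triggers the
    # OperationalException raised by StrategyResolver above.
    def adjust_entry_price(self, *args, **kwargs) -> float:
        return kwargs["current_order_rate"]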
|
||||
|
||||
@staticmethod
|
||||
|
||||
@@ -110,13 +110,17 @@ def handleExchangePayload(payload: ExchangeModePayloadMixin, config_loc: Config)
|
||||
Handle exchange and trading mode payload.
|
||||
Updates the configuration with the payload values.
|
||||
"""
|
||||
from freqtrade.configuration.directory_operations import create_datadir
|
||||
|
||||
if payload.exchange:
|
||||
config_loc["exchange"]["name"] = payload.exchange
|
||||
config_loc.update({"datadir": create_datadir(config_loc, None)})
|
||||
if payload.trading_mode:
|
||||
config_loc["trading_mode"] = payload.trading_mode
|
||||
config_loc["candle_type_def"] = CandleType.get_default(
|
||||
config_loc.get("trading_mode", "spot") or "spot"
|
||||
)
|
||||
|
||||
if payload.margin_mode:
|
||||
config_loc["margin_mode"] = payload.margin_mode
|
||||
|
||||
|
||||
@@ -6,13 +6,12 @@ import logging
|
||||
from abc import abstractmethod
|
||||
from collections.abc import Generator, Sequence
|
||||
from datetime import date, datetime, timedelta, timezone
|
||||
from math import isnan
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import psutil
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from dateutil.tz import tzlocal
|
||||
from numpy import inf, int64, mean, nan
|
||||
from numpy import inf, int64, isnan, mean, nan
|
||||
from pandas import DataFrame, NaT
|
||||
from sqlalchemy import func, select
|
||||
|
||||
|
||||
@@ -690,6 +690,104 @@ class IStrategy(ABC, HyperStrategyMixin):
|
||||
"""
|
||||
return current_order_rate
|
||||
|
||||
def adjust_exit_price(
|
||||
self,
|
||||
trade: Trade,
|
||||
order: Order | None,
|
||||
pair: str,
|
||||
current_time: datetime,
|
||||
proposed_rate: float,
|
||||
current_order_rate: float,
|
||||
entry_tag: str | None,
|
||||
side: str,
|
||||
**kwargs,
|
||||
) -> float:
|
||||
"""
|
||||
Exit price re-adjustment logic, returning the user desired limit price.
|
||||
This only executes when an order was already placed, still open (unfilled fully or partially)
|
||||
and not timed out on subsequent candles after entry trigger.
|
||||
|
||||
For full documentation please go to https://www.freqtrade.io/en/latest/strategy-callbacks/
|
||||
|
||||
When not implemented by a strategy, returns current_order_rate as default.
|
||||
If current_order_rate is returned then the existing order is maintained.
|
||||
If None is returned then order gets canceled but not replaced by a new one.
|
||||
|
||||
:param pair: Pair that's currently analyzed
|
||||
:param trade: Trade object.
|
||||
:param order: Order object
|
||||
:param current_time: datetime object, containing the current datetime
|
||||
:param proposed_rate: Rate, calculated based on pricing settings in entry_pricing.
|
||||
:param current_order_rate: Rate of the existing order in place.
|
||||
:param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
|
||||
:param side: 'long' or 'short' - indicating the direction of the proposed trade
|
||||
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
||||
:return float: New exit price value if provided
|
||||
|
||||
"""
|
||||
return current_order_rate
|
||||
|
||||
def adjust_order_price(
|
||||
self,
|
||||
trade: Trade,
|
||||
order: Order | None,
|
||||
pair: str,
|
||||
current_time: datetime,
|
||||
proposed_rate: float,
|
||||
current_order_rate: float,
|
||||
entry_tag: str | None,
|
||||
side: str,
|
||||
is_entry: bool,
|
||||
**kwargs,
|
||||
) -> float:
|
||||
"""
|
||||
Exit and entry order price re-adjustment logic, returning the user desired limit price.
|
||||
This only executes when an order was already placed, still open (unfilled fully or partially)
|
||||
and not timed out on subsequent candles after entry trigger.
|
||||
|
||||
For full documentation please go to https://www.freqtrade.io/en/latest/strategy-callbacks/
|
||||
|
||||
When not implemented by a strategy, returns current_order_rate as default.
|
||||
If current_order_rate is returned then the existing order is maintained.
|
||||
If None is returned then order gets canceled but not replaced by a new one.
|
||||
|
||||
:param pair: Pair that's currently analyzed
|
||||
:param trade: Trade object.
|
||||
:param order: Order object
|
||||
:param current_time: datetime object, containing the current datetime
|
||||
:param proposed_rate: Rate, calculated based on pricing settings in entry_pricing.
|
||||
:param current_order_rate: Rate of the existing order in place.
|
||||
:param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
|
||||
:param side: 'long' or 'short' - indicating the direction of the proposed trade
|
||||
:param is_entry: True if the order is an entry order, False if it's an exit order.
|
||||
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
||||
:return float: New order price value if provided
|
||||
"""
|
||||
if is_entry:
|
||||
return self.adjust_entry_price(
|
||||
trade=trade,
|
||||
order=order,
|
||||
pair=pair,
|
||||
current_time=current_time,
|
||||
proposed_rate=proposed_rate,
|
||||
current_order_rate=current_order_rate,
|
||||
entry_tag=entry_tag,
|
||||
side=side,
|
||||
**kwargs,
|
||||
)
|
||||
else:
|
||||
return self.adjust_exit_price(
|
||||
trade=trade,
|
||||
order=order,
|
||||
pair=pair,
|
||||
current_time=current_time,
|
||||
proposed_rate=proposed_rate,
|
||||
current_order_rate=current_order_rate,
|
||||
entry_tag=entry_tag,
|
||||
side=side,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def leverage(
|
||||
self,
|
||||
pair: str,
|
||||
|
||||
@@ -40,7 +40,7 @@ def custom_entry_price(
|
||||
"""
|
||||
return proposed_rate
|
||||
|
||||
def adjust_entry_price(
|
||||
def adjust_order_price(
|
||||
self,
|
||||
trade: Trade,
|
||||
order: Order | None,
|
||||
@@ -50,10 +50,11 @@ def adjust_entry_price(
|
||||
current_order_rate: float,
|
||||
entry_tag: str | None,
|
||||
side: str,
|
||||
is_entry: bool,
|
||||
**kwargs,
|
||||
) -> float:
|
||||
"""
|
||||
Entry price re-adjustment logic, returning the user desired limit price.
|
||||
Exit and entry order price re-adjustment logic, returning the user desired limit price.
|
||||
This only executes when an order was already placed, still open (unfilled fully or partially)
|
||||
and not timed out on subsequent candles after entry trigger.
|
||||
|
||||
@@ -71,6 +72,7 @@ def adjust_entry_price(
|
||||
:param current_order_rate: Rate of the existing order in place.
|
||||
:param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
|
||||
:param side: 'long' or 'short' - indicating the direction of the proposed trade
|
||||
:param is_entry: True if the order is an entry order, False if it's an exit order.
|
||||
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
||||
:return float: New entry price value if provided
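For strategy authors migrating from adjust_entry_price, a minimal sketch of the new combined callback follows. The class name, the 10-minute give-up window and the 0.5% drift threshold are illustrative assumptions, not values from this commit; the signature follows the interface added above, and per its docstring returning None cancels the open order without replacing it (hence the float | None annotation in this sketch).

from datetime import datetime, timedelta

from freqtrade.persistence import Order, Trade
from freqtrade.strategy import IStrategy


class OrderReplaceSketch(IStrategy):
    # (indicator and signal methods omitted for brevity)

    def adjust_order_price(
        self,
        trade: Trade,
        order: Order | None,
        pair: str,
        current_time: datetime,
        proposed_rate: float,
        current_order_rate: float,
        entry_tag: str | None,
        side: str,
        is_entry: bool,
        **kwargs,
    ) -> float | None:
        if is_entry:
            # Chase the freshly proposed entry rate, but give up after ~10 minutes.
            if order and current_time - order.order_date_utc > timedelta(minutes=10):
                return None  # cancel without replacing
            return proposed_rate
        # For exit orders, only replace when the market drifted more than 0.5%
        # away from the currently placed price.
        if abs(proposed_rate - current_order_rate) / current_order_rate > 0.005:
            return proposed_rate
        return current_order_rate  # keep the existing exit order in place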
|
||||
|
||||
|
||||
@@ -197,7 +197,7 @@ class Wallets:
|
||||
# Position is not open ...
|
||||
continue
|
||||
size = self._exchange._contracts_to_amount(symbol, position["contracts"])
|
||||
collateral = safe_value_fallback(position, "collateral", "initialMargin", 0.0)
|
||||
collateral = safe_value_fallback(position, "initialMargin", "collateral", 0.0)
|
||||
leverage = position.get("leverage")
|
||||
_parsed_positions[symbol] = PositionWallet(
|
||||
symbol,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from freqtrade_client.ft_rest_client import FtRestClient
|
||||
|
||||
|
||||
__version__ = "2025.2-dev"
|
||||
__version__ = "2025.3-dev"
|
||||
|
||||
if "dev" in __version__:
|
||||
from pathlib import Path
|
||||
|
||||
@@ -287,8 +287,6 @@ max-complexity = 12
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"freqtrade/freqai/**/*.py" = [
|
||||
"S311", # Standard pseudo-random generators are not suitable for cryptographic purposes
|
||||
"B006", # Bugbear - mutable default argument
|
||||
"B008", # bugbear - Do not perform function calls in argument defaults
|
||||
]
|
||||
"tests/**/*.py" = [
|
||||
"S101", # allow assert in tests
|
||||
|
||||
@@ -7,17 +7,17 @@
|
||||
-r docs/requirements-docs.txt
|
||||
|
||||
coveralls==4.0.1
|
||||
ruff==0.9.5
|
||||
ruff==0.9.9
|
||||
mypy==1.15.0
|
||||
pre-commit==4.1.0
|
||||
pytest==8.3.4
|
||||
pytest==8.3.5
|
||||
pytest-asyncio==0.25.3
|
||||
pytest-cov==6.0.0
|
||||
pytest-mock==3.14.0
|
||||
pytest-random-order==1.1.1
|
||||
pytest-timeout==2.3.1
|
||||
pytest-xdist==3.6.1
|
||||
isort==6.0.0
|
||||
isort==6.0.1
|
||||
# For datetime mocking
|
||||
time-machine==2.16.0
|
||||
|
||||
@@ -27,6 +27,6 @@ nbconvert==7.16.6
|
||||
# mypy types
|
||||
types-cachetools==5.5.0.20240820
|
||||
types-filelock==3.2.7
|
||||
types-requests==2.32.0.20241016
|
||||
types-requests==2.32.0.20250301
|
||||
types-tabulate==0.9.0.20241207
|
||||
types-python-dateutil==2.9.0.20241206
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
scikit-learn==1.6.1
|
||||
joblib==1.4.2
|
||||
catboost==1.2.7; 'arm' not in platform_machine
|
||||
lightgbm==4.5.0
|
||||
lightgbm==4.6.0
|
||||
xgboost==2.1.4
|
||||
tensorboard==2.18.0
|
||||
tensorboard==2.19.0
|
||||
datasieve==0.1.7
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
-r requirements.txt
|
||||
|
||||
# Required for hyperopt
|
||||
scipy==1.15.1
|
||||
scipy==1.15.2
|
||||
scikit-learn==1.6.1
|
||||
ft-scikit-optimize==0.9.2
|
||||
filelock==3.17.0
|
||||
|
||||
@@ -4,16 +4,15 @@ bottleneck==1.4.2
|
||||
numexpr==2.10.2
|
||||
pandas-ta==0.3.14b
|
||||
|
||||
ccxt==4.4.58
|
||||
cryptography==42.0.8; platform_machine == 'armv7l'
|
||||
cryptography==44.0.1; platform_machine != 'armv7l'
|
||||
ccxt==4.4.64
|
||||
cryptography==44.0.2
|
||||
aiohttp==3.9.5
|
||||
SQLAlchemy==2.0.38
|
||||
python-telegram-bot==21.10
|
||||
python-telegram-bot==21.11.1
|
||||
# can't be hard-pinned due to telegram-bot pinning httpx with ~
|
||||
httpx>=0.24.1
|
||||
humanize==4.11.0
|
||||
cachetools==5.5.1
|
||||
humanize==4.12.1
|
||||
cachetools==5.5.2
|
||||
requests==2.32.3
|
||||
urllib3==2.3.0
|
||||
jsonschema==4.23.0
|
||||
@@ -21,10 +20,10 @@ TA-Lib==0.4.38
|
||||
technical==1.5.0
|
||||
tabulate==0.9.0
|
||||
pycoingecko==3.2.0
|
||||
jinja2==3.1.5
|
||||
jinja2==3.1.6
|
||||
joblib==1.4.2
|
||||
rich==13.9.4
|
||||
pyarrow==19.0.0; platform_machine != 'armv7l'
|
||||
pyarrow==19.0.1; platform_machine != 'armv7l'
|
||||
|
||||
# find first, C search in arrays
|
||||
py_find_1st==1.1.7
|
||||
@@ -38,12 +37,12 @@ orjson==3.10.15
|
||||
sdnotify==0.3.2
|
||||
|
||||
# API Server
|
||||
fastapi==0.115.8
|
||||
fastapi==0.115.11
|
||||
pydantic==2.10.6
|
||||
uvicorn==0.34.0
|
||||
pyjwt==2.10.1
|
||||
aiofiles==24.1.0
|
||||
psutil==6.1.1
|
||||
psutil==7.0.0
|
||||
|
||||
# Building config files interactively
|
||||
questionary==2.1.0
|
||||
@@ -56,7 +55,7 @@ pytz==2025.1
|
||||
schedule==1.2.2
|
||||
|
||||
#WS Messages
|
||||
websockets==14.2
|
||||
websockets==15.0
|
||||
janus==2.0.0
|
||||
|
||||
ast-comments==1.2.2
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
from tests.conftest import is_arm, is_mac
|
||||
|
||||
MAXIMUM_STARTUP_TIME = 0.5
|
||||
|
||||
MAXIMUM_STARTUP_TIME = 0.7 if is_mac() and not is_arm() else 0.5
|
||||
|
||||
|
||||
def test_startup_time():
|
||||
@@ -14,4 +16,5 @@ def test_startup_time():
|
||||
elapsed = time.time() - start
|
||||
assert elapsed < MAXIMUM_STARTUP_TIME, (
|
||||
"The startup time is too long, try to use lazy import in the command entry function"
|
||||
f" (maximum {MAXIMUM_STARTUP_TIME}s, got {elapsed}s)"
|
||||
)
|
||||
|
||||
@@ -30,6 +30,7 @@ from freqtrade.data.metrics import (
|
||||
calculate_max_drawdown,
|
||||
calculate_sharpe,
|
||||
calculate_sortino,
|
||||
calculate_sqn,
|
||||
calculate_underwater,
|
||||
combine_dataframes_with_mean,
|
||||
combined_dataframes_with_rel_mean,
|
||||
@@ -457,6 +458,42 @@ def test_calculate_calmar(testdatadir):
|
||||
assert pytest.approx(calmar) == 559.040508
|
||||
|
||||
|
||||
def test_calculate_sqn(testdatadir):
|
||||
filename = testdatadir / "backtest_results/backtest-result.json"
|
||||
bt_data = load_backtest_data(filename)
|
||||
|
||||
sqn = calculate_sqn(DataFrame(), 0)
|
||||
assert sqn == 0.0
|
||||
|
||||
sqn = calculate_sqn(
|
||||
bt_data,
|
||||
0.01,
|
||||
)
|
||||
assert isinstance(sqn, float)
|
||||
assert pytest.approx(sqn) == 3.2991
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"profits,starting_balance,expected_sqn,description",
|
||||
[
|
||||
([1.0, -0.5, 2.0, -1.0, 0.5, 1.5, -0.5, 1.0], 100, 1.3229, "Mixed profits/losses"),
|
||||
([], 100, 0.0, "Empty dataframe"),
|
||||
([1.0, 0.5, 2.0, 1.5, 0.8], 100, 4.3657, "All winning trades"),
|
||||
([-1.0, -0.5, -2.0, -1.5, -0.8], 100, -4.3657, "All losing trades"),
|
||||
([1.0], 100, -100, "Single trade"),
|
||||
],
|
||||
)
|
||||
def test_calculate_sqn_cases(profits, starting_balance, expected_sqn, description):
|
||||
"""
|
||||
Test SQN calculation with various scenarios:
|
||||
"""
|
||||
trades = DataFrame({"profit_abs": profits})
|
||||
sqn = calculate_sqn(trades, starting_balance=starting_balance)
|
||||
|
||||
assert isinstance(sqn, float)
|
||||
assert pytest.approx(sqn, rel=1e-4) == expected_sqn
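For context on these expected values: SQN here follows the usual System Quality Number definition, sqrt(N) * mean(profit) / sample-stdev(profit). The sketch below reproduces the "Mixed profits/losses" expectation independently; it is not the calculate_sqn implementation itself, which also receives the starting balance and handles degenerate inputs such as an empty result set or a single trade.

import statistics
from math import sqrt

profits = [1.0, -0.5, 2.0, -1.0, 0.5, 1.5, -0.5, 1.0]
sqn = sqrt(len(profits)) * statistics.mean(profits) / statistics.stdev(profits)
assert round(sqn, 4) == 1.3229  # matches the parametrized expectation above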
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"start,end,days, expected",
|
||||
[
|
||||
|
||||
@@ -6,6 +6,7 @@ import ccxt
|
||||
import pandas as pd
|
||||
import pytest
|
||||
|
||||
from freqtrade.data.converter.trade_converter import trades_dict_to_list
|
||||
from freqtrade.enums import CandleType, MarginMode, TradingMode
|
||||
from freqtrade.exceptions import DependencyException, InvalidOrderException, OperationalException
|
||||
from freqtrade.exchange.exchange_utils_timeframe import timeframe_to_seconds
|
||||
@@ -1002,6 +1003,7 @@ def test_get_maintenance_ratio_and_amt_binance(
|
||||
|
||||
|
||||
async def test__async_get_trade_history_id_binance(default_conf_usdt, mocker, fetch_trades_result):
|
||||
default_conf_usdt["exchange"]["only_from_ccxt"] = True
|
||||
exchange = get_patched_exchange(mocker, default_conf_usdt, exchange="binance")
|
||||
|
||||
async def mock_get_trade_hist(pair, *args, **kwargs):
|
||||
@@ -1056,3 +1058,53 @@ async def test__async_get_trade_history_id_binance(default_conf_usdt, mocker, fe
|
||||
|
||||
# Clean up event loop to avoid warnings
|
||||
exchange.close()
|
||||
|
||||
|
||||
async def test__async_get_trade_history_id_binance_fast(
|
||||
default_conf_usdt, mocker, fetch_trades_result
|
||||
):
|
||||
default_conf_usdt["exchange"]["only_from_ccxt"] = False
|
||||
exchange = get_patched_exchange(mocker, default_conf_usdt, exchange="binance")
|
||||
|
||||
async def mock_get_trade_hist(pair, *args, **kwargs):
|
||||
if "since" in kwargs:
|
||||
pass
|
||||
# older than initial call
|
||||
# if kwargs["since"] < 1565798399752:
|
||||
# return []
|
||||
# else:
|
||||
# # Don't expect to get here
|
||||
# raise ValueError("Unexpected call")
|
||||
# # return fetch_trades_result[:-2]
|
||||
elif kwargs.get("params", {}).get(exchange._trades_pagination_arg) == "0":
|
||||
# Return first 3
|
||||
return fetch_trades_result[:-2]
|
||||
# elif kwargs.get("params", {}).get(exchange._trades_pagination_arg) in (
|
||||
# fetch_trades_result[-3]["id"],
|
||||
# 1565798399752,
|
||||
# ):
|
||||
# # Return 2
|
||||
# return fetch_trades_result[-3:-1]
|
||||
# else:
|
||||
# # Return last 2
|
||||
# return fetch_trades_result[-2:]
|
||||
|
||||
pair = "ETH/BTC"
|
||||
mocker.patch(
|
||||
"freqtrade.exchange.binance.download_archive_trades",
|
||||
return_value=(pair, trades_dict_to_list(fetch_trades_result[-2:])),
|
||||
)
|
||||
|
||||
exchange._api_async.fetch_trades = MagicMock(side_effect=mock_get_trade_hist)
|
||||
|
||||
ret = await exchange._async_get_trade_history(
|
||||
pair,
|
||||
since=fetch_trades_result[0]["timestamp"],
|
||||
until=fetch_trades_result[-1]["timestamp"] - 1,
|
||||
)
|
||||
|
||||
assert ret[0] == pair
|
||||
assert isinstance(ret[1], list)
|
||||
|
||||
# Clean up event loop to avoid warnings
|
||||
exchange.close()
|
||||
|
||||
@@ -14,11 +14,15 @@ from freqtrade.enums import CandleType
|
||||
from freqtrade.exchange.binance_public_data import (
|
||||
BadHttpStatus,
|
||||
Http404,
|
||||
binance_vision_trades_zip_url,
|
||||
binance_vision_zip_name,
|
||||
download_archive_ohlcv,
|
||||
download_archive_trades,
|
||||
get_daily_ohlcv,
|
||||
get_daily_trades,
|
||||
)
|
||||
from freqtrade.util.datetime_helpers import dt_ts, dt_utc
|
||||
from ft_client.test_client.test_rest_client import log_has_re
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
@@ -337,3 +341,156 @@ async def test_get_daily_ohlcv(mocker, testdatadir):
|
||||
with pytest.raises(zipfile.BadZipFile):
|
||||
df = await get_daily_ohlcv(symbol, timeframe, CandleType.SPOT, date, session)
|
||||
assert get.call_count == 4 # 1 + 3 default retries
|
||||
|
||||
|
||||
async def test_download_archive_trades(mocker, caplog):
|
||||
pair = "BTC/USDT"
|
||||
|
||||
since_ms = dt_ts(dt_utc(2020, 1, 1))
|
||||
until_ms = dt_ts(dt_utc(2020, 1, 2))
|
||||
markets = {"BTC/USDT": {"id": "BTCUSDT"}, "BTC/USDT:USDT": {"id": "BTCUSDT"}}
|
||||
|
||||
mocker.patch("freqtrade.exchange.binance_public_data.get_daily_trades", return_value=[[2, 3]])
|
||||
|
||||
pair1, res = await download_archive_trades(
|
||||
CandleType.SPOT, pair, since_ms=since_ms, until_ms=until_ms, markets=markets
|
||||
)
|
||||
assert pair1 == pair
|
||||
assert res == [[2, 3], [2, 3]]
|
||||
|
||||
mocker.patch(
|
||||
"freqtrade.exchange.binance_public_data.get_daily_trades",
|
||||
side_effect=Http404("xxx", dt_utc(2020, 1, 1), "http://example.com/something"),
|
||||
)
|
||||
|
||||
pair1, res = await download_archive_trades(
|
||||
CandleType.SPOT, pair, since_ms=since_ms, until_ms=until_ms, markets=markets
|
||||
)
|
||||
|
||||
assert pair1 == pair
|
||||
assert res == []
|
||||
# exit on day 1
|
||||
assert log_has_re("Fast download is unavailable", caplog)
|
||||
|
||||
# Test fail on day 2
|
||||
caplog.clear()
|
||||
mocker.patch(
|
||||
"freqtrade.exchange.binance_public_data.get_daily_trades",
|
||||
side_effect=[
|
||||
[[2, 3]],
|
||||
[[2, 3]],
|
||||
Http404("xxx", dt_utc(2020, 1, 2), "http://example.com/something"),
|
||||
[[2, 3]],
|
||||
],
|
||||
)
|
||||
# Download 3 days
|
||||
until_ms = dt_ts(dt_utc(2020, 1, 3))
|
||||
|
||||
pair1, res = await download_archive_trades(
|
||||
CandleType.SPOT, pair, since_ms=since_ms, until_ms=until_ms, markets=markets
|
||||
)
|
||||
|
||||
assert pair1 == pair
|
||||
assert res == [[2, 3], [2, 3]]
|
||||
assert log_has_re(r"Binance fast download .*stopped", caplog)
|
||||
|
||||
|
||||
async def test_download_archive_trades_exception(mocker, caplog):
|
||||
pair = "BTC/USDT"
|
||||
|
||||
since_ms = dt_ts(dt_utc(2020, 1, 1))
|
||||
until_ms = dt_ts(dt_utc(2020, 1, 2))
|
||||
|
||||
markets = {"BTC/USDT": {"id": "BTCUSDT"}, "BTC/USDT:USDT": {"id": "BTCUSDT"}}
|
||||
mocker.patch(
|
||||
"freqtrade.exchange.binance_public_data.aiohttp.ClientSession.get", side_effect=RuntimeError
|
||||
)
|
||||
|
||||
pair1, res = await download_archive_trades(
|
||||
CandleType.SPOT, pair, since_ms=since_ms, until_ms=until_ms, markets=markets
|
||||
)
|
||||
|
||||
assert pair1 == pair
|
||||
assert res == []
|
||||
mocker.patch(
|
||||
"freqtrade.exchange.binance_public_data._download_archive_trades", side_effect=RuntimeError
|
||||
)
|
||||
|
||||
await download_archive_trades(
|
||||
CandleType.SPOT, pair, since_ms=since_ms, until_ms=until_ms, markets=markets
|
||||
)
|
||||
assert pair1 == pair
|
||||
assert res == []
|
||||
assert log_has_re("An exception occurred during fast trades download", caplog)
|
||||
|
||||
|
||||
async def test_binance_vision_trades_zip_url():
|
||||
url = binance_vision_trades_zip_url("BTCUSDT", CandleType.SPOT, dt_utc(2023, 10, 27))
|
||||
assert (
|
||||
url == "https://data.binance.vision/data/spot/daily/aggTrades/"
|
||||
"BTCUSDT/BTCUSDT-aggTrades-2023-10-27.zip"
|
||||
)
|
||||
|
||||
url = binance_vision_trades_zip_url("BTCUSDT", CandleType.FUTURES, dt_utc(2023, 10, 28))
|
||||
assert (
|
||||
url == "https://data.binance.vision/data/futures/um/daily/aggTrades/"
|
||||
"BTCUSDT/BTCUSDT-aggTrades-2023-10-28.zip"
|
||||
)
|
||||
|
||||
|
||||
async def test_get_daily_trades(mocker, testdatadir):
|
||||
symbol = "PEPEUSDT"
|
||||
symbol_futures = "APEUSDT"
|
||||
date = dt_utc(2024, 10, 28).date()
|
||||
first_date = 1729987202368
|
||||
last_date = 1730073596350
|
||||
|
||||
async with aiohttp.ClientSession() as session:
|
||||
spot_path = (
|
||||
testdatadir / "binance/binance_public_data/spot-PEPEUSDT-aggTrades-2024-10-27.zip"
|
||||
)
|
||||
get = mocker.patch(
|
||||
"freqtrade.exchange.binance_public_data.aiohttp.ClientSession.get",
|
||||
return_value=MockResponse(spot_path.read_bytes(), 200),
|
||||
)
|
||||
res = await get_daily_trades(symbol, CandleType.SPOT, date, session)
|
||||
assert get.call_count == 1
|
||||
assert res[0][0] == first_date
|
||||
assert res[-1][0] == last_date
|
||||
|
||||
futures_path = (
|
||||
testdatadir / "binance/binance_public_data/futures-APEUSDT-aggTrades-2024-10-18.zip"
|
||||
)
|
||||
get = mocker.patch(
|
||||
"freqtrade.exchange.binance_public_data.aiohttp.ClientSession.get",
|
||||
return_value=MockResponse(futures_path.read_bytes(), 200),
|
||||
)
|
||||
res_fut = await get_daily_trades(symbol_futures, CandleType.FUTURES, date, session)
|
||||
assert get.call_count == 1
|
||||
assert res_fut[0][0] == 1729209603958
|
||||
assert res_fut[-1][0] == 1729295981272
|
||||
|
||||
get = mocker.patch(
|
||||
"freqtrade.exchange.binance_public_data.aiohttp.ClientSession.get",
|
||||
return_value=MockResponse(b"", 404),
|
||||
)
|
||||
with pytest.raises(Http404):
|
||||
await get_daily_trades(symbol, CandleType.SPOT, date, session, retry_delay=0)
|
||||
assert get.call_count == 1
|
||||
|
||||
get = mocker.patch(
|
||||
"freqtrade.exchange.binance_public_data.aiohttp.ClientSession.get",
|
||||
return_value=MockResponse(b"", 500),
|
||||
)
|
||||
mocker.patch("asyncio.sleep")
|
||||
with pytest.raises(BadHttpStatus):
|
||||
await get_daily_trades(symbol, CandleType.SPOT, date, session)
|
||||
assert get.call_count == 4 # 1 + 3 default retries
|
||||
|
||||
get = mocker.patch(
|
||||
"freqtrade.exchange.binance_public_data.aiohttp.ClientSession.get",
|
||||
return_value=MockResponse(b"nop", 200),
|
||||
)
|
||||
with pytest.raises(zipfile.BadZipFile):
|
||||
await get_daily_trades(symbol, CandleType.SPOT, date, session)
|
||||
assert get.call_count == 4 # 1 + 3 default retries
|
||||
|
||||
@@ -2177,13 +2177,11 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_
|
||||
|
||||
caplog.clear()
|
||||
|
||||
async def mock_get_candle_hist_error(pair, *args, **kwargs):
|
||||
raise TimeoutError()
|
||||
|
||||
exchange._async_get_candle_history = MagicMock(side_effect=mock_get_candle_hist_error)
|
||||
ret = exchange.get_historic_ohlcv(
|
||||
pair, "5m", dt_ts(dt_now() - timedelta(seconds=since)), candle_type=candle_type
|
||||
)
|
||||
exchange._async_get_candle_history = get_mock_coro(side_effect=TimeoutError())
|
||||
with pytest.raises(TimeoutError):
|
||||
exchange.get_historic_ohlcv(
|
||||
pair, "5m", dt_ts(dt_now() - timedelta(seconds=since)), candle_type=candle_type
|
||||
)
|
||||
assert log_has_re(r"Async code raised an exception: .*", caplog)
|
||||
|
||||
|
||||
@@ -2373,6 +2371,8 @@ def test_refresh_latest_trades(
|
||||
caplog.set_level(logging.DEBUG)
|
||||
use_trades_conf = default_conf
|
||||
use_trades_conf["exchange"]["use_public_trades"] = True
|
||||
use_trades_conf["exchange"]["only_from_ccxt"] = True
|
||||
|
||||
use_trades_conf["datadir"] = tmp_path
|
||||
use_trades_conf["orderflow"] = {"max_candles": 1500}
|
||||
exchange = get_patched_exchange(mocker, use_trades_conf)
|
||||
@@ -3365,6 +3365,7 @@ async def test__async_fetch_trades_contract_size(
|
||||
async def test__async_get_trade_history_id(
|
||||
default_conf, mocker, exchange_name, fetch_trades_result
|
||||
):
|
||||
default_conf["exchange"]["only_from_ccxt"] = True
|
||||
exchange = get_patched_exchange(mocker, default_conf, exchange=exchange_name)
|
||||
if exchange._trades_pagination != "id":
|
||||
exchange.close()
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
# pragma pylint: disable=missing-docstring, protected-access, invalid-name
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from math import isnan, nan
|
||||
|
||||
import pytest
|
||||
from ccxt import (
|
||||
@@ -321,6 +322,7 @@ def test_amount_to_precision(
|
||||
(2.9977, TICK_SIZE, 0.005, 3.0, ROUND),
|
||||
(234.24, TICK_SIZE, 0.5, 234.0, ROUND),
|
||||
(234.26, TICK_SIZE, 0.5, 234.5, ROUND),
|
||||
(nan, TICK_SIZE, 3, nan, ROUND),
|
||||
# Tests for TRUNCATE
|
||||
(2.34559, DECIMAL_PLACES, 4, 2.3455, TRUNCATE),
|
||||
(2.34559, DECIMAL_PLACES, 5, 2.34559, TRUNCATE),
|
||||
@@ -359,10 +361,11 @@ def test_amount_to_precision(
|
||||
],
|
||||
)
|
||||
def test_price_to_precision(price, precision_mode, precision, expected, rounding_mode):
|
||||
assert (
|
||||
price_to_precision(price, precision, precision_mode, rounding_mode=rounding_mode)
|
||||
== expected
|
||||
)
|
||||
result = price_to_precision(price, precision, precision_mode, rounding_mode=rounding_mode)
|
||||
if not isnan(expected):
|
||||
assert result == expected
|
||||
else:
|
||||
assert isnan(result)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
|
||||
@@ -150,7 +150,9 @@ def test_get_pair_data_for_features_with_prealoaded_data(mocker, freqai_conf):
|
||||
freqai.dd.load_all_pair_histories(timerange, freqai.dk)
|
||||
|
||||
_, base_df = freqai.dd.get_base_and_corr_dataframes(timerange, "LTC/BTC", freqai.dk)
|
||||
df = freqai.dk.get_pair_data_for_features("LTC/BTC", "5m", strategy, base_dataframes=base_df)
|
||||
df = freqai.dk.get_pair_data_for_features(
|
||||
"LTC/BTC", "5m", strategy, {}, base_dataframes=base_df
|
||||
)
|
||||
|
||||
assert df is base_df["5m"]
|
||||
assert not df.empty
|
||||
@@ -170,7 +172,9 @@ def test_get_pair_data_for_features_without_preloaded_data(mocker, freqai_conf):
|
||||
freqai.dd.load_all_pair_histories(timerange, freqai.dk)
|
||||
|
||||
base_df = {"5m": pd.DataFrame()}
|
||||
df = freqai.dk.get_pair_data_for_features("LTC/BTC", "5m", strategy, base_dataframes=base_df)
|
||||
df = freqai.dk.get_pair_data_for_features(
|
||||
"LTC/BTC", "5m", strategy, {}, base_dataframes=base_df
|
||||
)
|
||||
|
||||
assert df is not base_df["5m"]
|
||||
assert not df.empty
|
||||
|
||||
@@ -701,9 +701,9 @@ def test_process_trade_creation(
|
||||
assert pytest.approx(trade.amount) == 0
|
||||
assert pytest.approx(trade.amount_requested) == 60 / ticker_usdt.return_value[ticker_side]
|
||||
|
||||
assert log_has(
|
||||
assert log_has_re(
|
||||
f"{'Short' if is_short else 'Long'} signal found: about create a new trade for ETH/USDT "
|
||||
"with stake_amount: 60.0 ...",
|
||||
r"with stake_amount: 60.0 and price: .*",
|
||||
caplog,
|
||||
)
|
||||
mocker.patch("freqtrade.freqtradebot.FreqtradeBot._check_and_execute_exit")
|
||||
@@ -3743,8 +3743,9 @@ def test_trailing_stop_loss_positive(
|
||||
|
||||
@pytest.mark.parametrize("is_short", [False, True])
|
||||
def test_disable_ignore_roi_if_entry_signal(
|
||||
default_conf_usdt, limit_order, limit_order_open, is_short, fee, mocker
|
||||
default_conf_usdt, limit_order, limit_order_open, is_short, fee, mocker, time_machine
|
||||
) -> None:
|
||||
time_machine.move_to("2025-01-10 08:00:16 +00:00")
|
||||
patch_RPCManager(mocker)
|
||||
patch_exchange(mocker)
|
||||
eside = entry_side(is_short)
|
||||
@@ -3773,6 +3774,13 @@ def test_disable_ignore_roi_if_entry_signal(
|
||||
patch_get_signal(freqtrade, enter_long=not is_short, enter_short=is_short, exit_short=is_short)
|
||||
assert freqtrade.handle_trade(trade) is True
|
||||
|
||||
# Test if entry-signal is absent
|
||||
patch_get_signal(freqtrade)
|
||||
# Signal was evaluated already - no action.
|
||||
assert freqtrade.handle_trade(trade) is False
|
||||
|
||||
# Move to after the candle expired
|
||||
time_machine.shift(timedelta(hours=5))
|
||||
# Test if entry-signal is absent
|
||||
patch_get_signal(freqtrade)
|
||||
assert freqtrade.handle_trade(trade) is True
|
||||
|
||||
@@ -436,6 +436,7 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
|
||||
|
||||
# Replace new order with diff. order at a lower price
|
||||
freqtrade.strategy.adjust_entry_price = MagicMock(return_value=1.95)
|
||||
freqtrade.strategy.adjust_exit_price = MagicMock(side_effect=ValueError)
|
||||
freqtrade.strategy.adjust_trade_position = MagicMock(return_value=None)
|
||||
freqtrade.process()
|
||||
trade = Trade.get_trades().first()
|
||||
@@ -445,6 +446,8 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
|
||||
assert pytest.approx(trade.stake_amount) == 60
|
||||
assert trade.orders[-1].price == 1.95
|
||||
assert pytest.approx(trade.orders[-1].cost) == 120 * leverage
|
||||
assert freqtrade.strategy.adjust_entry_price.call_count == 1
|
||||
assert freqtrade.strategy.adjust_exit_price.call_count == 0
|
||||
|
||||
# Fill DCA order
|
||||
freqtrade.strategy.adjust_trade_position = MagicMock(return_value=None)
|
||||
@@ -469,6 +472,7 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
|
||||
mocker.patch(f"{EXMS}._dry_is_price_crossed", return_value=False)
|
||||
freqtrade.strategy.custom_exit = MagicMock(return_value="Exit now")
|
||||
freqtrade.strategy.adjust_entry_price = MagicMock(return_value=2.02)
|
||||
freqtrade.strategy.adjust_exit_price = MagicMock(side_effect=ValueError)
|
||||
freqtrade.process()
|
||||
trade = Trade.get_trades().first()
|
||||
assert len(trade.orders) == 5
|
||||
@@ -478,8 +482,9 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
|
||||
assert pytest.approx(trade.amount) == 91.689215 * leverage
|
||||
assert pytest.approx(trade.orders[-1].amount) == 91.689215 * leverage
|
||||
assert freqtrade.strategy.adjust_entry_price.call_count == 0
|
||||
assert freqtrade.strategy.adjust_exit_price.call_count == 0
|
||||
|
||||
# Process again, should not adjust entry price
|
||||
# Process again, should not adjust price
|
||||
freqtrade.process()
|
||||
trade = Trade.get_trades().first()
|
||||
|
||||
@@ -490,6 +495,21 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
|
||||
assert trade.orders[-1].price == 2.02
|
||||
# Adjust entry price cannot be called - this is an exit order
|
||||
assert freqtrade.strategy.adjust_entry_price.call_count == 0
|
||||
assert freqtrade.strategy.adjust_exit_price.call_count == 1
|
||||
|
||||
freqtrade.strategy.adjust_exit_price = MagicMock(return_value=2.03)
|
||||
|
||||
# Process again, should adjust exit price
|
||||
freqtrade.process()
|
||||
trade = Trade.get_trades().first()
|
||||
|
||||
assert trade.orders[-2].status == "canceled"
|
||||
assert len(trade.orders) == 6
|
||||
assert trade.orders[-1].side == trade.exit_side
|
||||
assert trade.orders[-1].status == "open"
|
||||
assert trade.orders[-1].price == 2.03
|
||||
assert freqtrade.strategy.adjust_entry_price.call_count == 0
|
||||
assert freqtrade.strategy.adjust_exit_price.call_count == 1
|
||||
|
||||
|
||||
@pytest.mark.parametrize("leverage", [1, 2])
|
||||
|
||||
@@ -45,6 +45,7 @@ class BTContainer(NamedTuple):
|
||||
leverage: float = 1.0
|
||||
timeout: int | None = None
|
||||
adjust_entry_price: float | None = None
|
||||
adjust_exit_price: float | None = None
|
||||
adjust_trade_position: list[float] | None = None
|
||||
|
||||
|
||||
|
||||
@@ -1217,6 +1217,46 @@ tc57 = BTContainer(
|
||||
],
|
||||
)
|
||||
|
||||
# Test 58: Custom-exit-price short - below all candles
|
||||
tc58 = BTContainer(
|
||||
data=[
|
||||
# D O H L C V EL XL ES Xs BT
|
||||
[0, 5000, 5050, 4950, 5000, 6172, 0, 0, 1, 0],
|
||||
[1, 5000, 5200, 4951, 5000, 6172, 0, 0, 0, 0], # enter trade (signal on last candle)
|
||||
[2, 4900, 5250, 4900, 5100, 6172, 0, 0, 0, 1], # Exit - delayed
|
||||
[3, 5100, 5100, 4650, 4750, 6172, 0, 0, 0, 0], #
|
||||
[4, 4750, 5100, 4350, 4750, 6172, 0, 0, 0, 0],
|
||||
],
|
||||
stop_loss=-0.10,
|
||||
roi={"0": 1.00},
|
||||
profit_perc=-0.01,
|
||||
use_exit_signal=True,
|
||||
timeout=1000,
|
||||
custom_exit_price=4300,
|
||||
adjust_exit_price=5050,
|
||||
trades=[BTrade(exit_reason=ExitType.EXIT_SIGNAL, open_tick=1, close_tick=4, is_short=True)],
|
||||
)
|
||||
|
||||
# Test 59: Custom-exit-price above all candles - readjust order
|
||||
tc59 = BTContainer(
|
||||
data=[
|
||||
# D O H L C V EL XL ES Xs BT
|
||||
[0, 5000, 5050, 4950, 5000, 6172, 1, 0],
|
||||
[1, 5000, 5500, 4951, 5000, 6172, 0, 0],
|
||||
[2, 4900, 5250, 4500, 5100, 6172, 0, 1], # exit
|
||||
[3, 5100, 5100, 4650, 4750, 6172, 0, 0], # order readjust
|
||||
[4, 4750, 4950, 4350, 4750, 6172, 0, 0],
|
||||
],
|
||||
stop_loss=-0.2,
|
||||
roi={"0": 0.10},
|
||||
profit_perc=-0.02,
|
||||
use_exit_signal=True,
|
||||
timeout=1000,
|
||||
custom_exit_price=5300,
|
||||
adjust_exit_price=4900,
|
||||
trades=[BTrade(exit_reason=ExitType.EXIT_SIGNAL, open_tick=1, close_tick=4, is_short=False)],
|
||||
)
|
||||
|
||||
|
||||
TESTS = [
|
||||
tc0,
|
||||
@@ -1277,6 +1317,8 @@ TESTS = [
|
||||
tc55,
|
||||
tc56,
|
||||
tc57,
|
||||
tc58,
|
||||
tc59,
|
||||
]
|
||||
|
||||
|
||||
@@ -1330,6 +1372,8 @@ def test_backtest_results(default_conf, mocker, caplog, data: BTContainer) -> No
|
||||
)
|
||||
if data.adjust_entry_price:
|
||||
backtesting.strategy.adjust_entry_price = MagicMock(return_value=data.adjust_entry_price)
|
||||
if data.adjust_exit_price:
|
||||
backtesting.strategy.adjust_exit_price = MagicMock(return_value=data.adjust_exit_price)
|
||||
|
||||
backtesting.strategy.use_custom_stoploss = data.use_custom_stoploss
|
||||
backtesting.strategy.leverage = lambda **kwargs: data.leverage
|
||||
|
||||
@@ -162,7 +162,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
|
||||
backtesting.strategy.adjust_trade_position = MagicMock(return_value=None)
|
||||
assert pytest.approx(trade.liquidation_price) == (0.10278333 if leverage == 1 else 1.2122249)
|
||||
|
||||
trade = backtesting._get_adjust_trade_entry_for_candle(trade, row_enter, current_time)
|
||||
trade = backtesting._check_adjust_trade_for_candle(trade, row_enter, current_time)
|
||||
assert trade
|
||||
assert pytest.approx(trade.stake_amount) == 100.0
|
||||
assert pytest.approx(trade.amount) == 47.61904762 * leverage
|
||||
@@ -170,7 +170,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
|
||||
# Increase position by 100
|
||||
backtesting.strategy.adjust_trade_position = MagicMock(return_value=(100, "PartIncrease"))
|
||||
|
||||
trade = backtesting._get_adjust_trade_entry_for_candle(trade, row_enter, current_time)
|
||||
trade = backtesting._check_adjust_trade_for_candle(trade, row_enter, current_time)
|
||||
|
||||
liq_price = 0.1038916 if leverage == 1 else 1.2127791
|
||||
assert trade
|
||||
@@ -184,7 +184,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
|
||||
backtesting.strategy.adjust_trade_position = MagicMock(return_value=-500)
|
||||
current_time = row_exit[0].to_pydatetime()
|
||||
|
||||
trade = backtesting._get_adjust_trade_entry_for_candle(trade, row_exit, current_time)
|
||||
trade = backtesting._check_adjust_trade_for_candle(trade, row_exit, current_time)
|
||||
|
||||
assert trade
|
||||
assert pytest.approx(trade.stake_amount) == 200.0
|
||||
@@ -195,7 +195,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
|
||||
|
||||
# Reduce position by 50
|
||||
backtesting.strategy.adjust_trade_position = MagicMock(return_value=(-100, "partDecrease"))
|
||||
trade = backtesting._get_adjust_trade_entry_for_candle(trade, row_exit, current_time)
|
||||
trade = backtesting._check_adjust_trade_for_candle(trade, row_exit, current_time)
|
||||
|
||||
assert trade
|
||||
assert pytest.approx(trade.stake_amount) == 100.0
|
||||
@@ -208,7 +208,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
|
||||
|
||||
# Adjust below minimum
|
||||
backtesting.strategy.adjust_trade_position = MagicMock(return_value=-99)
|
||||
trade = backtesting._get_adjust_trade_entry_for_candle(trade, row_exit, current_time)
|
||||
trade = backtesting._check_adjust_trade_for_candle(trade, row_exit, current_time)
|
||||
|
||||
assert trade
|
||||
assert pytest.approx(trade.stake_amount) == 100.0
|
||||
@@ -220,5 +220,5 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera

    # Adjust to close trade
    backtesting.strategy.adjust_trade_position = MagicMock(return_value=-trade.stake_amount)
    trade = backtesting._get_adjust_trade_entry_for_candle(trade, row_exit, current_time)
    trade = backtesting._check_adjust_trade_for_candle(trade, row_exit, current_time)
    assert trade.is_open is False

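Aside (illustrative, not part of the diff): the mocked return values in the hunks above follow the adjust_trade_position conventions exercised by this test: a positive stake increases the position, a negative stake reduces it, a (stake, tag) tuple also sets an order tag, and None leaves the trade untouched. A hedged sketch of a strategy-side counterpart, with the signature trimmed to the arguments assumed here:

```python
# Sketch only - a strategy-side counterpart to the mocked values above.
def adjust_trade_position(self, trade, current_time, current_rate,
                          current_profit, min_stake, max_stake, **kwargs):
    if current_profit > 0.05:
        # Reduce the position by half and tag the resulting order.
        return -trade.stake_amount / 2, "take_partial_profit"
    if current_profit < -0.05:
        # Add 100 stake-currency units to the position.
        return 100
    return None  # No adjustment this candle.
```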
@@ -276,3 +276,67 @@ def test_trade_serialize_load_back(fee):

    trade3 = LocalTrade.from_json(trade_string)
    assert len(trade3.orders) == len(t.orders)


@pytest.mark.usefixtures("init_persistence")
def test_trade_fromjson_backtesting():
    """
    trade.from_json should be able to load a trade output via backtesting.
    """
    trade_string = """
    {
        "pair":"XRP/USDT:USDT",
        "stake_amount":3015.294001,
        "max_stake_amount":60305.88002,
        "amount":94612.3,
        "open_date":"2024-03-02 10:40:00+00:00",
        "close_date":"2024-03-02 11:10:00+00:00",
        "open_rate":0.6374,
        "close_rate":0.6399,
        "fee_open":0.0005,
        "fee_close":0.0005,
        "trade_duration":30,
        "profit_ratio":-0.09853216535933962,
        "profit_abs":-296.95489539,
        "exit_reason":"trailing_stop_loss",
        "initial_stop_loss_abs":0.6689,
        "initial_stop_loss_ratio":-0.99,
        "stop_loss_abs":0.6399,
        "stop_loss_ratio":-0.3368287257705749,
        "min_rate":0.6294,
        "max_rate":0.6421,
        "is_open":false,
        "enter_tag":"[0.6373, 0.5993, 0.64]",
        "leverage":20,
        "is_short":true,
        "open_timestamp":1709376000000,
        "close_timestamp":1709377800000,
        "orders":[
            {
                "amount":94612.3,
                "safe_price":0.6374,
                "ft_order_side":"sell",
                "order_filled_timestamp":1709376000000,
                "ft_is_entry":true,
                "ft_order_tag":"[0.6373, 0.5993, 0.64]",
                "cost":60336.032960009994
            },
            {
                "amount":94612.3,
                "safe_price":0.6399,
                "ft_order_side":"buy",
                "order_filled_timestamp":1709377800000,
                "ft_is_entry":false,
                "ft_order_tag":"trailing_stop_loss",
                "cost":60572.681975385
            }
        ]
    }
    """

    trade = Trade.from_json(trade_string)
    Trade.session.add(trade)
    Trade.commit()

    # Trade-id not given - use first available
    assert trade.id == 1

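Aside (illustrative, not part of the diff): the trade_string above is plain JSON, so it can be sanity-checked with the standard library before it is handed to Trade.from_json; the values below are taken directly from the fixture:

```python
# Sketch only: validate the backtest JSON with the standard library.
import json

payload = json.loads(trade_string)
assert payload["pair"] == "XRP/USDT:USDT"
assert payload["is_short"] is True
assert payload["leverage"] == 20
assert len(payload["orders"]) == 2
```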
@@ -577,7 +577,7 @@ def test_rpc_balance_handle(default_conf_usdt, mocker, tickers, proxy_coin, marg
            "symbol": "ETH/USDT:USDT",
            "timestamp": None,
            "datetime": None,
            "initialMargin": 0.0,
            "initialMargin": 20,
            "initialMarginPercentage": None,
            "maintenanceMargin": 0.0,
            "maintenanceMarginPercentage": 0.005,
@@ -590,8 +590,9 @@ def test_rpc_balance_handle(default_conf_usdt, mocker, tickers, proxy_coin, marg
            "marginRatio": None,
            "liquidationPrice": 0.0,
            "markPrice": 2896.41,
            "collateral": 20,
            "marginType": "isolated",
            # Collateral is in USDT - and can be higher than position size in cross mode
            "collateral": 50,
            "marginType": "cross",
            "side": "short",
            "percentage": None,
        }

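Aside (illustrative, not part of the diff): the dict above mimics a ccxt-style position entry; the changed values exercise cross margin mode, where the account-wide collateral reported for a position can exceed that position's own initial margin. A small sketch with the same fields (values are examples):

```python
# Sketch only - fields mirror the mocked position above.
position = {
    "symbol": "ETH/USDT:USDT",
    "initialMargin": 20,
    "collateral": 50,       # cross mode: shared account collateral
    "marginType": "cross",
    "side": "short",
}
assert position["collateral"] >= position["initialMargin"]
```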
@@ -21,10 +21,12 @@ class TestStrategyNoImplementSell(TestStrategyNoImplements):
        return super().populate_entry_trend(dataframe, metadata)


class TestStrategyImplementCustomSell(TestStrategyNoImplementSell):
class TestStrategyImplementEmptyWorking(TestStrategyNoImplementSell):
    def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        return super().populate_exit_trend(dataframe, metadata)


class TestStrategyImplementCustomSell(TestStrategyImplementEmptyWorking):
    def custom_sell(
        self,
        pair: str,
@@ -55,3 +57,34 @@ class TestStrategyImplementSellTimeout(TestStrategyNoImplementSell):
        self, pair: str, trade, order: Order, current_time: datetime, **kwargs
    ) -> bool:
        return False


class TestStrategyAdjustOrderPrice(TestStrategyImplementEmptyWorking):
    def adjust_entry_price(
        self,
        trade,
        order,
        pair,
        current_time,
        proposed_rate,
        current_order_rate,
        entry_tag,
        side,
        **kwargs,
    ):
        return proposed_rate

    def adjust_order_price(
        self,
        trade,
        order,
        pair,
        current_time,
        proposed_rate,
        current_order_rate,
        entry_tag,
        side,
        is_entry,
        **kwargs,
    ):
        return proposed_rate

@@ -460,6 +460,10 @@ def test_missing_implements(default_conf, caplog):
    ):
        StrategyResolver.load_strategy(default_conf)

    default_conf["strategy"] = "TestStrategyAdjustOrderPrice"
    with pytest.raises(OperationalException, match=r"If you implement `adjust_order_price`.*"):
        StrategyResolver.load_strategy(default_conf)


def test_call_deprecated_function(default_conf):
    default_location = Path(__file__).parent / "strats/broken_strats/"

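Aside (illustrative, not part of the diff): the new resolver check above rejects strategies that implement adjust_order_price alongside the older split hooks (adjust_entry_price/adjust_exit_price), which is exactly what the TestStrategyAdjustOrderPrice fixture does. A hedged sketch of the valid form, implementing only the combined hook (class name is made up for illustration):

```python
# Sketch only - implement either adjust_order_price or the split
# adjust_entry_price/adjust_exit_price hooks, not both.
from freqtrade.strategy import IStrategy


class MyAdjustingStrategy(IStrategy):
    def adjust_order_price(self, trade, order, pair, current_time, proposed_rate,
                           current_order_rate, entry_tag, side, is_entry, **kwargs):
        # Keep the currently proposed rate for both entry and exit orders.
        return proposed_rate
```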
BIN tests/testdata/binance/binance_public_data/futures-APEUSDT-aggTrades-2024-10-18.zip vendored Normal file
Binary file not shown.
BIN tests/testdata/binance/binance_public_data/spot-PEPEUSDT-aggTrades-2024-10-27.zip vendored Normal file
Binary file not shown.