Merge branch 'develop' into fix_merge_informative_pair

This commit is contained in:
Matthias
2025-10-12 09:51:43 +02:00
113 changed files with 13747 additions and 6416 deletions

View File

@@ -14,10 +14,11 @@ MANIFEST.in
README.md
freqtrade.service
freqtrade.egg-info
.venv/
config.json*
*.sqlite
user_data
user_data/
*.log
.vscode

92
.github/actions/docker-tags/action.yml vendored Normal file
View File

@@ -0,0 +1,92 @@
name: 'docker-tags'
description: 'Set Docker default Tag environment variables'
# inputs:
outputs:
  BRANCH_NAME:
    description: 'The branch name'
    value: ${{ steps.tags.outputs.BRANCH_NAME }}
  TAG:
    description: 'The Docker tag'
    value: ${{ steps.tags.outputs.TAG }}
  TAG_PLOT:
    description: 'The Docker tag for the plot'
    value: ${{ steps.tags.outputs.TAG_PLOT }}
  TAG_FREQAI:
    description: 'The Docker tag for the freqai'
    value: ${{ steps.tags.outputs.TAG_FREQAI }}
  TAG_FREQAI_RL:
    description: 'The Docker tag for the freqai_rl'
    value: ${{ steps.tags.outputs.TAG_FREQAI_RL }}
  TAG_FREQAI_TORCH:
    description: 'The Docker tag for the freqai_torch'
    value: ${{ steps.tags.outputs.TAG_FREQAI_TORCH }}
  TAG_ARM:
    description: 'The Docker tag for the arm'
    value: ${{ steps.tags.outputs.TAG_ARM }}
  TAG_PLOT_ARM:
    description: 'The Docker tag for the plot arm'
    value: ${{ steps.tags.outputs.TAG_PLOT_ARM }}
  TAG_FREQAI_ARM:
    description: 'The Docker tag for the freqai arm'
    value: ${{ steps.tags.outputs.TAG_FREQAI_ARM }}
  TAG_FREQAI_RL_ARM:
    description: 'The Docker tag for the freqai_rl arm'
    value: ${{ steps.tags.outputs.TAG_FREQAI_RL_ARM }}
  TAG_PI:
    description: 'The Docker tag for the pi'
    value: ${{ steps.tags.outputs.TAG_PI }}
  CACHE_TAG_PI:
    description: 'The Docker cache tag for the pi'
    value: ${{ steps.tags.outputs.CACHE_TAG_PI }}
runs:
  using: "composite"
  steps:
    - name: Extract branch name
      shell: bash
      id: tags
      env:
        # Passed through env (not inline templating) so the user-controlled
        # branch name cannot inject into the shell script.
        BRANCH_NAME_INPUT: ${{ github.event.inputs.branch_name }}
      run: |
        # On manual dispatch, use the branch name the user supplied;
        # otherwise derive it from the ref that triggered the workflow.
        if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
          BRANCH_NAME="${BRANCH_NAME_INPUT}"
        else
          BRANCH_NAME="${GITHUB_REF##*/}"
        fi
        # Replace / with _ to create a valid tag
        TAG=$(echo "${BRANCH_NAME}" | sed -e "s/\//_/g")
        TAG_PLOT=${TAG}_plot
        TAG_FREQAI=${TAG}_freqai
        TAG_FREQAI_RL=${TAG_FREQAI}rl
        TAG_FREQAI_TORCH=${TAG_FREQAI}torch
        TAG_ARM=${TAG}_arm
        TAG_PLOT_ARM=${TAG_PLOT}_arm
        TAG_FREQAI_ARM=${TAG_FREQAI}_arm
        TAG_FREQAI_RL_ARM=${TAG_FREQAI_RL}_arm
        TAG_PI="${TAG}_pi"
        # NOTE(review): CACHE_IMAGE is not defined in this action — it is
        # expected to be set in the calling workflow's env; confirm callers
        # always provide it.
        CACHE_TAG_PI=${CACHE_IMAGE}:${TAG_PI}_cache
        echo "BRANCH_NAME=${BRANCH_NAME}" >> "$GITHUB_OUTPUT"
        echo "TAG=${TAG}" >> "$GITHUB_OUTPUT"
        echo "TAG_PLOT=${TAG_PLOT}" >> "$GITHUB_OUTPUT"
        echo "TAG_FREQAI=${TAG_FREQAI}" >> "$GITHUB_OUTPUT"
        echo "TAG_FREQAI_RL=${TAG_FREQAI_RL}" >> "$GITHUB_OUTPUT"
        echo "TAG_FREQAI_TORCH=${TAG_FREQAI_TORCH}" >> "$GITHUB_OUTPUT"
        echo "TAG_ARM=${TAG_ARM}" >> "$GITHUB_OUTPUT"
        echo "TAG_PLOT_ARM=${TAG_PLOT_ARM}" >> "$GITHUB_OUTPUT"
        echo "TAG_FREQAI_ARM=${TAG_FREQAI_ARM}" >> "$GITHUB_OUTPUT"
        echo "TAG_FREQAI_RL_ARM=${TAG_FREQAI_RL_ARM}" >> "$GITHUB_OUTPUT"
        echo "TAG_PI=${TAG_PI}" >> "$GITHUB_OUTPUT"
        echo "CACHE_TAG_PI=${CACHE_TAG_PI}" >> "$GITHUB_OUTPUT"
        # Echo the computed outputs into the log for debugging.
        cat "$GITHUB_OUTPUT"
    - name: Save commit SHA to file
      shell: bash
      # Add commit to docker container
      run: |
        echo "${GITHUB_SHA}" > freqtrade_commit

View File

@@ -19,7 +19,7 @@ jobs:
with:
persist-credentials: false
- uses: actions/setup-python@v5
- uses: actions/setup-python@v6
with:
python-version: "3.12"

View File

@@ -19,12 +19,12 @@ concurrency:
permissions:
repository-projects: read
jobs:
build-linux:
tests:
name: "Tests and Linting"
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ "ubuntu-22.04", "ubuntu-24.04" ]
os: [ "ubuntu-22.04", "ubuntu-24.04", "macos-14", "macos-15" , "windows-2022", "windows-2025" ]
python-version: ["3.11", "3.12", "3.13"]
steps:
@@ -33,21 +33,27 @@ jobs:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
with:
activate-environment: true
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: "requirements**.txt"
cache-suffix: "${{ matrix.python-version }}"
prune-cache: false
- name: Installation - *nix
- name: Installation - macOS (Brew)
if: ${{ runner.os == 'macOS' }}
run: |
# brew update
# TODO: Should be the brew upgrade
brew install libomp
- name: Installation (python)
run: |
uv pip install --upgrade wheel
uv pip install -r requirements-dev.txt
@@ -61,7 +67,7 @@ jobs:
- name: Tests
if: (!(runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-24.04'))
run: |
pytest --random-order
pytest --random-order --durations 20 -n auto
- name: Tests with Coveralls
if: (runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-24.04')
@@ -88,9 +94,9 @@ jobs:
run: |
python build_helpers/create_command_partials.py
- name: Check for repository changes
- name: Check for repository changes - *nix
# TODO: python 3.13 slightly changed the output of argparse.
if: (matrix.python-version != '3.13')
if: ${{ (matrix.python-version != '3.13') && (runner.os != 'Windows') }}
run: |
if [ -n "$(git status --porcelain)" ]; then
echo "Repository is dirty, changes detected:"
@@ -101,13 +107,27 @@ jobs:
echo "Repository is clean, no changes detected."
fi
- name: Check for repository changes - Windows
if: ${{ runner.os == 'Windows' && (matrix.python-version != '3.13') }}
run: |
if (git status --porcelain) {
Write-Host "Repository is dirty, changes detected:"
git status
git diff
exit 1
}
else {
Write-Host "Repository is clean, no changes detected."
}
- name: Backtesting (multi)
run: |
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
cp tests/testdata/config.tests.json user_data/config.json
freqtrade new-strategy -s AwesomeStrategy
freqtrade new-strategy -s AwesomeStrategyMin --template minimal
freqtrade backtesting --datadir tests/testdata --strategy-list AwesomeStrategy AwesomeStrategyMin -i 5m
freqtrade new-strategy -s AwesomeStrategyAdv --template advanced
freqtrade backtesting --datadir tests/testdata --strategy-list AwesomeStrategy AwesomeStrategyMin AwesomeStrategyAdv -i 5m
- name: Hyperopt
run: |
@@ -128,208 +148,13 @@ jobs:
ruff format --check
- name: Mypy
if: matrix.os == 'ubuntu-24.04'
run: |
mypy freqtrade scripts tests
- name: Discord notification
uses: rjstone/discord-webhook-notify@c2597273488aeda841dd1e891321952b51f7996f #v2.2.1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: error
details: Freqtrade CI failed on ${{ matrix.os }}
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
build-macos:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ "macos-14", "macos-15" ]
python-version: ["3.11", "3.12", "3.13"]
steps:
- uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Install uv
uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
with:
activate-environment: true
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: "requirements**.txt"
cache-suffix: "${{ matrix.python-version }}"
prune-cache: false
- name: Installation - macOS (Brew)
run: |
# brew update
# TODO: Should be the brew upgrade
# homebrew fails to update python due to unlinking failures
# https://github.com/actions/runner-images/issues/6817
rm /usr/local/bin/2to3 || true
rm /usr/local/bin/2to3-3.11 || true
rm /usr/local/bin/2to3-3.12 || true
rm /usr/local/bin/idle3 || true
rm /usr/local/bin/idle3.11 || true
rm /usr/local/bin/idle3.12 || true
rm /usr/local/bin/pydoc3 || true
rm /usr/local/bin/pydoc3.11 || true
rm /usr/local/bin/pydoc3.12 || true
rm /usr/local/bin/python3 || true
rm /usr/local/bin/python3.11 || true
rm /usr/local/bin/python3.12 || true
rm /usr/local/bin/python3-config || true
rm /usr/local/bin/python3.11-config || true
rm /usr/local/bin/python3.12-config || true
brew install libomp
- name: Installation (python)
run: |
uv pip install wheel
uv pip install -r requirements-dev.txt
uv pip install -e ft_client/
uv pip install -e .
- name: Tests
run: |
pytest --random-order
- name: Check for repository changes
run: |
if [ -n "$(git status --porcelain)" ]; then
echo "Repository is dirty, changes detected:"
git status
git diff
exit 1
else
echo "Repository is clean, no changes detected."
fi
- name: Backtesting
run: |
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade new-strategy -s AwesomeStrategyAdv --template advanced
freqtrade backtesting --datadir tests/testdata --strategy AwesomeStrategyAdv
- name: Hyperopt
run: |
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
- name: Sort imports (isort)
run: |
isort --check .
- name: Run Ruff
run: |
ruff check --output-format=github
- name: Run Ruff format check
run: |
ruff format --check
- name: Mypy
if: matrix.os == 'macos-15'
run: |
mypy freqtrade scripts
- name: Discord notification
uses: rjstone/discord-webhook-notify@c2597273488aeda841dd1e891321952b51f7996f #v2.2.1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: info
details: Test Succeeded!
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
build-windows:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ "windows-2022", "windows-2025" ]
python-version: ["3.11", "3.12", "3.13"]
steps:
- uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
with:
activate-environment: true
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: "requirements**.txt"
cache-suffix: "${{ matrix.python-version }}"
prune-cache: false
- name: Installation
run: |
function uvpipFunction { uv pip $args }
Set-Alias -name pip -value uvpipFunction
python -m pip install --upgrade pip
pip install -r requirements-dev.txt
pip install -e .
- name: Tests
run: |
pytest --random-order --durations 20 -n auto
- name: Check for repository changes
run: |
if (git status --porcelain) {
Write-Host "Repository is dirty, changes detected:"
git status
git diff
exit 1
}
else {
Write-Host "Repository is clean, no changes detected."
}
- name: Backtesting
run: |
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade backtesting --datadir tests/testdata --strategy SampleStrategy
- name: Hyperopt
run: |
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
- name: Run Ruff
run: |
ruff check --output-format=github
- name: Run Ruff format check
run: |
ruff format --check
- name: Mypy
if: ${{ matrix.os == 'ubuntu-24.04' || matrix.os == 'macos-15' }}
run: |
mypy freqtrade scripts tests
- name: Run Pester tests (PowerShell)
if: ${{ runner.os == 'Windows' }}
shell: powershell
run: |
$PSVersionTable
Set-PSRepository psgallery -InstallationPolicy trusted
@@ -338,25 +163,24 @@ jobs:
Invoke-Pester -Path "tests" -CI
if ($Error.Length -gt 0) {exit 1}
shell: powershell
- name: Discord notification
uses: rjstone/discord-webhook-notify@c2597273488aeda841dd1e891321952b51f7996f #v2.2.1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
if: ${{ failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false) }}
with:
severity: error
details: Test Failed
details: Freqtrade CI failed on ${{ matrix.os }} with Python ${{ matrix.python-version }}!
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
mypy-version-check:
runs-on: ubuntu-22.04
name: "Mypy Version Check"
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:
python-version: "3.12"
@@ -366,18 +190,20 @@ jobs:
python build_helpers/pre_commit_update.py
pre-commit:
name: "Pre-commit checks"
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v5
with:
persist-credentials: false
- uses: actions/setup-python@v5
- uses: actions/setup-python@v6
with:
python-version: "3.12"
- uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1
docs-check:
name: "Documentation build"
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v5
@@ -389,7 +215,7 @@ jobs:
./tests/test_docs.sh
- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:
python-version: "3.12"
@@ -409,6 +235,7 @@ jobs:
build-linux-online:
# Run pytest with "live" checks
name: "Tests and Linting - Online tests"
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v5
@@ -416,19 +243,18 @@ jobs:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:
python-version: "3.12"
- name: Install uv
uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
with:
activate-environment: true
enable-cache: true
python-version: "3.12"
cache-dependency-glob: "requirements**.txt"
cache-suffix: "3.12"
prune-cache: false
- name: Installation - *nix
run: |
@@ -447,9 +273,7 @@ jobs:
# Notify only once - when CI completes (and after deploy) in case it's successful
notify-complete:
needs: [
build-linux,
build-macos,
build-windows,
tests,
docs-check,
mypy-version-check,
pre-commit,
@@ -481,7 +305,7 @@ jobs:
build:
name: "Build"
needs: [ build-linux, build-macos, build-windows, docs-check, mypy-version-check, pre-commit ]
needs: [ tests, docs-check, mypy-version-check, pre-commit ]
runs-on: ubuntu-22.04
steps:
@@ -490,7 +314,7 @@ jobs:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:
python-version: "3.12"
@@ -544,7 +368,7 @@ jobs:
merge-multiple: true
- name: Publish to PyPI (Test)
uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
with:
repository-url: https://test.pypi.org/legacy/
@@ -573,12 +397,17 @@ jobs:
merge-multiple: true
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
docker-build:
name: "Docker Build and Deploy"
needs: [ build-linux, build-macos, build-windows, docs-check, mypy-version-check, pre-commit ]
needs: [
tests,
docs-check,
mypy-version-check,
pre-commit
]
if: (github.event_name == 'push' || github.event_name == 'schedule' || github.event_name == 'release') && github.repository == 'freqtrade/freqtrade'
uses: ./.github/workflows/docker-build.yml
permissions:
@@ -588,3 +417,14 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
packages-cleanup:
name: "Docker Package Cleanup"
uses: ./.github/workflows/packages-cleanup.yml
# Only run on push, schedule, or release events
if: (github.event_name == 'push' || github.event_name == 'schedule') && github.repository == 'freqtrade/freqtrade'
permissions:
packages: write
with:
package_name: 'freqtrade'

View File

@@ -24,7 +24,7 @@ jobs:
persist-credentials: true
- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:
python-version: '3.12'

View File

@@ -28,7 +28,7 @@ jobs:
with:
persist-credentials: false
- name: Login to GitHub Container Registry
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.actor }}

View File

@@ -24,6 +24,7 @@ env:
IMAGE_NAME: "freqtradeorg/freqtrade"
CACHE_IMAGE: "freqtradeorg/freqtrade_cache"
GHCR_IMAGE_NAME: "ghcr.io/freqtrade/freqtrade"
PI_PLATFORM: "linux/arm/v7"
jobs:
deploy-docker:
@@ -36,33 +37,20 @@ jobs:
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Extract branch name
env:
BRANCH_NAME_INPUT: ${{ github.event.inputs.branch_name }}
run: |
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
BRANCH_NAME="${BRANCH_NAME_INPUT}"
else
BRANCH_NAME="${GITHUB_REF##*/}"
fi
echo "GITHUB_REF='${GITHUB_REF}'"
echo "BRANCH_NAME='${BRANCH_NAME}'"
echo "BRANCH_NAME=${BRANCH_NAME}" >> "$GITHUB_ENV"
- name: Set docker tag names
id: tags
uses: ./.github/actions/docker-tags
- name: Login to Docker Hub
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Set up QEMU
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
with:
cache-image: false
- name: Set up Docker Buildx
id: buildx
@@ -73,9 +61,86 @@ jobs:
env:
PLATFORMS: ${{ steps.buildx.outputs.platforms }}
- name: Build and test and push docker images
- name: Build image without cache
if: github.event_name == 'schedule'
env:
TAG: ${{ steps.tags.outputs.TAG }}
run: |
build_helpers/publish_docker_multi.sh
docker build -t ${CACHE_IMAGE}:${TAG} .
- name: Build ARMHF image without cache
if: github.event_name == 'schedule'
env:
TAG_PI: ${{ steps.tags.outputs.TAG_PI }}
CACHE_TAG_PI: ${{ steps.tags.outputs.CACHE_TAG_PI }}
run: |
docker buildx build \
--cache-to=type=registry,ref=${CACHE_TAG_PI} \
-f docker/Dockerfile.armhf \
--platform ${PI_PLATFORM} \
-t ${IMAGE_NAME}:${TAG_PI} \
--push \
--provenance=false \
.
- name: Build image with cache
if: github.event_name != 'schedule'
env:
TAG: ${{ steps.tags.outputs.TAG }}
run: |
docker pull ${IMAGE_NAME}:${TAG} || true
docker build --cache-from ${IMAGE_NAME}:${TAG} -t ${CACHE_IMAGE}:${TAG} .
- name: Build ARMHF image with cache
if: github.event_name != 'schedule'
# disable provenance due to https://github.com/docker/buildx/issues/1509
env:
TAG_PI: ${{ steps.tags.outputs.TAG_PI }}
CACHE_TAG_PI: ${{ steps.tags.outputs.CACHE_TAG_PI }}
run: |
docker buildx build \
--cache-from=type=registry,ref=${CACHE_TAG_PI} \
--cache-to=type=registry,ref=${CACHE_TAG_PI} \
-f docker/Dockerfile.armhf \
--platform ${PI_PLATFORM} \
-t ${IMAGE_NAME}:${TAG_PI} \
--push \
--provenance=false \
.
- name: Run build for AI images
env:
TAG: ${{ steps.tags.outputs.TAG }}
TAG_PLOT: ${{ steps.tags.outputs.TAG_PLOT }}
TAG_FREQAI: ${{ steps.tags.outputs.TAG_FREQAI }}
TAG_FREQAI_RL: ${{ steps.tags.outputs.TAG_FREQAI_RL }}
run: |
docker build --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG} -t ${CACHE_IMAGE}:${TAG_PLOT} -f docker/Dockerfile.plot .
docker build --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG} -t ${CACHE_IMAGE}:${TAG_FREQAI} -f docker/Dockerfile.freqai .
docker build --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_FREQAI} -t ${CACHE_IMAGE}:${TAG_FREQAI_RL} -f docker/Dockerfile.freqai_rl .
- name: Run backtest in Docker
env:
TAG: ${{ steps.tags.outputs.TAG }}
run: |
docker run --rm -v $(pwd)/tests/testdata/config.tests.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests ${CACHE_IMAGE}:${TAG} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
- name: Push cache images
env:
TAG: ${{ steps.tags.outputs.TAG }}
TAG_PLOT: ${{ steps.tags.outputs.TAG_PLOT }}
TAG_FREQAI: ${{ steps.tags.outputs.TAG_FREQAI }}
TAG_FREQAI_RL: ${{ steps.tags.outputs.TAG_FREQAI_RL }}
run: |
docker push ${CACHE_IMAGE}:$TAG
docker push ${CACHE_IMAGE}:$TAG_PLOT
docker push ${CACHE_IMAGE}:$TAG_FREQAI
docker push ${CACHE_IMAGE}:$TAG_FREQAI_RL
- name: list Images
run: |
docker images
deploy-arm:
name: "Deploy Docker ARM64"
@@ -91,31 +156,135 @@ jobs:
with:
persist-credentials: false
- name: Extract branch name
env:
BRANCH_NAME_INPUT: ${{ github.event.inputs.branch_name }}
run: |
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
BRANCH_NAME="${BRANCH_NAME_INPUT}"
else
BRANCH_NAME="${GITHUB_REF##*/}"
fi
echo "GITHUB_REF='${GITHUB_REF}'"
echo "BRANCH_NAME='${BRANCH_NAME}'"
echo "BRANCH_NAME=${BRANCH_NAME}" >> "$GITHUB_ENV"
- name: Set docker tag names
id: tags
uses: ./.github/actions/docker-tags
- name: Login to Docker Hub
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and test and push docker images
- name: Login to github
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build image without cache
if: github.event_name == 'schedule'
env:
GHCR_USERNAME: ${{ github.actor }}
GHCR_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TAG_ARM: ${{ steps.tags.outputs.TAG_ARM }}
run: |
build_helpers/publish_docker_arm64.sh
docker build -t ${IMAGE_NAME}:${TAG_ARM} .
- name: Build image with cache
if: github.event_name != 'schedule'
env:
TAG_ARM: ${{ steps.tags.outputs.TAG_ARM }}
run: |
docker pull ${IMAGE_NAME}:${TAG_ARM} || true
docker build --cache-from ${IMAGE_NAME}:${TAG_ARM} -t ${CACHE_IMAGE}:${TAG_ARM} .
- name: Run build for AI images
env:
TAG_ARM: ${{ steps.tags.outputs.TAG_ARM }}
TAG_PLOT_ARM: ${{ steps.tags.outputs.TAG_PLOT_ARM }}
TAG_FREQAI_ARM: ${{ steps.tags.outputs.TAG_FREQAI_ARM }}
TAG_FREQAI_RL_ARM: ${{ steps.tags.outputs.TAG_FREQAI_RL_ARM }}
run: |
docker build --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t ${CACHE_IMAGE}:${TAG_PLOT_ARM} -f docker/Dockerfile.plot .
docker build --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t ${CACHE_IMAGE}:${TAG_FREQAI_ARM} -f docker/Dockerfile.freqai .
docker build --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_FREQAI_ARM} -t ${CACHE_IMAGE}:${TAG_FREQAI_RL_ARM} -f docker/Dockerfile.freqai_rl .
- name: Run backtest in Docker
env:
TAG_ARM: ${{ steps.tags.outputs.TAG_ARM }}
run: |
docker run --rm -v $(pwd)/tests/testdata/config.tests.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests ${CACHE_IMAGE}:${TAG_ARM} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
- name: Docker images
run: |
docker images
- name: Push cache images
env:
TAG_ARM: ${{ steps.tags.outputs.TAG_ARM }}
TAG_PLOT_ARM: ${{ steps.tags.outputs.TAG_PLOT_ARM }}
TAG_FREQAI_ARM: ${{ steps.tags.outputs.TAG_FREQAI_ARM }}
TAG_FREQAI_RL_ARM: ${{ steps.tags.outputs.TAG_FREQAI_RL_ARM }}
run: |
docker push ${CACHE_IMAGE}:$TAG_PLOT_ARM
docker push ${CACHE_IMAGE}:$TAG_FREQAI_ARM
docker push ${CACHE_IMAGE}:$TAG_FREQAI_RL_ARM
docker push ${CACHE_IMAGE}:$TAG_ARM
- name: Create manifests
env:
TAG_ARM: ${{ steps.tags.outputs.TAG_ARM }}
TAG: ${{ steps.tags.outputs.TAG }}
TAG_PI: ${{ steps.tags.outputs.TAG_PI }}
run: |
docker buildx imagetools create \
--tag ${IMAGE_NAME}:${TAG} \
--tag ${GHCR_IMAGE_NAME}:${TAG} \
${CACHE_IMAGE}:${TAG} ${CACHE_IMAGE}:${TAG_ARM} ${IMAGE_NAME}:${TAG_PI}
- name: Create multiarch image - Plot
env:
TAG_PLOT: ${{ steps.tags.outputs.TAG_PLOT }}
TAG_PLOT_ARM: ${{ steps.tags.outputs.TAG_PLOT_ARM }}
run: |
docker buildx imagetools create \
--tag ${IMAGE_NAME}:${TAG_PLOT} \
--tag ${GHCR_IMAGE_NAME}:${TAG_PLOT} \
${CACHE_IMAGE}:${TAG_PLOT} ${CACHE_IMAGE}:${TAG_PLOT_ARM}
- name: Create multiarch image - FreqAI
env:
TAG_FREQAI: ${{ steps.tags.outputs.TAG_FREQAI }}
TAG_FREQAI_ARM: ${{ steps.tags.outputs.TAG_FREQAI_ARM }}
run: |
docker buildx imagetools create \
--tag ${IMAGE_NAME}:${TAG_FREQAI} \
--tag ${GHCR_IMAGE_NAME}:${TAG_FREQAI} \
${CACHE_IMAGE}:${TAG_FREQAI} ${CACHE_IMAGE}:${TAG_FREQAI_ARM}
- name: Create multiarch image - FreqAI RL
env:
TAG_FREQAI_RL: ${{ steps.tags.outputs.TAG_FREQAI_RL }}
TAG_FREQAI_RL_ARM: ${{ steps.tags.outputs.TAG_FREQAI_RL_ARM }}
TAG_FREQAI_TORCH: ${{ steps.tags.outputs.TAG_FREQAI_TORCH }}
run: |
# Create special Torch tag - which is identical to the RL tag.
docker buildx imagetools create \
--tag ${IMAGE_NAME}:${TAG_FREQAI_RL} \
--tag ${GHCR_IMAGE_NAME}:${TAG_FREQAI_RL} \
--tag ${IMAGE_NAME}:${TAG_FREQAI_TORCH} \
--tag ${GHCR_IMAGE_NAME}:${TAG_FREQAI_TORCH} \
${CACHE_IMAGE}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL_ARM}
- name: Tag latest
if: env.TAG == 'develop'
env:
TAG: ${{ steps.tags.outputs.TAG }}
run: |
# Tag image as latest
docker buildx imagetools create \
--tag ${GHCR_IMAGE_NAME}:${TAG} \
--tag ${GHCR_IMAGE_NAME}:latest \
${IMAGE_NAME}:${TAG}
- name: Docker images
run: |
docker images
- name: Image cleanup
run: |
docker image prune -a --force --filter "until=24h"
- name: Discord notification
uses: rjstone/discord-webhook-notify@c2597273488aeda841dd1e891321952b51f7996f #v2.2.1

View File

@@ -16,7 +16,7 @@ jobs:
persist-credentials: false
- name: Docker Hub Description
uses: peter-evans/dockerhub-description@432a30c9e07499fd01da9f8a49f0faf9e0ca5b77 # v4.0.2
uses: peter-evans/dockerhub-description@1b9a80c056b620d92cedb9d9b5a223409c68ddfa # v5.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}

46
.github/workflows/packages-cleanup.yml vendored Normal file
View File

@@ -0,0 +1,46 @@
name: Cleanup Packages

on:
  workflow_call:
    inputs:
      package_name:
        description: 'Package name to clean up'
        required: false
        default: 'freqtrade'
        type: string
  workflow_dispatch:
    inputs:
      package_name:
        description: 'Package name to clean up'
        required: false
        default: 'freqtrade'
        type: choice
        options:
          - 'freqtrade'
          - 'freqtrade-devcontainer'
      delete-untagged:
        description: 'Whether to delete only untagged images'
        required: false
        default: true
        type: boolean

env:
  # Fallback package name when no input is supplied.
  PACKAGE_NAME: "freqtrade"

jobs:
  deploy-docker:
    name: "Delete Packages"
    runs-on: ubuntu-24.04
    # Never run package cleanup on forks.
    if: github.repository == 'freqtrade/freqtrade'
    permissions:
      packages: write
    steps:
      - name: "Delete untagged Package Versions"
        uses: actions/delete-package-versions@v5
        with:
          package-name: ${{ inputs.package_name || env.PACKAGE_NAME }}
          package-type: 'container'
          min-versions-to-keep: 10
          # `!= false` keeps the default of true when the input is undefined
          # (workflow_call has no delete-untagged input) while still honoring
          # an explicit false from workflow_dispatch. The previous
          # `inputs.delete-untagged || 'true'` coerced false back to 'true',
          # making the checkbox impossible to disable.
          delete-only-untagged-versions: ${{ inputs.delete-untagged != false }}

View File

@@ -17,7 +17,7 @@ jobs:
with:
persist-credentials: false
- uses: actions/setup-python@v5
- uses: actions/setup-python@v6
with:
python-version: "3.12"

View File

@@ -26,4 +26,4 @@ jobs:
persist-credentials: false
- name: Run zizmor 🌈
uses: zizmorcore/zizmor-action@5ca5fc7a4779c5263a3ffa0e1f693009994446d1 # v0.1.2
uses: zizmorcore/zizmor-action@e673c3917a1aef3c65c972347ed84ccd013ecda4 # v0.2.0

View File

@@ -21,22 +21,22 @@ repos:
# stages: [push]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: "v1.17.1"
rev: "v1.18.2"
hooks:
- id: mypy
exclude: build_helpers
additional_dependencies:
- types-cachetools==6.2.0.20250827
- types-filelock==3.2.7
- types-requests==2.32.4.20250809
- types-requests==2.32.4.20250913
- types-tabulate==0.9.0.20241207
- types-python-dateutil==2.9.0.20250822
- scipy-stubs==1.16.1.1
- scipy-stubs==1.16.2.0
- SQLAlchemy==2.0.43
# stages: [push]
- repo: https://github.com/pycqa/isort
rev: "6.0.1"
rev: "6.1.0"
hooks:
- id: isort
name: isort (python)
@@ -44,7 +44,7 @@ repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.12.11'
rev: 'v0.13.3'
hooks:
- id: ruff
- id: ruff-format
@@ -83,6 +83,6 @@ repos:
# Ensure github actions remain safe
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.12.1
rev: v1.14.2
hooks:
- id: zizmor

View File

@@ -9,8 +9,10 @@ Issues labeled [good first issue](https://github.com/freqtrade/freqtrade/labels/
Few pointers for contributions:
- Create your PR against the `develop` branch, not `stable`.
- New features need to contain unit tests, must conform to PEP8 (max-line-length = 100) and should be documented with the introduction PR.
- PR's can be declared as `[WIP]` - which signify Work in Progress Pull Requests (which are not finished).
- Stick to English in commit messages, PR descriptions, code comments and variable names.
- New features need to contain unit tests, must pass CI (run pre-commit and pytest to get an early feedback) and should be documented with the introduction PR.
- PR's can be declared as draft - signaling Work in Progress for Pull Requests (which are not finished). We'll still aim to provide feedback on draft PR's in a timely manner.
- If you're using AI for your PR, please both mention it in the PR description and do a thorough review of the generated code. The final responsibility for the code lies with the PR author, not with the AI.
If you are unsure, discuss the feature on our [discord server](https://discord.gg/p7nuUNVfP7) or in an [issue](https://github.com/freqtrade/freqtrade/issues) before a Pull Request.
@@ -43,43 +45,43 @@ pytest tests/test_<file_name>.py
pytest tests/test_<file_name>.py::test_<method_name>
```
### 2. Test if your code is PEP8 compliant
### 2. Test if your code corresponds to our style guide
#### Run Ruff
We receive a lot of code that fails preliminary CI checks.
To help with that, we encourage contributors to install the git pre-commit hook that will let you know immediately when you try to commit code that fails these checks.
You can manually run pre-commit with `pre-commit run -a` - or install the git hook with `pre-commit install` to have it run automatically on each commit.
Running `pre-commit run -a` will run all checks, including `ruff`, `mypy` and `codespell` (among others).
#### Additional styles applied
- Have docstrings on all public methods
- Use double-quotes for docstrings
- Multiline docstrings should be indented to the level of the first quote
- Doc-strings should follow the reST format (`:param xxx: ...`, `:return: ...`, `:raises KeyError: ...`)
#### Manually run the individual checks
The following sections describe how to run the individual checks that are running as part of the pre-commit hook.
##### Run ruff
Check your code with ruff to ensure that it follows the style guide.
```bash
ruff check .
ruff format .
```
We receive a lot of code that fails the `ruff` checks.
To help with that, we encourage you to install the git pre-commit
hook that will warn you when you try to commit code that fails these checks.
##### Run mypy
you can manually run pre-commit with `pre-commit run -a`.
##### Additional styles applied
* Have docstrings on all public methods
* Use double-quotes for docstrings
* Multiline docstrings should be indented to the level of the first quote
* Doc-strings should follow the reST format (`:param xxx: ...`, `:return: ...`, `:raises KeyError: ... `)
### 3. Test if all type-hints are correct
#### Run mypy
Check your code with mypy to ensure that it follows the type-hinting rules.
``` bash
mypy freqtrade
```
### 4. Ensure formatting is correct
#### Run ruff
``` bash
ruff format .
```
## (Core)-Committer Guide
### Process: Pull Requests
@@ -118,18 +120,18 @@ Exceptions:
- Ensure cross-platform compatibility for every change that's accepted. Windows, Mac & Linux.
- Ensure no malicious code is introduced into the core code.
- Create issues for any major changes and enhancements that you wish to make. Discuss things transparently and get community feedback.
- Keep feature versions as small as possible, preferably one new feature per version.
- Keep feature PR's as small as possible, preferably one new feature per PR.
- Be welcoming to newcomers and encourage diverse new contributors from all backgrounds. See the Python Community Code of Conduct (https://www.python.org/psf/codeofconduct/).
### Becoming a Committer
Contributors may be given commit privileges. Preference will be given to those with:
1. Past contributions to Freqtrade and other related open-source projects. Contributions to Freqtrade include both code (both accepted and pending) and friendly participation in the issue tracker and Pull request reviews. Both quantity and quality are considered.
1. Past contributions to Freqtrade and other related open source projects. Contributions to Freqtrade include both code (both accepted and pending) and friendly participation in the issue tracker and Pull request reviews. Both quantity and quality are considered.
1. A coding style that the other core committers find simple, minimal, and clean.
1. Access to resources for cross-platform development and testing.
1. Time to devote to the project regularly.
Being a Committer does not grant write permission on `develop` or `stable` for security reasons (Users trust Freqtrade with their Exchange API keys).
Being a Committer does not automatically grant write permission on `develop` or `stable` for security reasons (Users trust Freqtrade with their Exchange API keys).
After being Committer for some time, a Committer may be named Core Committer and given full repository access.

View File

@@ -1,4 +1,4 @@
FROM python:3.13.7-slim-bookworm AS base
FROM python:3.13.8-slim-bookworm AS base
# Setup env
ENV LANG=C.UTF-8

View File

@@ -1,114 +0,0 @@
#!/bin/sh
# Use BuildKit, otherwise building on ARM fails
# Replace / with _ to create a valid tag
TAG=$(echo "${BRANCH_NAME}" | sed -e "s/\//_/g")
TAG_PLOT=${TAG}_plot
TAG_FREQAI=${TAG}_freqai
TAG_FREQAI_RL=${TAG_FREQAI}rl
TAG_FREQAI_TORCH=${TAG_FREQAI}torch
TAG_PI="${TAG}_pi"
TAG_ARM=${TAG}_arm
TAG_PLOT_ARM=${TAG_PLOT}_arm
TAG_FREQAI_ARM=${TAG_FREQAI}_arm
TAG_FREQAI_RL_ARM=${TAG_FREQAI_RL}_arm
echo "Running for ${TAG}"
# Add commit and commit_message to docker container
echo "${GITHUB_SHA}" > freqtrade_commit
if [ "${GITHUB_EVENT_NAME}" = "schedule" ]; then
echo "event ${GITHUB_EVENT_NAME}: full rebuild - skipping cache"
# Build regular image
docker build -t freqtrade:${TAG_ARM} .
else
echo "event ${GITHUB_EVENT_NAME}: building with cache"
# Build regular image
docker pull ${IMAGE_NAME}:${TAG_ARM}
docker build --cache-from ${IMAGE_NAME}:${TAG_ARM} -t freqtrade:${TAG_ARM} .
fi
if [ $? -ne 0 ]; then
echo "failed building multiarch images"
return 1
fi
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_PLOT_ARM} -f docker/Dockerfile.plot .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_FREQAI_ARM} -f docker/Dockerfile.freqai .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_FREQAI_ARM} -t freqtrade:${TAG_FREQAI_RL_ARM} -f docker/Dockerfile.freqai_rl .
# Tag image for upload and next build step
docker tag freqtrade:$TAG_ARM ${CACHE_IMAGE}:$TAG_ARM
docker tag freqtrade:$TAG_PLOT_ARM ${CACHE_IMAGE}:$TAG_PLOT_ARM
docker tag freqtrade:$TAG_FREQAI_ARM ${CACHE_IMAGE}:$TAG_FREQAI_ARM
docker tag freqtrade:$TAG_FREQAI_RL_ARM ${CACHE_IMAGE}:$TAG_FREQAI_RL_ARM
# Run backtest
docker run --rm -v $(pwd)/tests/testdata/config.tests.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG_ARM} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
if [ $? -ne 0 ]; then
echo "failed running backtest"
return 1
fi
docker images
docker push ${CACHE_IMAGE}:$TAG_PLOT_ARM
docker push ${CACHE_IMAGE}:$TAG_FREQAI_ARM
docker push ${CACHE_IMAGE}:$TAG_FREQAI_RL_ARM
docker push ${CACHE_IMAGE}:$TAG_ARM
# Create multi-arch image
# Make sure that all images contained here are pushed to github first.
# Otherwise installation might fail.
echo "create manifests"
docker manifest create ${IMAGE_NAME}:${TAG} ${CACHE_IMAGE}:${TAG} ${CACHE_IMAGE}:${TAG_ARM} ${IMAGE_NAME}:${TAG_PI}
docker manifest push -p ${IMAGE_NAME}:${TAG}
docker manifest create ${IMAGE_NAME}:${TAG_PLOT} ${CACHE_IMAGE}:${TAG_PLOT} ${CACHE_IMAGE}:${TAG_PLOT_ARM}
docker manifest push -p ${IMAGE_NAME}:${TAG_PLOT}
docker manifest create ${IMAGE_NAME}:${TAG_FREQAI} ${CACHE_IMAGE}:${TAG_FREQAI} ${CACHE_IMAGE}:${TAG_FREQAI_ARM}
docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI}
docker manifest create ${IMAGE_NAME}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL_ARM}
docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI_RL}
# Create special Torch tag - which is identical to the RL tag.
docker manifest create ${IMAGE_NAME}:${TAG_FREQAI_TORCH} ${CACHE_IMAGE}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL_ARM}
docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI_TORCH}
# copy images to ghcr.io
alias crane="docker run --rm -i -v $(pwd)/.crane:/home/nonroot/.docker/ gcr.io/go-containerregistry/crane"
mkdir .crane
chmod a+rwx .crane
echo "${GHCR_TOKEN}" | crane auth login ghcr.io -u "${GHCR_USERNAME}" --password-stdin
crane copy ${IMAGE_NAME}:${TAG_FREQAI_RL} ${GHCR_IMAGE_NAME}:${TAG_FREQAI_RL}
crane copy ${IMAGE_NAME}:${TAG_FREQAI_RL} ${GHCR_IMAGE_NAME}:${TAG_FREQAI_TORCH}
crane copy ${IMAGE_NAME}:${TAG_FREQAI} ${GHCR_IMAGE_NAME}:${TAG_FREQAI}
crane copy ${IMAGE_NAME}:${TAG_PLOT} ${GHCR_IMAGE_NAME}:${TAG_PLOT}
crane copy ${IMAGE_NAME}:${TAG} ${GHCR_IMAGE_NAME}:${TAG}
# Tag as latest for develop builds
if [ "${TAG}" = "develop" ]; then
echo 'Tagging image as latest'
docker manifest create ${IMAGE_NAME}:latest ${CACHE_IMAGE}:${TAG_ARM} ${IMAGE_NAME}:${TAG_PI} ${CACHE_IMAGE}:${TAG}
docker manifest push -p ${IMAGE_NAME}:latest
crane copy ${IMAGE_NAME}:latest ${GHCR_IMAGE_NAME}:latest
fi
docker images
rm -rf .crane
# Cleanup old images from arm64 node.
docker image prune -a --force --filter "until=24h"

View File

@@ -1,87 +0,0 @@
#!/bin/sh
# The below assumes a correctly setup docker buildx environment
# Replace / with _ to create a valid tag
TAG=$(echo "${BRANCH_NAME}" | sed -e "s/\//_/g")
TAG_PLOT=${TAG}_plot
TAG_FREQAI=${TAG}_freqai
TAG_FREQAI_RL=${TAG_FREQAI}rl
TAG_PI="${TAG}_pi"
PI_PLATFORM="linux/arm/v7"
echo "Running for ${TAG}"
CACHE_TAG=${CACHE_IMAGE}:${TAG_PI}_cache
# Add commit and commit_message to docker container
echo "${GITHUB_SHA}" > freqtrade_commit
if [ "${GITHUB_EVENT_NAME}" = "schedule" ]; then
echo "event ${GITHUB_EVENT_NAME}: full rebuild - skipping cache"
# Build regular image
docker build -t freqtrade:${TAG} .
# Build PI image
docker buildx build \
--cache-to=type=registry,ref=${CACHE_TAG} \
-f docker/Dockerfile.armhf \
--platform ${PI_PLATFORM} \
-t ${IMAGE_NAME}:${TAG_PI} \
--push \
--provenance=false \
.
else
echo "event ${GITHUB_EVENT_NAME}: building with cache"
# Build regular image
docker pull ${IMAGE_NAME}:${TAG}
docker build --cache-from ${IMAGE_NAME}:${TAG} -t freqtrade:${TAG} .
# Pull last build to avoid rebuilding the whole image
# docker pull --platform ${PI_PLATFORM} ${IMAGE_NAME}:${TAG}
# disable provenance due to https://github.com/docker/buildx/issues/1509
docker buildx build \
--cache-from=type=registry,ref=${CACHE_TAG} \
--cache-to=type=registry,ref=${CACHE_TAG} \
-f docker/Dockerfile.armhf \
--platform ${PI_PLATFORM} \
-t ${IMAGE_NAME}:${TAG_PI} \
--push \
--provenance=false \
.
fi
if [ $? -ne 0 ]; then
echo "failed building multiarch images"
return 1
fi
# Tag image for upload and next build step
docker tag freqtrade:$TAG ${CACHE_IMAGE}:$TAG
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG} -t freqtrade:${TAG_PLOT} -f docker/Dockerfile.plot .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG} -t freqtrade:${TAG_FREQAI} -f docker/Dockerfile.freqai .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_FREQAI} -t freqtrade:${TAG_FREQAI_RL} -f docker/Dockerfile.freqai_rl .
docker tag freqtrade:$TAG_PLOT ${CACHE_IMAGE}:$TAG_PLOT
docker tag freqtrade:$TAG_FREQAI ${CACHE_IMAGE}:$TAG_FREQAI
docker tag freqtrade:$TAG_FREQAI_RL ${CACHE_IMAGE}:$TAG_FREQAI_RL
# Run backtest
docker run --rm -v $(pwd)/tests/testdata/config.tests.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
if [ $? -ne 0 ]; then
echo "failed running backtest"
return 1
fi
docker images
docker push ${CACHE_IMAGE}:$TAG
docker push ${CACHE_IMAGE}:$TAG_PLOT
docker push ${CACHE_IMAGE}:$TAG_FREQAI
docker push ${CACHE_IMAGE}:$TAG_FREQAI_RL
docker images
if [ $? -ne 0 ]; then
echo "failed building image"
return 1
fi

View File

@@ -587,6 +587,7 @@
"RemotePairList",
"MarketCapPairList",
"AgeFilter",
"DelistFilter",
"FullTradesFilter",
"OffsetFilter",
"PerformanceFilter",
@@ -1460,6 +1461,11 @@
"type": "boolean",
"default": false
},
"override_exchange_check": {
"description": "Override the exchange check to force FreqAI to use exchanges that may not have enough historic data. Turn this to True if you know your FreqAI model and strategy do not require historical data.",
"type": "boolean",
"default": false
},
"feature_parameters": {
"description": "The parameters used to engineer the feature set",
"type": "object",

View File

@@ -25,10 +25,10 @@
"trading_mode": "spot",
"margin_mode": "",
"minimal_roi": {
"40": 0.0,
"30": 0.01,
"20": 0.02,
"0": 0.04
"40": 0.0,
"30": 0.01,
"20": 0.02,
"0": 0.04
},
"stoploss": -0.10,
"unfilledtimeout": {
@@ -47,7 +47,7 @@
"bids_to_ask_delta": 1
}
},
"exit_pricing":{
"exit_pricing": {
"price_side": "same",
"use_order_book": true,
"order_book_top": 1,
@@ -70,18 +70,38 @@
"exit": "GTC"
},
"pairlists": [
{"method": "StaticPairList"},
{"method": "FullTradesFilter"},
{
"method": "StaticPairList"
},
{
"method": "DelistFilter",
"max_days_from_now": 0,
},
{
"method": "FullTradesFilter"
},
{
"method": "VolumePairList",
"number_assets": 20,
"sort_key": "quoteVolume",
"refresh_period": 1800
},
{"method": "AgeFilter", "min_days_listed": 10},
{"method": "PrecisionFilter"},
{"method": "PriceFilter", "low_price_ratio": 0.01, "min_price": 0.00000010},
{"method": "SpreadFilter", "max_spread_ratio": 0.005},
{
"method": "AgeFilter",
"min_days_listed": 10
},
{
"method": "PrecisionFilter"
},
{
"method": "PriceFilter",
"low_price_ratio": 0.01,
"min_price": 0.00000010
},
{
"method": "SpreadFilter",
"max_spread_ratio": 0.005
},
{
"method": "RangeStabilityFilter",
"lookback_days": 10,
@@ -166,12 +186,12 @@
"external_message_consumer": {
"enabled": false,
"producers": [
{
"name": "default",
"host": "127.0.0.2",
"port": 8080,
"ws_token": "secret_ws_t0ken."
}
{
"name": "default",
"host": "127.0.0.2",
"port": 8080,
"ws_token": "secret_ws_t0ken."
}
],
"wait_timeout": 300,
"ping_timeout": 10,
@@ -195,4 +215,4 @@
"reduce_df_footprint": false,
"dataformat_ohlcv": "feather",
"dataformat_trades": "feather"
}
}

View File

@@ -34,8 +34,7 @@ COPY build_helpers/* /tmp/
# Install dependencies
COPY --chown=ftuser:ftuser requirements.txt /freqtrade/
USER ftuser
RUN pip install --user --prefer-binary --no-cache-dir "numpy<3.0" build \
&& pip install --user --no-index --find-links /tmp/ pyarrow TA-Lib \
RUN pip install --user --only-binary=:all: --find-links /tmp/ pyarrow TA-Lib \
&& pip install --user --no-cache-dir -r requirements.txt
# Copy dependencies to runtime-image

View File

@@ -10,6 +10,7 @@ usage: freqtrade backtesting [-h] [-v] [--no-color] [--logfile FILE] [-V]
[--stake-amount STAKE_AMOUNT] [--fee FLOAT]
[-p PAIRS [PAIRS ...]] [--eps]
[--enable-protections]
[--enable-dynamic-pairlist]
[--dry-run-wallet DRY_RUN_WALLET]
[--timeframe-detail TIMEFRAME_DETAIL]
[--strategy-list STRATEGY_LIST [STRATEGY_LIST ...]]
@@ -44,9 +45,14 @@ options:
Allow buying the same pair multiple times (position
stacking).
--enable-protections, --enableprotections
Enable protections for backtesting.Will slow
Enable protections for backtesting. Will slow
backtesting down by a considerable amount, but will
include configured protections
--enable-dynamic-pairlist
Enables dynamic pairlist refreshes in backtesting. The
pairlist will be generated for each new candle if
you're using a pairlist handler that supports this
feature, for example, ShuffleFilter.
--dry-run-wallet DRY_RUN_WALLET, --starting-balance DRY_RUN_WALLET
Starting balance, used for backtesting / hyperopt and
dry-runs.

View File

@@ -4,6 +4,7 @@ usage: freqtrade download-data [-h] [-v] [--no-color] [--logfile FILE] [-V]
[-p PAIRS [PAIRS ...]] [--pairs-file FILE]
[--days INT] [--new-pairs-days INT]
[--include-inactive-pairs]
[--no-parallel-download]
[--timerange TIMERANGE] [--dl-trades]
[--convert] [--exchange EXCHANGE]
[-t TIMEFRAMES [TIMEFRAMES ...]] [--erase]
@@ -24,6 +25,9 @@ options:
Default: `None`.
--include-inactive-pairs
Also download data from inactive pairs.
--no-parallel-download
Disable parallel startup download. Only use this if
you experience issues.
--timerange TIMERANGE
Specify what timerange of data to use.
--dl-trades Download trades instead of OHLCV data.

View File

@@ -44,7 +44,7 @@ options:
Allow buying the same pair multiple times (position
stacking).
--enable-protections, --enableprotections
Enable protections for backtesting.Will slow
Enable protections for backtesting. Will slow
backtesting down by a considerable amount, but will
include configured protections
--dry-run-wallet DRY_RUN_WALLET, --starting-balance DRY_RUN_WALLET

View File

@@ -11,6 +11,7 @@ usage: freqtrade lookahead-analysis [-h] [-v] [--no-color] [--logfile FILE]
[--stake-amount STAKE_AMOUNT]
[--fee FLOAT] [-p PAIRS [PAIRS ...]]
[--enable-protections]
[--enable-dynamic-pairlist]
[--dry-run-wallet DRY_RUN_WALLET]
[--timeframe-detail TIMEFRAME_DETAIL]
[--strategy-list STRATEGY_LIST [STRATEGY_LIST ...]]
@@ -21,6 +22,7 @@ usage: freqtrade lookahead-analysis [-h] [-v] [--no-color] [--logfile FILE]
[--minimum-trade-amount INT]
[--targeted-trade-amount INT]
[--lookahead-analysis-exportfilename LOOKAHEAD_ANALYSIS_EXPORTFILENAME]
[--allow-limit-orders]
options:
-h, --help show this help message and exit
@@ -43,9 +45,14 @@ options:
Limit command to these pairs. Pairs are space-
separated.
--enable-protections, --enableprotections
Enable protections for backtesting.Will slow
Enable protections for backtesting. Will slow
backtesting down by a considerable amount, but will
include configured protections
--enable-dynamic-pairlist
Enables dynamic pairlist refreshes in backtesting. The
pairlist will be generated for each new candle if
you're using a pairlist handler that supports this
feature, for example, ShuffleFilter.
--dry-run-wallet DRY_RUN_WALLET, --starting-balance DRY_RUN_WALLET
Starting balance, used for backtesting / hyperopt and
dry-runs.
@@ -79,6 +86,8 @@ options:
--lookahead-analysis-exportfilename LOOKAHEAD_ANALYSIS_EXPORTFILENAME
Use this csv-filename to store lookahead-analysis-
results
--allow-limit-orders Allow limit orders in lookahead analysis (could cause
false positives in lookahead analysis results).
Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).

View File

@@ -571,9 +571,7 @@ Commonly used time in force are:
**GTC (Good Till Canceled):**
This is most of the time the default time in force. It means the order will remain
on exchange till it is cancelled by the user. It can be fully or partially fulfilled.
If partially fulfilled, the remaining will stay on the exchange till cancelled.
This is most of the time the default time in force. It means the order will remain on exchange till it is cancelled by the user. It can be fully or partially fulfilled. If partially fulfilled, the remaining will stay on the exchange till cancelled.
**FOK (Fill Or Kill):**
@@ -581,8 +579,9 @@ It means if the order is not executed immediately AND fully then it is cancelled
**IOC (Immediate Or Canceled):**
It is the same as FOK (above) except it can be partially fulfilled. The remaining part
is automatically cancelled by the exchange.
It is the same as FOK (above) except it can be partially fulfilled. The remaining part is automatically cancelled by the exchange.
Not necessarily recommended, as this can lead to partial fills below the minimum trade size.
**PO (Post only):**

View File

@@ -2,6 +2,10 @@
This page combines common gotchas and Information which are exchange-specific and most likely don't apply to other exchanges.
## Quick overview of supported exchange features
--8<-- "includes/exchange-features.md"
## Exchange configuration
Freqtrade is based on [CCXT library](https://github.com/ccxt/ccxt) that supports over 100 cryptocurrency

View File

@@ -297,6 +297,13 @@ Should you be asked to expose your exchange keys or send funds to some random wa
Failing to follow these guidelines will not be responsibility of freqtrade.
## Support policy
We provide free support for Freqtrade on our [Discord server](https://discord.gg/p7nuUNVfP7) and via GitHub issues.
We only support the most recent release (e.g. 2025.8) and the current development branch (e.g. 2025.9-dev).
If you're on an older version, please follow the [upgrade instructions](updating.md) and see if your problem has already been addressed.
## "Freqtrade token"
Freqtrade does not have a Crypto token offering.

View File

@@ -4,7 +4,7 @@ Freqtrade provides a builtin webserver, which can serve [FreqUI](https://github.
By default, the UI is automatically installed as part of the installation (script, docker).
freqUI can also be manually installed by using the `freqtrade install-ui` command.
This same command can also be used to update freqUI to new new releases.
This same command can also be used to update freqUI to new releases.
Once the bot is started in trade / dry-run mode (with `freqtrade trade`) - the UI will be available under the configured API port (by default `http://127.0.0.1:8080`).
@@ -70,7 +70,16 @@ Things you can change (among others):
![FreqUI - Settings view](assets/frequi-settings-dark.png#only-dark)
![FreqUI - Settings view](assets/frequi-settings-light.png#only-light)
## Backtesting
## Webserver mode
When freqtrade is started in [webserver mode](utils.md#webserver-mode) (freqtrade started with `freqtrade webserver`), the webserver will start in a special mode allowing for additional features, for example:
* Downloading data
* Testing pairlists
* [Backtesting strategies](#backtesting)
* ... to be expanded
### Backtesting
When freqtrade is started in [webserver mode](utils.md#webserver-mode) (freqtrade started with `freqtrade webserver`), the backtesting view becomes available.
This view allows you to backtest strategies and visualize the results.

View File

@@ -79,7 +79,7 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
| `model_type` | Model string from stable_baselines3 or SBcontrib. Available strings include: `'TRPO', 'ARS', 'RecurrentPPO', 'MaskablePPO', 'PPO', 'A2C', 'DQN'`. User should ensure that `model_training_parameters` match those available to the corresponding stable_baselines3 model by visiting their documentation. [PPO doc](https://stable-baselines3.readthedocs.io/en/master/modules/ppo.html) (external website) <br> **Datatype:** string.
| `policy_type` | One of the available policy types from stable_baselines3 <br> **Datatype:** string.
| `max_training_drawdown_pct` | The maximum drawdown that the agent is allowed to experience during training. <br> **Datatype:** float. <br> Default: 0.8
| `cpu_count` | Number of threads/cpus to dedicate to the Reinforcement Learning training process (depending on if `ReinforcementLearning_multiproc` is selected or not). Recommended to leave this untouched, by default, this value is set to the total number of physical cores minus 1. <br> **Datatype:** int.
| `cpu_count` | Number of threads/cpus to dedicate to the Reinforcement Learning training process (depending on if `ReinforcementLearner_multiproc` is selected or not). Recommended to leave this untouched, by default, this value is set to the total number of physical cores minus 1. <br> **Datatype:** int.
| `model_reward_parameters` | Parameters used inside the customizable `calculate_reward()` function in `ReinforcementLearner.py` <br> **Datatype:** int.
| `add_state_info` | Tell FreqAI to include state information in the feature set for training and inferencing. The current state variables include trade duration, current profit, trade position. This is only available in dry/live runs, and is automatically switched to false for backtesting. <br> **Datatype:** bool. <br> Default: `False`.
| `net_arch` | Network architecture which is well described in [`stable_baselines3` doc](https://stable-baselines3.readthedocs.io/en/master/guide/custom_policy.html#examples). In summary: `[<shared layers>, dict(vf=[<non-shared value network layers>], pi=[<non-shared policy network layers>])]`. By default this is set to `[128, 128]`, which defines 2 shared hidden layers with 128 units each.

View File

@@ -7,7 +7,7 @@
FreqAI is a software designed to automate a variety of tasks associated with training a predictive machine learning model to generate market forecasts given a set of input signals. In general, FreqAI aims to be a sandbox for easily deploying robust machine learning libraries on real-time data ([details](#freqai-position-in-open-source-machine-learning-landscape)).
!!! Note
FreqAI is, and always will be, a not-for-profit, open-source project. FreqAI does *not* have a crypto token, FreqAI does *not* sell signals, and FreqAI does not have a domain besides the present [freqtrade documentation](https://www.freqtrade.io/en/latest/freqai/).
FreqAI is, and always will be, a not-for-profit, open source project. FreqAI does *not* have a crypto token, FreqAI does *not* sell signals, and FreqAI does not have a domain besides the present [freqtrade documentation](https://www.freqtrade.io/en/latest/freqai/).
Features include:
@@ -81,9 +81,9 @@ If you are using docker, a dedicated tag with FreqAI dependencies is available a
!!! note "docker-compose-freqai.yml"
We do provide an explicit docker-compose file for this in `docker/docker-compose-freqai.yml` - which can be used via `docker compose -f docker/docker-compose-freqai.yml run ...` - or can be copied to replace the original docker file. This docker-compose file also contains a (disabled) section to enable GPU resources within docker containers. This obviously assumes the system has GPU resources available.
### FreqAI position in open-source machine learning landscape
### FreqAI position in open source machine learning landscape
Forecasting chaotic time-series based systems, such as equity/cryptocurrency markets, requires a broad set of tools geared toward testing a wide range of hypotheses. Fortunately, a recent maturation of robust machine learning libraries (e.g. `scikit-learn`) has opened up a wide range of research possibilities. Scientists from a diverse range of fields can now easily prototype their studies on an abundance of established machine learning algorithms. Similarly, these user-friendly libraries enable "citizen scientists" to use their basic Python skills for data exploration. However, leveraging these machine learning libraries on historical and live chaotic data sources can be logistically difficult and expensive. Additionally, robust data collection, storage, and handling presents a disparate challenge. [`FreqAI`](#freqai) aims to provide a generalized and extensible open-sourced framework geared toward live deployments of adaptive modeling for market forecasting. The `FreqAI` framework is effectively a sandbox for the rich world of open-source machine learning libraries. Inside the `FreqAI` sandbox, users find they can combine a wide variety of third-party libraries to test creative hypotheses on a free live 24/7 chaotic data source - cryptocurrency exchange data.
Forecasting chaotic time-series based systems, such as equity/cryptocurrency markets, requires a broad set of tools geared toward testing a wide range of hypotheses. Fortunately, a recent maturation of robust machine learning libraries (e.g. `scikit-learn`) has opened up a wide range of research possibilities. Scientists from a diverse range of fields can now easily prototype their studies on an abundance of established machine learning algorithms. Similarly, these user-friendly libraries enable "citizen scientists" to use their basic Python skills for data exploration. However, leveraging these machine learning libraries on historical and live chaotic data sources can be logistically difficult and expensive. Additionally, robust data collection, storage, and handling presents a disparate challenge. [`FreqAI`](#freqai) aims to provide a generalized and extensible open-sourced framework geared toward live deployments of adaptive modeling for market forecasting. The `FreqAI` framework is effectively a sandbox for the rich world of open source machine learning libraries. Inside the `FreqAI` sandbox, users find they can combine a wide variety of third-party libraries to test creative hypotheses on a free live 24/7 chaotic data source - cryptocurrency exchange data.
### Citing FreqAI

View File

@@ -0,0 +1,19 @@
| Exchange | Mode | Margin mode | Stoploss type |
|---------|---------|------|------------------|
| [Binance](exchanges.md#binance) | spot | | limit |
| [Binance](exchanges.md#binance) | futures | isolated, cross | market, limit |
| [Bingx](exchanges.md#bingx) | spot | | market, limit |
| [Bitmart](exchanges.md#bitmart) | spot | | ❌ (not supported) |
| [Bybit](exchanges.md#bybit) | spot | | ❌ (not supported) |
| [Bybit](exchanges.md#bybit) | futures | isolated | market, limit |
| [Gate.io](exchanges.md#gateio) | spot | | limit |
| [Gate.io](exchanges.md#gateio) | futures | isolated | limit |
| [HTX](exchanges.md#htx) | spot | | limit |
| [Hyperliquid](exchanges.md#hyperliquid) | spot | | ❌ (not supported) |
| [Hyperliquid](exchanges.md#hyperliquid) | futures | isolated, cross | limit |
| [Kraken](exchanges.md#kraken) | spot | | market, limit |
| [OKX](exchanges.md#okx) | spot | | limit |
| [OKX](exchanges.md#okx) | futures | isolated | limit |
| [Bitvavo](exchanges.md#bitvavo) | spot | | ❌ (not supported) |
| [Kucoin](exchanges.md#kucoin) | spot | | market, limit |

View File

@@ -4,7 +4,7 @@ Pairlist Handlers define the list of pairs (pairlist) that the bot should trade.
In your configuration, you can use Static Pairlist (defined by the [`StaticPairList`](#static-pair-list) Pairlist Handler) and Dynamic Pairlist (defined by the [`VolumePairList`](#volume-pair-list) and [`PercentChangePairList`](#percent-change-pair-list) Pairlist Handlers).
Additionally, [`AgeFilter`](#agefilter), [`PrecisionFilter`](#precisionfilter), [`PriceFilter`](#pricefilter), [`ShuffleFilter`](#shufflefilter), [`SpreadFilter`](#spreadfilter) and [`VolatilityFilter`](#volatilityfilter) act as Pairlist Filters, removing certain pairs and/or moving their positions in the pairlist.
Additionally, [`AgeFilter`](#agefilter), [`DelistFilter`](#delistfilter), [`PrecisionFilter`](#precisionfilter), [`PriceFilter`](#pricefilter), [`ShuffleFilter`](#shufflefilter), [`SpreadFilter`](#spreadfilter) and [`VolatilityFilter`](#volatilityfilter) act as Pairlist Filters, removing certain pairs and/or moving their positions in the pairlist.
If multiple Pairlist Handlers are used, they are chained and a combination of all Pairlist Handlers forms the resulting pairlist the bot uses for trading and backtesting. Pairlist Handlers are executed in the sequence they are configured. You can define either `StaticPairList`, `VolumePairList`, `ProducerPairList`, `RemotePairList`, `MarketCapPairList` or `PercentChangePairList` as the starting Pairlist Handler.
@@ -27,6 +27,7 @@ You may also use something like `.*DOWN/BTC` or `.*UP/BTC` to exclude leveraged
* [`RemotePairList`](#remotepairlist)
* [`MarketCapPairList`](#marketcappairlist)
* [`AgeFilter`](#agefilter)
* [`DelistFilter`](#delistfilter)
* [`FullTradesFilter`](#fulltradesfilter)
* [`OffsetFilter`](#offsetfilter)
* [`PerformanceFilter`](#performancefilter)
@@ -38,7 +39,7 @@ You may also use something like `.*DOWN/BTC` or `.*UP/BTC` to exclude leveraged
* [`VolatilityFilter`](#volatilityfilter)
!!! Tip "Testing pairlists"
Pairlist configurations can be quite tricky to get right. Best use the [`test-pairlist`](utils.md#test-pairlist) utility sub-command to test your configuration quickly.
Pairlist configurations can be quite tricky to get right. Best use freqUI in [webserver mode](freq-ui.md#webserver-mode) or the [`test-pairlist`](utils.md#test-pairlist) utility sub-command to test your Pairlist configuration quickly.
#### Static Pair List
@@ -180,7 +181,7 @@ More sophisticated approach can be used, by using `lookback_timeframe` for candl
* `refresh_period`: Defines the interval (in seconds) at which the pairlist will be refreshed. The default is 1800 seconds (30 minutes).
* `lookback_days`: Number of days to look back. When `lookback_days` is selected, the `lookback_timeframe` is defaulted to 1 day.
* `lookback_timeframe`: Timeframe to use for the lookback period.
* `lookback_period`: Number of periods to look back at.
* `lookback_period`: Number of periods to look back at.
When PercentChangePairList is used after other Pairlist Handlers, it will operate on the outputs of those handlers. If it is the leading Pairlist Handler, it will select pairs from all available markets with the specified stake currency.
@@ -270,7 +271,6 @@ You can limit the length of the pairlist with the optional parameter `number_ass
],
```
!!! Tip "Combining pairlists"
This pairlist can be combined with all other pairlists and filters for further pairlist reduction, and can also act as an "additional" pairlist, on top of already defined pairs.
`ProducerPairList` can also be used multiple times in sequence, combining the pairs from multiple producers.
@@ -312,7 +312,7 @@ The `pairlist_url` option specifies the URL of the remote server where the pairl
The `save_to_file` option, when provided with a valid filename, saves the processed pairlist to that file in JSON format. This option is optional, and by default, the pairlist is not saved to a file.
??? Example "Multi bot with shared pairlist example"
`save_to_file` can be used to save the pairlist to a file with Bot1:
```json
@@ -407,6 +407,16 @@ be caught out buying before the pair has finished dropping in price.
This filter allows freqtrade to ignore pairs until they have been listed for at least `min_days_listed` days and listed before `max_days_listed`.
#### DelistFilter
Removes pairs that will be delisted on the exchange at most `max_days_from_now` days from now (defaults to `0`, which removes all pairs scheduled for delisting, no matter how far in the future). Currently, this filter only supports the following exchanges:
!!! Note "Available exchanges"
Delist filter is only available on Binance, where Binance Futures will work for both dry and live modes, while Binance Spot is limited to live mode (for technical reasons).
!!! Warning "Backtesting"
`DelistFilter` does not support backtesting mode.
#### FullTradesFilter
Shrink whitelist to consist only in-trade pairs when the trade slots are full (when `max_open_trades` isn't being set to `-1` in the config).
@@ -438,7 +448,7 @@ Example to remove the first 10 pairs from the pairlist, and takes the next 20 (t
```
!!! Warning
When `OffsetFilter` is used to split a larger pairlist among multiple bots in combination with `VolumeFilter`
When `OffsetFilter` is used to split a larger pairlist among multiple bots in combination with `VolumeFilter`
it can not be guaranteed that pairs won't overlap due to slightly different refresh intervals for the
`VolumeFilter`.
@@ -601,7 +611,7 @@ Adding `"sort_direction": "asc"` or `"sort_direction": "desc"` enables sorting m
### Full example of Pairlist Handlers
The below example blacklists `BNB/BTC`, uses `VolumePairList` with `20` assets, sorting pairs by `quoteVolume` and applies [`PrecisionFilter`](#precisionfilter) and [`PriceFilter`](#pricefilter), filtering all assets where 1 price unit is > 1%. Then the [`SpreadFilter`](#spreadfilter) and [`VolatilityFilter`](#volatilityfilter) is applied and pairs are finally shuffled with the random seed set to some predefined value.
The below example blacklists `BNB/BTC`, uses `VolumePairList` with `20` assets, sorting pairs by `quoteVolume`, then filter future delisted pairs using [`DelistFilter`](#delistfilter) and [`AgeFilter`](#agefilter) to remove pairs that are listed less than 10 days ago. After that [`PrecisionFilter`](#precisionfilter) and [`PriceFilter`](#pricefilter) are applied, filtering all assets where 1 price unit is > 1%. Then the [`SpreadFilter`](#spreadfilter) and [`VolatilityFilter`](#volatilityfilter) are applied and pairs are finally shuffled with the random seed set to some predefined value.
```json
"exchange": {
@@ -614,6 +624,10 @@ The below example blacklists `BNB/BTC`, uses `VolumePairList` with `20` assets,
"number_assets": 20,
"sort_key": "quoteVolume"
},
{
"method": "DelistFilter",
"max_days_from_now": 0
},
{"method": "AgeFilter", "min_days_listed": 10},
{"method": "PrecisionFilter"},
{"method": "PriceFilter", "low_price_ratio": 0.01},

View File

@@ -1,11 +1,11 @@
This section will highlight a few projects from members of the community.
!!! Note
The projects below are for the most part not maintained by the freqtrade , therefore use your own caution before using them.
The projects below are for the most part not maintained by the freqtrade team, therefore use your own caution before using them.
- [Example freqtrade strategies](https://github.com/freqtrade/freqtrade-strategies/)
- [FrequentHippo - Statistics of dry/live runs and backtests](http://frequenthippo.ddns.net) (by hippocritical).
- [Online pairlist generator](https://remotepairlist.com/) (by Blood4rc).
- [Freqtrade Backtesting Project](https://strat.ninja/) (by Blood4rc).
- [Freqtrade analysis notebook](https://github.com/froggleston/freqtrade_analysis_notebook) (by Froggleston).
- [TUI for freqtrade](https://github.com/froggleston/freqtrade-frogtrade9000) (by Froggleston).
- [FTUI - Terminal UI for freqtrade](https://github.com/freqtrade/ftui) (by Froggleston).
- [Bot Academy](https://botacademy.ddns.net/) (by stash86) - Blog about crypto bot projects.

View File

@@ -92,6 +92,11 @@ One account is used to share collateral between markets (trading pairs). Margin
Please read the [exchange specific notes](exchanges.md) for exchanges that support this mode and how they differ.
!!! Warning "Increased risk of liquidation"
Cross margin mode increases the risk of full account liquidation, as all trades share the same collateral.
A loss on one trade can affect the liquidation price of other trades.
Also, cross-position influence may not be fully simulated in dry-run or backtesting mode.
## Set leverage to use
Different strategies and risk profiles will require different levels of leverage.

View File

@@ -22,6 +22,7 @@ This is done by not looking at the strategy code itself, but at changed indicato
- `--dry-run-wallet` is forced to be basically infinite (1 billion).
- `--stake-amount` is forced to be a static 10000 (10k).
- `--enable-protections` is forced to be off.
- `order_types` are forced to be "market" (late entries) unless `--lookahead-allow-limit-orders` is set.
These are set to avoid users accidentally generating false positives.
@@ -99,6 +100,9 @@ This would lead to a false-negative, i.e. the strategy will be reported as non-b
Please don't use any options like enabling position stacking as this will distort the number of checked signals.
If you decide to do so, then make doubly sure that you won't ever run out of `max_open_trades` slots,
and that you have enough capital in the backtest wallet configuration.
- limit orders in combination with `custom_entry_price()` and `custom_exit_price()` callbacks can cause late / delayed entries and exits, causing false positives.
To avoid this - market orders are forced for this command. This implicitly means that `custom_entry_price()` and `custom_exit_price()` callbacks are not called.
Using `--lookahead-allow-limit-orders` will skip the override and use your configured order types - however has shown to eventually produce false positives.
- In the results table, the `biased_indicators` column
will falsely flag FreqAI target indicators defined in `set_freqai_targets()` as biased.
**These are not biased and can safely be ignored.**

View File

@@ -1,6 +1,6 @@
markdown==3.8.2
markdown==3.9
mkdocs==1.6.1
mkdocs-material==9.6.18
mkdocs-material==9.6.21
mdx_truly_sane_lists==1.3
pymdown-extensions==10.16.1
jinja2==3.1.6

View File

@@ -140,6 +140,11 @@ This method will work for all arguments - check the "show" command for a list of
# Get the status of the bot
ping = client.ping()
print(ping)
# Add pairs to blacklist
client.blacklist("BTC/USDT", "ETH/USDT")
# Add pairs to blacklist by supplying a list
client.blacklist(*listPairs)
# ...
```
@@ -155,63 +160,63 @@ freqtrade-client help
Possible commands:
available_pairs
Return available pair (backtest data) based on timeframe / stake_currency selection
Return available pair (backtest data) based on timeframe / stake_currency selection
:param timeframe: Only pairs with this timeframe available.
:param stake_currency: Only pairs that include this stake currency
balance
Get the account balance.
Get the account balance.
blacklist
Show the current blacklist.
Show the current blacklist.
:param add: List of coins to add (example: "BNB/BTC")
cancel_open_order
Cancel open order for trade.
Cancel open order for trade.
:param trade_id: Cancels open orders for this trade.
count
Return the amount of open trades.
Return the amount of open trades.
daily
Return the profits for each day, and amount of trades.
Return the profits for each day, and amount of trades.
delete_lock
Delete (disable) lock from the database.
Delete (disable) lock from the database.
:param lock_id: ID for the lock to delete
delete_trade
Delete trade from the database.
Delete trade from the database.
Tries to close open orders. Requires manual handling of this asset on the exchange.
:param trade_id: Deletes the trade with this ID from the database.
forcebuy
Buy an asset.
Buy an asset.
:param pair: Pair to buy (ETH/BTC)
:param price: Optional - price to buy
forceenter
Force entering a trade
Force entering a trade
:param pair: Pair to buy (ETH/BTC)
:param side: 'long' or 'short'
:param price: Optional - price to buy
forceexit
Force-exit a trade.
Force-exit a trade.
:param tradeid: Id of the trade (can be received via status command)
:param ordertype: Order type to use (must be market or limit)
:param amount: Amount to sell. Full sell if not given
health
Provides a quick health check of the running bot.
Provides a quick health check of the running bot.
lock_add
Manually lock a specific pair
@@ -222,22 +227,22 @@ lock_add
:param reason: Reason for the lock
locks
Return current locks
Return current locks
logs
Show latest logs.
Show latest logs.
:param limit: Limits log messages to the last <limit> logs. No limit to get the entire log.
pair_candles
Return live dataframe for <pair><timeframe>.
Return live dataframe for <pair><timeframe>.
:param pair: Pair to get data for
:param timeframe: Only pairs with this timeframe available.
:param limit: Limit result to the last n candles.
pair_history
Return historic, analyzed dataframe
Return historic, analyzed dataframe
:param pair: Pair to get data for
:param timeframe: Only pairs with this timeframe available.
@@ -245,59 +250,59 @@ pair_history
:param timerange: Timerange to get data for (same format than --timerange endpoints)
performance
Return the performance of the different coins.
Return the performance of the different coins.
ping
simple ping
simple ping
plot_config
Return plot configuration if the strategy defines one.
Return plot configuration if the strategy defines one.
profit
Return the profit summary.
Return the profit summary.
reload_config
Reload configuration.
Reload configuration.
show_config
Returns part of the configuration, relevant for trading operations.
Returns part of the configuration, relevant for trading operations.
start
Start the bot if it's in the stopped state.
Start the bot if it's in the stopped state.
pause
Pause the bot if it's in the running state. If triggered on stopped state will handle open positions.
Pause the bot if it's in the running state. If triggered on stopped state will handle open positions.
stats
Return the stats report (durations, sell-reasons).
Return the stats report (durations, sell-reasons).
status
Get the status of open trades.
Get the status of open trades.
stop
Stop the bot. Use `start` to restart.
Stop the bot. Use `start` to restart.
stopbuy
Stop buying (but handle sells gracefully). Use `reload_config` to reset.
Stop buying (but handle sells gracefully). Use `reload_config` to reset.
strategies
Lists available strategies
Lists available strategies
strategy
Get strategy details
Get strategy details
:param strategy: Strategy class name
sysinfo
Provides system information (CPU, RAM usage)
Provides system information (CPU, RAM usage)
trade
Return specific trade
Return specific trade
:param trade_id: Specify which trade to get.
trades
Return trades history, sorted by id
Return trades history, sorted by id
:param limit: Limits trades to the X last trades. Max 500 trades.
:param offset: Offset by this amount of trades.
@@ -316,10 +321,10 @@ list_custom_data
:param key: str, optional - Key of the custom-data
version
Return the version of the bot.
Return the version of the bot.
whitelist
Show the current whitelist.
Show the current whitelist.
```
@@ -339,32 +344,32 @@ All endpoints in the below table need to be prefixed with the base URL of the AP
| `/reload_config` | POST | Reloads the configuration file.
| `/trades` | GET | List last trades. Limited to 500 trades per call.
| `/trade/<tradeid>` | GET | Get specific trade.<br/>*Params:*<br/>- `tradeid` (`int`)
| `/trades/<tradeid>` | DELETE | Remove trade from the database. Tries to close open orders. Requires manual handling of this trade on the exchange.<br/>*Params:*<br/>- `tradeid` (`int`)
| `/trades/<tradeid>/open-order` | DELETE | Cancel open order for this trade.<br/>*Params:*<br/>- `tradeid` (`int`)
| `/trades/<tradeid>/reload` | POST | Reload a trade from the Exchange. Only works in live, and can potentially help recover a trade that was manually sold on the exchange.<br/>*Params:*<br/>- `tradeid` (`int`)
| `/trades/<tradeid>` | DELETE | Remove trade from the database. Tries to close open orders. Requires manual handling of this trade on the exchange.<br/>*Params:*<br/>- `tradeid` (`int`)
| `/trades/<tradeid>/open-order` | DELETE | Cancel open order for this trade.<br/>*Params:*<br/>- `tradeid` (`int`)
| `/trades/<tradeid>/reload` | POST | Reload a trade from the Exchange. Only works in live, and can potentially help recover a trade that was manually sold on the exchange.<br/>*Params:*<br/>- `tradeid` (`int`)
| `/show_config` | GET | Shows part of the current configuration with relevant settings to operation.
| `/logs` | GET | Shows last log messages.
| `/status` | GET | Lists all open trades.
| `/count` | GET | Displays number of trades used and available.
| `/entries` | GET | Shows profit statistics for each enter tags for given pair (or all pairs if pair isn't given). Pair is optional.<br/>*Params:*<br/>- `pair` (`str`)
| `/exits` | GET | Shows profit statistics for each exit reasons for given pair (or all pairs if pair isn't given). Pair is optional.<br/>*Params:*<br/>- `pair` (`str`)
| `/mix_tags` | GET | Shows profit statistics for each combinations of enter tag + exit reasons for given pair (or all pairs if pair isn't given). Pair is optional.<br/>*Params:*<br/>- `pair` (`str`)
| `/entries` | GET | Shows profit statistics for each enter tags for given pair (or all pairs if pair isn't given). Pair is optional.<br/>*Params:*<br/>- `pair` (`str`)
| `/exits` | GET | Shows profit statistics for each exit reasons for given pair (or all pairs if pair isn't given). Pair is optional.<br/>*Params:*<br/>- `pair` (`str`)
| `/mix_tags` | GET | Shows profit statistics for each combinations of enter tag + exit reasons for given pair (or all pairs if pair isn't given). Pair is optional.<br/>*Params:*<br/>- `pair` (`str`)
| `/locks` | GET | Displays currently locked pairs.
| `/locks` | POST | Locks a pair until "until". (Until will be rounded up to the nearest timeframe). Side is optional and is either `long` or `short` (default is `long`). Reason is optional.<br/>*Params:*<br/>- `<pair>` (`str`)<br/>- `<until>` (`datetime`)<br/>- `[side]` (`str`)<br/>- `[reason]` (`str`)
| `/locks/<lockid>` | DELETE | Deletes (disables) the lock by id.<br/>*Params:*<br/>- `lockid` (`int`)
| `/locks` | POST | Locks a pair until "until". (Until will be rounded up to the nearest timeframe). Side is optional and is either `long` or `short` (default is `long`). Reason is optional.<br/>*Params:*<br/>- `<pair>` (`str`)<br/>- `<until>` (`datetime`)<br/>- `[side]` (`str`)<br/>- `[reason]` (`str`)
| `/locks/<lockid>` | DELETE | Deletes (disables) the lock by id.<br/>*Params:*<br/>- `lockid` (`int`)
| `/profit` | GET | Display a summary of your profit/loss from close trades and some stats about your performance.
| `/forceexit` | POST | Instantly exits the given trade (ignoring `minimum_roi`), using the given order type ("market" or "limit", uses your config setting if not specified), and the chosen amount (full sell if not specified). If `all` is supplied as the `tradeid`, then all currently open trades will be forced to exit.<br/>*Params:*<br/>- `<tradeid>` (`int` or `str`)<br/>- `<ordertype>` (`str`)<br/>- `[amount]` (`float`)
| `/forceenter` | POST | Instantly enters the given pair. Side is optional and is either `long` or `short` (default is `long`). Rate is optional. (`force_entry_enable` must be set to True)<br/>*Params:*<br/>- `<pair>` (`str`)<br/>- `<side>` (`str`)<br/>- `[rate]` (`float`)
| `/performance` | GET | Show performance of each finished trade grouped by pair.
| `/balance` | GET | Show account balance per currency.
| `/daily` | GET | Shows profit or loss per day, over the last n days (n defaults to 7).<br/>*Params:*<br/>- `<n>` (`int`)
| `/weekly` | GET | Shows profit or loss per week, over the last n days (n defaults to 4).<br/>*Params:*<br/>- `<n>` (`int`)
| `/monthly` | GET | Shows profit or loss per month, over the last n days (n defaults to 3).<br/>*Params:*<br/>- `<n>` (`int`)
| `/daily` | GET | Shows profit or loss per day, over the last n days (n defaults to 7).<br/>*Params:*<br/>- `timescale` (`int`)
| `/weekly` | GET | Shows profit or loss per week, over the last n days (n defaults to 4).<br/>*Params:*<br/>- `timescale` (`int`)
| `/monthly` | GET | Shows profit or loss per month, over the last n days (n defaults to 3).<br/>*Params:*<br/>- `timescale` (`int`)
| `/stats` | GET | Display a summary of profit / loss reasons as well as average holding times.
| `/whitelist` | GET | Show the current whitelist.
| `/blacklist` | GET | Show the current blacklist.
| `/blacklist` | POST | Adds the specified pair to the blacklist.<br/>*Params:*<br/>- `pair` (`str`)
| `/blacklist` | DELETE | Deletes the specified list of pairs from the blacklist.<br/>*Params:*<br/>- `[pair,pair]` (`list[str]`)
| `/blacklist` | POST | Adds the specified pair to the blacklist.<br/>*Params:*<br/>- `blacklist` (`str`)
| `/blacklist` | DELETE | Deletes the specified list of pairs from the blacklist.<br/>*Params:*<br/>- `[pair,pair]` (`list[str]`)
| `/pair_candles` | GET | Returns dataframe for a pair / timeframe combination while the bot is running. **Alpha**
| `/pair_candles` | POST | Returns dataframe for a pair / timeframe combination while the bot is running, filtered by a provided list of columns to return. **Alpha**<br/>*Params:*<br/>- `<column_list>` (`list[str]`)
| `/pair_history` | GET | Returns an analyzed dataframe for a given timerange, analyzed by a given strategy. **Alpha**
@@ -488,7 +493,7 @@ To properly configure your reverse proxy (securely), please consult it's documen
### OpenAPI interface
To enable the builtin openAPI interface (Swagger UI), specify `"enable_openapi": true` in the api_server configuration.
This will enable the Swagger UI at the `/docs` endpoint. By default, that's running at http://localhost:8080/docs - but it'll depend on your settings.
This will enable the Swagger UI at the `/docs` endpoint. By default, that's running at <http://localhost:8080/docs> - but it'll depend on your settings.
### Advanced API usage using JWT tokens

View File

@@ -26,18 +26,9 @@ These modes can be configured with these values:
Stoploss on exchange is only supported for the following exchanges, and not all exchanges support both stop-limit and stop-market.
The Order-type will be ignored if only one mode is available.
| Exchange | stop-loss type |
|----------|-------------|
| Binance | limit |
| Binance Futures | market, limit |
| Bingx | market, limit |
| Bitget | market, limit |
| HTX | limit |
| kraken | market, limit |
| Gate | limit |
| Okx | limit |
| Kucoin | stop-limit, stop-market|
| Hyperliquid (futures only) | limit |
??? info "Supported exchanges and stoploss types"
--8<-- "includes/exchange-features.md"
!!! Note "Tight stoploss"
<ins>Do not set too low/tight stoploss value when using stop loss on exchange!</ins>

View File

@@ -1243,15 +1243,23 @@ class AwesomeStrategy(IStrategy):
```
!!! Tip "Learn more about storing data"
You can learn more about storing data on the [Storing custom trade data](strategy-advanced.md#storing-information-persistent) section.
Please keep in mind that this is considered advanced usage, and should be used with care.
## Plot annotations callback
The plot annotations callback is called whenever freqUI requests data to display a chart.
This callback has no meaning in the trade cycle context and is only used for charting purposes.
The strategy can then return a list of `AnnotationType` objects to be displayed on the chart.
Depending on the content returned - the chart can display horizontal areas, vertical areas, or boxes.
Depending on the content returned - the chart can display horizontal areas, vertical areas, boxes or lines.
The full object looks like this:
### Annotation types
Currently two types of annotations are supported, `area` and `line`.
#### Area
``` json
{
@@ -1261,10 +1269,29 @@ The full object looks like this:
"y_start": 94000.2, // Price / y axis value
"y_end": 98000, // Price / y axis value
"color": "",
"z_level": 5, // z-level, higher values are drawn on top of lower values. Positions relative to the Chart elements need to be set in freqUI.
"label": "some label"
}
```
#### Line
``` json
{
"type": "line", // Type of the annotation
"start": "2024-01-01 15:00:00", // Start date of the line
"end": "2024-01-01 16:00:00", // End date of the line
"y_start": 94000.2, // Price / y axis value
"y_end": 98000, // Price / y axis value
"color": "",
"z_level": 5, // z-level, higher values are drawn on top of lower values. Positions relative to the Chart elements need to be set in freqUI.
"label": "some label",
"width": 2, // Optional, line width in pixels. Defaults to 1
"line_style": "dashed" // Optional, can be "solid", "dashed" or "dotted". Defaults to "solid"
}
```
The below example will mark the chart with areas for the hours 8 and 15, with a grey color, highlighting the market open and close hours.
This is obviously a very basic example.
@@ -1332,7 +1359,7 @@ Entries will be validated, and won't be passed to the UI if they don't correspon
while start_dt < end_date:
start_dt += timedelta(hours=1)
if (start_dt.hour % 4) == 0:
mark_areas.append(
annotations.append(
{
"type": "area",
"label": "4h",
@@ -1343,7 +1370,7 @@ Entries will be validated, and won't be passed to the UI if they don't correspon
)
elif (start_dt.hour % 2) == 0:
price = dataframe.loc[dataframe["date"] == start_dt, ["close"]].mean()
mark_areas.append(
annotations.append(
{
"type": "area",
"label": "2h",
@@ -1352,6 +1379,7 @@ Entries will be validated, and won't be passed to the UI if they don't correspon
"y_end": price * 1.01,
"y_start": price * 0.99,
"color": "rgba(0, 255, 0, 0.4)",
"z_level": 5,
}
)

View File

@@ -84,6 +84,7 @@ Check the [configuration documentation](configuration.md) about how to set the b
**Always use dry mode when testing as this gives you an idea of how your strategy will work in reality without risking capital.**
## Diving in deeper
**For the following section we will use the [user_data/strategies/sample_strategy.py](https://github.com/freqtrade/freqtrade/blob/develop/freqtrade/templates/sample_strategy.py)
file as reference.**
@@ -99,9 +100,9 @@ file as reference.**
Some common patterns for this are listed in the [Common Mistakes](#common-mistakes-when-developing-strategies) section of this document.
??? Hint "Lookahead and recursive analysis"
Freqtrade includes two helpful commands to help assess common lookahead (using future data) and
recursive bias (variance in indicator values) issues. Before running a strategy in dry or live more,
you should always use these commands first. Please check the relevant documentation for
Freqtrade includes two helpful commands to help assess common lookahead (using future data) and
recursive bias (variance in indicator values) issues. Before running a strategy in dry or live mode,
you should always use these commands first. Please check the relevant documentation for
[lookahead](lookahead-analysis.md) and [recursive](recursive-analysis.md) analysis.
### Dataframe
@@ -154,7 +155,7 @@ Vectorized operations perform calculations across the whole range of data and ar
!!! Warning "Trade order assumptions"
In backtesting, signals are generated on candle close. Trades are then initiated immediately on the next candle open.
In dry and live, this may be delayed due to all pair dataframes needing to be analysed first, then trade processing
for each of those pairs happens. This means that in dry/live you need to be mindful of having as low a computation
delay as possible, usually by running a low number of pairs and having a CPU with a good clock speed.
@@ -284,7 +285,7 @@ It's important to always return the dataframe without removing/modifying the col
This method will also define a new column, `"enter_long"` (`"enter_short"` for shorts), which needs to contain `1` for entries, and `0` for "no action". `enter_long` is a mandatory column that must be set even if the strategy is shorting only.
You can name your entry signals by using the `"enter_tag"` column, which can help debug and assess your strategy later.
You can name your entry signals by using the `"enter_tag"` column, which can help debug and assess your strategy later.
Sample from `user_data/strategies/sample_strategy.py`:
@@ -555,7 +556,7 @@ A full sample can be found [in the DataProvider section](#complete-dataprovider-
??? Note "Alternative candle types"
Informative_pairs can also provide a 3rd tuple element defining the candle type explicitly.
Availability of alternative candle-types will depend on the trading-mode and the exchange.
Availability of alternative candle-types will depend on the trading-mode and the exchange.
In general, spot pairs cannot be used in futures markets, and futures candles can't be used as informative pairs for spot bots.
Details about this may vary, if they do, this can be found in the exchange documentation.
@@ -783,6 +784,8 @@ Please always check the mode of operation to select the correct method to get da
- `ohlcv(pair, timeframe)` - Currently cached candle (OHLCV) data for the pair, returns DataFrame or empty DataFrame.
- [`orderbook(pair, maximum)`](#orderbookpair-maximum) - Returns latest orderbook data for the pair, a dict with bids/asks with a total of `maximum` entries.
- [`ticker(pair)`](#tickerpair) - Returns current ticker data for the pair. See [ccxt documentation](https://github.com/ccxt/ccxt/wiki/Manual#price-tickers) for more details on the Ticker data structure.
- [`check_delisting(pair)`](#check_delistingpair) - Return Datetime of the pair delisting schedule if any, otherwise return None
- [`funding_rate(pair)`](#funding_ratepair) - Returns current funding rate data for the pair.
- `runmode` - Property containing the current runmode.
### Example Usages
@@ -854,6 +857,8 @@ dataframe, last_updated = self.dp.get_analyzed_dataframe(pair=metadata['pair'],
### *orderbook(pair, maximum)*
Retrieve the current order book for a pair.
``` python
if self.dp.runmode.value in ('live', 'dry_run'):
ob = self.dp.orderbook(metadata['pair'], 1)
@@ -903,6 +908,69 @@ if self.dp.runmode.value in ('live', 'dry_run'):
!!! Warning "Warning about backtesting"
This method will always return up-to-date / real-time values. As such, usage during backtesting / hyperopt without runmode checks will lead to wrong results, e.g. your whole dataframe will contain the same single value in all rows.
### *check_delisting(pair)*
```python
def custom_exit(self, pair: str, trade: Trade, current_time: datetime, current_rate: float, current_profit: float, **kwargs):
if self.dp.runmode.value in ('live', 'dry_run'):
delisting_dt = self.dp.check_delisting(pair)
if delisting_dt is not None:
return "delist"
```
!!! Note "Availability of delisting information"
This method is only available for certain exchanges and will return `None` in case this information is not available or if the pair is not scheduled for delisting.
!!! Warning "Warning about backtesting"
This method will always return up-to-date / real-time values. As such, usage during backtesting / hyperopt without runmode checks will lead to wrong results, e.g. your whole dataframe will contain the same single value in all rows.
### *funding_rate(pair)*
Retrieves the current funding rate for the pair and only works for futures pairs in the format of `base/quote:settle` (e.g. `ETH/USDT:USDT`).
``` python
if self.dp.runmode.value in ('live', 'dry_run'):
funding_rate = self.dp.funding_rate(metadata['pair'])
dataframe['current_funding_rate'] = funding_rate['fundingRate']
dataframe['next_funding_timestamp'] = funding_rate['fundingTimestamp']
dataframe['next_funding_datetime'] = funding_rate['fundingDatetime']
```
The funding rate structure is aligned with the funding rate structure from [ccxt](https://github.com/ccxt/ccxt/wiki/Manual#funding-rate-structure), so the result will be formatted as follows:
``` python
{
"info": {
# ...
},
"symbol": "BTC/USDT:USDT",
"markPrice": 110730.7,
"indexPrice": 110782.52,
"interestRate": 0.0001,
"estimatedSettlePrice": 110822.67200153,
"timestamp": 1757146321001,
"datetime": "2025-09-06T08:12:01.001Z",
"fundingRate": 5.609e-05,
"fundingTimestamp": 1757174400000,
"fundingDatetime": "2025-09-06T16:00:00.000Z",
"nextFundingRate": None,
"nextFundingTimestamp": None,
"nextFundingDatetime": None,
"previousFundingRate": None,
"previousFundingTimestamp": None,
"previousFundingDatetime": None,
"interval": None,
}
```
Therefore, using `funding_rate['fundingRate']` as demonstrated above will use the current funding rate.
Actually available data will vary between exchanges, so this code may not work as expected across exchanges.
!!! Warning "Warning about backtesting"
Current funding-rate is not part of the historic data which means backtesting and hyperopt will not work correctly if this method is used, as the method will return up-to-date values.
We recommend to use the historically available funding rate for backtesting (which is automatically downloaded, and is at the frequency of what the exchange provides, usually 4h or 8h).
`self.dp.get_pair_dataframe(pair=metadata['pair'], timeframe='8h', candle_type="funding_rate")`
### Send Notification
The dataprovider `.send_msg()` function allows you to send custom notifications from your strategy.

View File

@@ -1,6 +1,6 @@
"""Freqtrade bot"""
__version__ = "2025.9-dev"
__version__ = "2025.10-dev"
if "dev" in __version__:
from pathlib import Path

View File

@@ -49,6 +49,7 @@ ARGS_BACKTEST = [
*ARGS_COMMON_OPTIMIZE,
"position_stacking",
"enable_protections",
"enable_dynamic_pairlist",
"dry_run_wallet",
"timeframe_detail",
"strategy_list",
@@ -164,6 +165,7 @@ ARGS_DOWNLOAD_DATA = [
"days",
"new_pairs_days",
"include_inactive",
"no_parallel_download",
"timerange",
"download_trades",
"convert_trades",
@@ -259,7 +261,12 @@ ARGS_LOOKAHEAD_ANALYSIS = [
a
for a in ARGS_BACKTEST
if a not in ("position_stacking", "backtest_cache", "backtest_breakdown", "backtest_notes")
] + ["minimum_trade_amount", "targeted_trade_amount", "lookahead_analysis_exportfilename"]
] + [
"minimum_trade_amount",
"targeted_trade_amount",
"lookahead_analysis_exportfilename",
"lookahead_allow_limit_orders",
]
ARGS_RECURSIVE_ANALYSIS = ["timeframe", "timerange", "dataformat_ohlcv", "pairs", "startup_candle"]

View File

@@ -184,12 +184,20 @@ AVAILABLE_CLI_OPTIONS = {
"enable_protections": Arg(
"--enable-protections",
"--enableprotections",
help="Enable protections for backtesting."
help="Enable protections for backtesting. "
"Will slow backtesting down by a considerable amount, but will include "
"configured protections",
action="store_true",
default=False,
),
"enable_dynamic_pairlist": Arg(
"--enable-dynamic-pairlist",
help="Enables dynamic pairlist refreshes in backtesting. "
"The pairlist will be generated for each new candle if you're using a "
"pairlist handler that supports this feature, for example, ShuffleFilter.",
action="store_true",
default=False,
),
"strategy_list": Arg(
"--strategy-list",
help="Provide a space-separated list of strategies to backtest. "
@@ -454,6 +462,11 @@ AVAILABLE_CLI_OPTIONS = {
help="Also download data from inactive pairs.",
action="store_true",
),
"no_parallel_download": Arg(
"--no-parallel-download",
help="Disable parallel startup download. Only use this if you experience issues.",
action="store_true",
),
"new_pairs_days": Arg(
"--new-pairs-days",
help="Download data of new pairs for given number of days. Default: `%(default)s`.",
@@ -801,6 +814,14 @@ AVAILABLE_CLI_OPTIONS = {
help="Specify startup candles to be checked (`199`, `499`, `999`, `1999`).",
nargs="+",
),
"lookahead_allow_limit_orders": Arg(
"--allow-limit-orders",
help=(
"Allow limit orders in lookahead analysis (could cause false positives "
"in lookahead analysis results)."
),
action="store_true",
),
"show_sensitive": Arg(
"--show-sensitive",
help="Show secrets in the output.",

View File

@@ -66,7 +66,7 @@ def start_list_exchanges(args: dict[str, Any]) -> None:
if exchange["is_alias"]:
name.stylize("strike")
classname.stylize("strike")
classname.append(f" (use {exchange['alias_for']})", style="italic")
classname.append(f"\n -> use {exchange['alias_for']}", style="italic")
trade_modes = Text(
", ".join(

View File

@@ -1142,6 +1142,15 @@ CONF_SCHEMA = {
"type": "boolean",
"default": False,
},
"override_exchange_check": {
"description": (
"Override the exchange check to force FreqAI to use exchanges "
"that may not have enough historic data. Turn this to True if "
"you know your FreqAI model and strategy do not require historical data."
),
"type": "boolean",
"default": False,
},
"feature_parameters": {
"description": "The parameters used to engineer the feature set",
"type": "object",

View File

@@ -113,7 +113,6 @@ def _validate_price_config(conf: dict[str, Any]) -> None:
"""
When using market orders, price sides must be using the "other" side of the price
"""
# TODO: The below could be an enforced setting when using market orders
if conf.get("order_types", {}).get("entry") == "market" and conf.get("entry_pricing", {}).get(
"price_side"
) not in ("ask", "other"):

View File

@@ -12,13 +12,16 @@ from typing import Any
from freqtrade import constants
from freqtrade.configuration.deprecated_settings import process_temporary_deprecated_settings
from freqtrade.configuration.directory_operations import create_datadir, create_userdata_dir
from freqtrade.configuration.environment_vars import enironment_vars_to_dict
from freqtrade.configuration.environment_vars import environment_vars_to_dict
from freqtrade.configuration.load_config import load_file, load_from_files
from freqtrade.constants import Config
from freqtrade.enums import (
NON_UTIL_MODES,
TRADE_MODES,
CandleType,
MarginMode,
RunMode,
TradingMode,
)
from freqtrade.exceptions import OperationalException
from freqtrade.loggers import setup_logging
@@ -77,7 +80,7 @@ class Configuration:
from freqtrade.commands.arguments import NO_CONF_ALLOWED
if self.args.get("command") not in NO_CONF_ALLOWED:
env_data = enironment_vars_to_dict()
env_data = environment_vars_to_dict()
config = deep_merge_dicts(env_data, config)
# Normalize config
@@ -230,6 +233,9 @@ class Configuration:
config["exportdirectory"] = config["user_data_dir"] / "backtest_results"
if not config.get("exportfilename"):
config["exportfilename"] = None
if config.get("exportfilename"):
# ensure exportfilename is a Path object
config["exportfilename"] = Path(config["exportfilename"])
config["exportdirectory"] = Path(config["exportdirectory"])
if self.args.get("show_sensitive"):
@@ -256,7 +262,13 @@ class Configuration:
self._args_to_config(
config,
argname="enable_protections",
logstring="Parameter --enable-protections detected, enabling Protections. ...",
logstring="Parameter --enable-protections detected, enabling Protections ...",
)
self._args_to_config(
config,
argname="enable_dynamic_pairlist",
logstring="Parameter --enable-dynamic-pairlist detected, enabling dynamic pairlist ...",
)
if self.args.get("max_open_trades"):
@@ -312,7 +324,6 @@ class Configuration:
"recursive_strategy_search",
"Recursively searching for a strategy in the strategies folder.",
),
("timeframe", "Overriding timeframe with Command line argument"),
("export", "Parameter --export detected: {} ..."),
("backtest_breakdown", "Parameter --breakdown detected ..."),
("backtest_cache", "Parameter --cache={} detected ..."),
@@ -391,6 +402,7 @@ class Configuration:
("timeframes", "timeframes --timeframes: {}"),
("days", "Detected --days: {}"),
("include_inactive", "Detected --include-inactive-pairs: {}"),
("no_parallel_download", "Detected --no-parallel-download: {}"),
("download_trades", "Detected --dl-trades: {}"),
("convert_trades", "Detected --convert: {} - Converting Trade data to OHCV {}"),
("dataformat_ohlcv", 'Using "{}" to store OHLCV data.'),
@@ -406,6 +418,14 @@ class Configuration:
self._args_to_config(
config, argname="trading_mode", logstring="Detected --trading-mode: {}"
)
# TODO: The following 3 lines (candle_type_def, trading_mode, margin_mode) are actually
# set in the exchange class. They're however necessary as fallback to avoid
# random errors in commands that don't initialize an exchange.
config["candle_type_def"] = CandleType.get_default(
config.get("trading_mode", "spot") or "spot"
)
config["trading_mode"] = TradingMode(config.get("trading_mode", "spot") or "spot")
config["margin_mode"] = MarginMode(config.get("margin_mode", "") or "")
self._args_to_config(
config, argname="candle_types", logstring="Detected --candle-types: {}"
)

View File

@@ -73,7 +73,7 @@ def _flat_vars_to_nested_dict(env_dict: dict[str, Any], prefix: str) -> dict[str
return relevant_vars
def enironment_vars_to_dict() -> dict[str, Any]:
def environment_vars_to_dict() -> dict[str, Any]:
"""
Read environment variables and return a nested dict for relevant variables
Relevant variables must follow the FREQTRADE__{section}__{key} pattern

View File

@@ -80,6 +80,9 @@ class TimeRange:
val = stopdt.strftime(DATETIME_PRINT_FORMAT)
return val
def __repr__(self) -> str:
    # Compact, human-readable representation using the timerange string form.
    return f"TimeRange({self.timerange_str})"
def __eq__(self, other):
"""Override the default Equals behavior"""
return (

View File

@@ -49,6 +49,7 @@ AVAILABLE_PAIRLISTS = [
"RemotePairList",
"MarketCapPairList",
"AgeFilter",
"DelistFilter",
"FullTradesFilter",
"OffsetFilter",
"PerformanceFilter",

View File

@@ -181,7 +181,6 @@ def trim_dataframes(
def order_book_to_dataframe(bids: list, asks: list) -> DataFrame:
"""
TODO: This should get a dedicated test
Gets order book list, returns dataframe with below format per suggested by creslin
-------------------------------------------------------------------
b_sum b_size bids asks a_size a_sum

View File

@@ -23,7 +23,7 @@ from freqtrade.data.history import get_datahandler, load_pair_history
from freqtrade.enums import CandleType, RPCMessageType, RunMode, TradingMode
from freqtrade.exceptions import ExchangeError, OperationalException
from freqtrade.exchange import Exchange, timeframe_to_prev_date, timeframe_to_seconds
from freqtrade.exchange.exchange_types import OrderBook
from freqtrade.exchange.exchange_types import FundingRate, OrderBook
from freqtrade.misc import append_candles_to_dataframe
from freqtrade.rpc import RPCManager
from freqtrade.rpc.rpc_types import RPCAnalyzedDFMsg
@@ -548,6 +548,7 @@ class DataProvider:
def ticker(self, pair: str):
"""
Return last ticker data from exchange
Warning: Performs a network request - so use with common sense.
:param pair: Pair to get the data for
:return: Ticker dict from exchange or empty dict if ticker is not available for the pair
"""
@@ -561,7 +562,7 @@ class DataProvider:
def orderbook(self, pair: str, maximum: int) -> OrderBook:
"""
Fetch latest l2 orderbook data
Warning: Does a network request - so use with common sense.
Warning: Performs a network request - so use with common sense.
:param pair: pair to get the data for
:param maximum: Maximum number of orderbook entries to query
:return: dict including bids/asks with a total of `maximum` entries.
@@ -570,6 +571,23 @@ class DataProvider:
raise OperationalException(NO_EXCHANGE_EXCEPTION)
return self._exchange.fetch_l2_order_book(pair, maximum)
def funding_rate(self, pair: str) -> FundingRate:
    """
    Fetch the current funding rate for a pair from the exchange.
    Warning: Performs a network request - so use with common sense.
    :param pair: Pair to get the data for
    :return: Funding rate dict from the exchange, or an empty dict if the funding
        rate is not available for this pair.
        If available, the "fundingRate" field will contain the funding rate.
        "fundingTimestamp" and "fundingDatetime" will contain the next funding times.
        Actually filled fields may vary between exchanges.
    """
    exchange = self._exchange
    if exchange is None:
        raise OperationalException(NO_EXCHANGE_EXCEPTION)
    try:
        return exchange.fetch_funding_rate(pair)
    except ExchangeError:
        # Degrade gracefully - unsupported pair or transient exchange problem.
        return {}
def send_msg(self, message: str, *, always_send: bool = False) -> None:
"""
Send custom RPC Notifications from your bot.
@@ -586,3 +604,19 @@ class DataProvider:
if always_send or message not in self.__msg_cache:
self._msg_queue.append(message)
self.__msg_cache[message] = True
def check_delisting(self, pair: str) -> datetime | None:
    """
    Check whether a pair is going to be delisted on the exchange.
    Will only return a datetime if the pair is scheduled for delisting.
    :param pair: Pair to check
    :return: Datetime of the pair's delisting, None otherwise
    :raises OperationalException: if no exchange is attached to this DataProvider
    """
    if self._exchange is None:
        raise OperationalException(NO_EXCHANGE_EXCEPTION)
    try:
        return self._exchange.check_delisting_time(pair)
    except ExchangeError:
        # Best effort: if the exchange query fails, assume the pair is not delisting.
        logger.warning(f"Could not fetch market data for {pair}. Assuming no delisting.")
        return None

View File

@@ -6,7 +6,14 @@ from pathlib import Path
from pandas import DataFrame, concat
from freqtrade.configuration import TimeRange
from freqtrade.constants import DATETIME_PRINT_FORMAT, DL_DATA_TIMEFRAMES, DOCS_LINK, Config
from freqtrade.constants import (
DATETIME_PRINT_FORMAT,
DL_DATA_TIMEFRAMES,
DOCS_LINK,
Config,
ListPairsWithTimeframes,
PairWithTimeframe,
)
from freqtrade.data.converter import (
clean_ohlcv_dataframe,
convert_trades_to_ohlcv,
@@ -17,6 +24,7 @@ from freqtrade.data.history.datahandlers import IDataHandler, get_datahandler
from freqtrade.enums import CandleType, TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import Exchange
from freqtrade.exchange.exchange_utils import date_minus_candles
from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist
from freqtrade.util import dt_now, dt_ts, format_ms_time, format_ms_time_det
from freqtrade.util.migrations import migrate_data
@@ -226,6 +234,7 @@ def _download_pair_history(
candle_type: CandleType,
erase: bool = False,
prepend: bool = False,
pair_candles: DataFrame | None = None,
) -> bool:
"""
Download latest candles from the exchange for the pair and timeframe passed in parameters
@@ -238,6 +247,7 @@ def _download_pair_history(
:param timerange: range of time to download
:param candle_type: Any of the enum CandleType (must match trading mode!)
:param erase: Erase existing data
:param pair_candles: Optional with "1 call" pair candles.
:return: bool with success state
"""
data_handler = get_datahandler(datadir, data_handler=data_handler)
@@ -271,21 +281,40 @@ def _download_pair_history(
"Current End: %s",
f"{data.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}" if not data.empty else "None",
)
# Default since_ms to 30 days if nothing is given
new_dataframe = exchange.get_historic_ohlcv(
pair=pair,
timeframe=timeframe,
since_ms=(
since_ms
if since_ms
else int((datetime.now() - timedelta(days=new_pairs_days)).timestamp()) * 1000
),
is_new_pair=data.empty,
candle_type=candle_type,
until_ms=until_ms if until_ms else None,
# used to check if the passed in pair_candles (parallel downloaded) covers since_ms.
# If we need more data, we have to fall back to the standard method.
pair_candles_since_ms = (
dt_ts(pair_candles.iloc[0]["date"])
if pair_candles is not None and len(pair_candles.index) > 0
else 0
)
logger.info(f"Downloaded data for {pair} with length {len(new_dataframe)}.")
if (
pair_candles is None
or len(pair_candles.index) == 0
or data.empty
or prepend is True
or erase is True
or pair_candles_since_ms > (since_ms if since_ms else 0)
):
new_dataframe = exchange.get_historic_ohlcv(
pair=pair,
timeframe=timeframe,
since_ms=(
since_ms
if since_ms
else int((datetime.now() - timedelta(days=new_pairs_days)).timestamp()) * 1000
),
is_new_pair=data.empty,
candle_type=candle_type,
until_ms=until_ms if until_ms else None,
)
logger.info(f"Downloaded data for {pair} with length {len(new_dataframe)}.")
else:
new_dataframe = pair_candles
logger.info(
f"Downloaded data for {pair} with length {len(new_dataframe)}. Parallel Method."
)
if data.empty:
data = new_dataframe
else:
@@ -330,6 +359,7 @@ def refresh_backtest_ohlcv_data(
data_format: str | None = None,
prepend: bool = False,
progress_tracker: CustomProgress | None = None,
no_parallel_download: bool = False,
) -> list[str]:
"""
Refresh stored ohlcv data for backtesting and hyperopt operations.
@@ -339,6 +369,7 @@ def refresh_backtest_ohlcv_data(
progress_tracker = retrieve_progress_tracker(progress_tracker)
pairs_not_available = []
fast_candles: dict[PairWithTimeframe, DataFrame] = {}
data_handler = get_datahandler(datadir, data_format)
candle_type = CandleType.get_default(trading_mode)
with progress_tracker as progress:
@@ -355,6 +386,30 @@ def refresh_backtest_ohlcv_data(
logger.info(f"Skipping pair {pair}...")
continue
for timeframe in timeframes:
# Get fast candles via parallel method on first loop through per timeframe
# and candle type. Downloads all the pairs in the list and stores them.
if (
not no_parallel_download
and exchange.get_option("download_data_parallel_quick", True)
and (
((pair, timeframe, candle_type) not in fast_candles)
and (erase is False)
and (prepend is False)
)
):
fast_candles.update(
_download_all_pairs_history_parallel(
exchange=exchange,
pairs=pairs,
timeframe=timeframe,
candle_type=candle_type,
timerange=timerange,
)
)
# get the already downloaded pair candles if they exist
pair_candles = fast_candles.pop((pair, timeframe, candle_type), None)
progress.update(timeframe_task, description=f"Timeframe {timeframe}")
logger.debug(f"Downloading pair {pair}, {candle_type}, interval {timeframe}.")
_download_pair_history(
@@ -368,6 +423,7 @@ def refresh_backtest_ohlcv_data(
candle_type=candle_type,
erase=erase,
prepend=prepend,
pair_candles=pair_candles, # optional pass of dataframe of parallel candles
)
progress.update(timeframe_task, advance=1)
if trading_mode == "futures":
@@ -404,6 +460,41 @@ def refresh_backtest_ohlcv_data(
return pairs_not_available
def _download_all_pairs_history_parallel(
    exchange: Exchange,
    pairs: list[str],
    timeframe: str,
    candle_type: CandleType,
    timerange: TimeRange | None = None,
) -> dict[PairWithTimeframe, DataFrame]:
    """
    Download candles for many pairs with the faster parallel async method,
    but only if the requested range is short enough to be retrieved in one
    call per pair.
    Used by the freqtrade download-data subcommand.
    :param exchange: Exchange object to download from
    :param pairs: Pairs to download
    :param timeframe: Timeframe to download
    :param candle_type: Candle type to download
    :param timerange: Requested download range (optional)
    :return: Mapping of (pair, timeframe, candle_type) to downloaded candles.
        Empty when the range cannot be covered by a single call per pair.
    """
    candles: dict[PairWithTimeframe, DataFrame] = {}
    since = 0
    if timerange and timerange.starttype == "date":
        since = timerange.startts * 1000
    candle_limit = exchange.ohlcv_candle_limit(timeframe, candle_type)
    # Earliest start a single call (candle_limit candles back from now) can still cover.
    one_call_min_time_dt = dt_ts(date_minus_candles(timeframe, candle_limit))
    # check if we can get all candles in one go, if so then we can download them in parallel
    if since > one_call_min_time_dt:
        logger.info(
            f"Downloading parallel candles for {timeframe} for all pairs "
            f"since {format_ms_time(since)}"
        )
        # (previously wrapped pairs in a redundant inner list comprehension)
        needed_pairs: ListPairsWithTimeframes = [(p, timeframe, candle_type) for p in pairs]
        candles = exchange.refresh_latest_ohlcv(needed_pairs, since_ms=since, cache=False)
    return candles
def _download_trades_history(
exchange: Exchange,
pair: str,
@@ -702,6 +793,7 @@ def download_data(
trading_mode=config.get("trading_mode", "spot"),
prepend=config.get("prepend_data", False),
progress_tracker=progress_tracker,
no_parallel_download=config.get("no_parallel_download", False),
)
finally:
if pairs_not_available:

View File

@@ -11,6 +11,7 @@ from freqtrade.exchange.bitmart import Bitmart
from freqtrade.exchange.bitpanda import Bitpanda
from freqtrade.exchange.bitvavo import Bitvavo
from freqtrade.exchange.bybit import Bybit
from freqtrade.exchange.coinex import Coinex
from freqtrade.exchange.cryptocom import Cryptocom
from freqtrade.exchange.exchange_utils import (
ROUND_DOWN,

View File

@@ -5,10 +5,11 @@ from datetime import UTC, datetime
from pathlib import Path
import ccxt
from cachetools import TTLCache
from pandas import DataFrame
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS
from freqtrade.enums import CandleType, MarginMode, PriceType, TradingMode
from freqtrade.enums import TRADE_MODES, CandleType, MarginMode, PriceType, RunMode, TradingMode
from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
from freqtrade.exchange import Exchange
from freqtrade.exchange.binance_public_data import (
@@ -40,6 +41,7 @@ class Binance(Exchange):
"fetch_orders_limit_minutes": None,
"l2_limit_range": [5, 10, 20, 50, 100, 500, 1000],
"ws_enabled": True,
"has_delisting": True,
}
_ft_has_futures: FtHas = {
"funding_fee_candle_limit": 1000,
@@ -68,6 +70,10 @@ class Binance(Exchange):
(TradingMode.FUTURES, MarginMode.ISOLATED),
]
def __init__(self, *args, **kwargs) -> None:
    super().__init__(*args, **kwargs)
    # Cache for the spot delist schedule (used by _get_spot_pair_delist_time).
    # 5 minute TTL limits calls to the authenticated delist-schedule endpoint.
    self._spot_delist_schedule_cache: TTLCache = TTLCache(maxsize=100, ttl=300)
def get_proxy_coin(self) -> str:
"""
Get the proxy coin for the given coin
@@ -391,7 +397,7 @@ class Binance(Exchange):
async def _async_get_trade_history_id(
self, pair: str, until: int, since: int, from_id: str | None = None
) -> tuple[str, list[list]]:
logger.info(f"Fetching trades from Binance, {from_id=}, {since=}, {until=}")
logger.info(f"Fetching trades for {pair} from Binance, {from_id=}, {since=}, {until=}")
if not self._config["exchange"].get("only_from_ccxt", False):
if from_id is None or not since:
@@ -432,3 +438,105 @@ class Binance(Exchange):
return await super()._async_get_trade_history_id(
pair, until=until, since=since, from_id=from_id
)
def _check_delisting_futures(self, pair: str) -> datetime | None:
    """
    Determine the delisting (delivery) time for a futures pair.
    :param pair: Market symbol
    :return: Delivery datetime if the contract is scheduled for delivery/delisting,
        None for perpetuals or unknown pairs.
    """

    def _to_int_ts(value) -> int | None:
        # deliveryDate arrives as a string in the raw exchange payload;
        # normalize to int so comparisons below are type-consistent.
        if isinstance(value, str):
            return int(value) if value != "" else None
        return value

    delivery_time = _to_int_ts(
        self.markets.get(pair, {}).get("info", {}).get("deliveryDate", None)
    )
    if delivery_time:
        # Binance set a very high delivery time for all perpetuals.
        # We compare with delivery time of BTC/USDT:USDT which is assumed to
        # never be delisted.
        # Fix: normalize BTC's deliveryDate as well - previously the pair's
        # value was converted to int while BTC's stayed a raw string, so the
        # equality check could never match when both arrive as strings.
        btc_delivery_time = _to_int_ts(
            self.markets.get("BTC/USDT:USDT", {}).get("info", {}).get("deliveryDate", None)
        )
        if delivery_time == btc_delivery_time:
            return None
        return dt_from_ts(delivery_time)
    return None
def check_delisting_time(self, pair: str) -> datetime | None:
    """
    Check whether the pair is going to be delisted.
    Only evaluated in trading run-modes (``TRADE_MODES``); returns None otherwise.
    :param pair: Market symbol
    :return: Datetime when the pair will be delisted, None otherwise
    """
    if self._config["runmode"] not in TRADE_MODES:
        return None
    if self.trading_mode == TradingMode.FUTURES:
        # Futures delisting is derived from the contract delivery date.
        return self._check_delisting_futures(pair)
    # Spot delisting uses the (cached) delist-schedule endpoint.
    return self._get_spot_pair_delist_time(pair, refresh=False)
def _get_spot_delist_schedule(self):
"""
Get the delisting schedule for spot pairs
Only works in live mode as it requires API keys,
Return sample:
[{
"delistTime": "1759114800000",
"symbols": [
"OMNIBTC",
"OMNIFDUSD",
"OMNITRY",
"OMNIUSDC",
"OMNIUSDT"
]
}]
"""
try:
delist_schedule = self._api.sapi_get_spot_delist_schedule()
return delist_schedule
except ccxt.DDoSProtection as e:
raise DDosProtection(e) from e
except (ccxt.NetworkError, ccxt.OperationFailed, ccxt.ExchangeError) as e:
raise TemporaryError(
f"Could not get delist schedule {e.__class__.__name__}. Message: {e}"
) from e
except ccxt.BaseError as e:
raise OperationalException(e) from e
def _get_spot_pair_delist_time(self, pair: str, refresh: bool = False) -> datetime | None:
    """
    Get the delisting time for a pair if it will be delisted.
    :param pair: Pair to get the delisting time for
    :param refresh: true if you need fresh data
    :return: Delisting datetime - None if the pair is not scheduled for delisting
        (docstring previously claimed an int return; the function returns datetime)
    """
    if not pair or not self._config["runmode"] == RunMode.LIVE:
        # Endpoint only works in live mode as it requires API keys
        return None
    cache = self._spot_delist_schedule_cache
    if not refresh:
        if delist_time := cache.get(pair, None):
            return delist_time
    delist_schedule = self._get_spot_delist_schedule()
    if delist_schedule is None:
        return None
    for schedule in delist_schedule:
        delist_dt = dt_from_ts(int(schedule["delistTime"]))
        for symbol in schedule["symbols"]:
            # Map the exchange-native symbol (e.g. "OMNIUSDT") back to the
            # freqtrade pair name. Use a distinct loop variable to avoid
            # shadowing the `pair` argument inside the generator expression.
            ft_symbol = next(
                (
                    ft_pair
                    for ft_pair, market in self.markets.items()
                    if market.get("id", None) == symbol
                ),
                None,
            )
            if ft_symbol is None:
                continue
            # Cache every scheduled pair so subsequent lookups are free.
            cache[ft_symbol] = delist_dt
    return cache.get(pair, None)

File diff suppressed because it is too large Load Diff

View File

@@ -2,8 +2,6 @@
import logging
from ccxt import DECIMAL_PLACES
from freqtrade.exchange import Exchange
from freqtrade.exchange.exchange_types import FtHas
@@ -24,11 +22,3 @@ class Bitvavo(Exchange):
_ft_has: FtHas = {
"ohlcv_candle_limit": 1440,
}
@property
def precisionMode(self) -> int:
"""
Exchange ccxt precisionMode
Override due to https://github.com/ccxt/ccxt/issues/20408
"""
return DECIMAL_PLACES

View File

@@ -0,0 +1,24 @@
import logging
from freqtrade.exchange import Exchange
from freqtrade.exchange.exchange_types import FtHas
logger = logging.getLogger(__name__)
class Coinex(Exchange):
    """
    CoinEx exchange class. Contains adjustments needed for Freqtrade to work
    with this exchange.
    Please note that this exchange is not included in the list of exchanges
    officially supported by the Freqtrade development team. So some features
    may still not work as expected.
    """

    _ft_has: FtHas = {
        # Orderbook depth values supported by CoinEx.
        "l2_limit_range": [5, 10, 20, 50],
        # Ticker responses do not include bid/ask prices.
        "tickers_have_bid_ask": False,
        # Ticker responses do not include quote-currency volume.
        "tickers_have_quoteVolume": False,
    }

View File

@@ -73,6 +73,7 @@ from freqtrade.exchange.exchange_types import (
CcxtOrder,
CcxtPosition,
FtHas,
FundingRate,
OHLCVResponse,
OrderBook,
Ticker,
@@ -137,6 +138,7 @@ class Exchange:
"ohlcv_has_history": True, # Some exchanges (Kraken) don't provide history via ohlcv
"ohlcv_partial_candle": True,
"ohlcv_require_since": False,
"download_data_parallel_quick": True,
"always_require_api_keys": False, # purge API keys for Dry-run. Must default to false.
# Check https://github.com/ccxt/ccxt/issues/10767 for removal of ohlcv_volume_currency
"ohlcv_volume_currency": "base", # "base" or "quote"
@@ -164,6 +166,7 @@ class Exchange:
"proxy_coin_mapping": {}, # Mapping for proxy coins
# Expected to be in the format {"fetchOHLCV": True} or {"fetchOHLCV": False}
"ws_enabled": False, # Set to true for exchanges with tested websocket support
"has_delisting": False, # Set to true for exchanges that have delisting pair checks
}
_ft_has: FtHas = {}
_ft_has_futures: FtHas = {}
@@ -690,12 +693,13 @@ class Exchange:
# Reload async markets, then assign them to sync api
retrier(self._load_async_markets, retries=retries)(reload=True)
self._markets = self._api_async.markets
self._api.set_markets(self._api_async.markets, self._api_async.currencies)
self._api.set_markets_from_exchange(self._api_async)
# Assign options array, as it contains some temporary information from the exchange.
# TODO: investigate with ccxt if it's safe to remove `.options`
self._api.options = self._api_async.options
if self._exchange_ws:
# Set markets to avoid reloading on websocket api
self._ws_async.set_markets(self._api.markets, self._api.currencies)
self._ws_async.set_markets_from_exchange(self._api_async)
self._ws_async.options = self._api.options
self._last_markets_refresh = dt_ts()
@@ -828,10 +832,16 @@ class Exchange:
def validate_freqai(self, config: Config) -> None:
    # Validate that FreqAI can run on this exchange (needs historic OHLCV data).
    freqai_enabled = config.get("freqai", {}).get("enabled", False)
    if freqai_enabled and not self._ft_has["ohlcv_has_history"]:
    # NOTE(review): the CONF_SCHEMA in this change declares the option as
    # "override_exchange_check" (singular), while it is read here as
    # "override_exchange_checks" - one of the two spellings must be aligned
    # or the override can never take effect. TODO confirm intended key name.
    override = config.get("freqai", {}).get("override_exchange_checks", False)
    if not override and freqai_enabled and not self._ft_has["ohlcv_has_history"]:
        raise ConfigurationError(
            f"Historic OHLCV data not available for {self.name}. Can't use freqAI."
        )
    elif override and freqai_enabled and not self._ft_has["ohlcv_has_history"]:
        logger.warning(
            "Overriding exchange checks for freqAI. Make sure that your exchange supports "
            "fetching historic OHLCV data, otherwise freqAI will not work."
        )
def validate_required_startup_candles(self, startup_candles: int, timeframe: str) -> int:
"""
@@ -890,6 +900,19 @@ class Exchange:
f"Freqtrade does not support '{mm_value}' '{trading_mode}' on {self.name}."
)
@classmethod
def combine_ft_has(cls, include_futures: bool) -> FtHas:
    """
    Combine all ft_has options from the class hierarchy.
    Child classes override parent classes.
    Doesn't apply overrides from the configuration.
    :param include_futures: Also merge in the futures-specific overrides.
    :return: Merged FtHas dict.
    """
    # Class-specific values take precedence over the shared defaults.
    _ft_has = deep_merge_dicts(cls._ft_has, deepcopy(cls._ft_has_default))
    if include_futures:
        # Futures-specific values take precedence over everything else.
        _ft_has = deep_merge_dicts(cls._ft_has_futures, _ft_has)
    return _ft_has
def build_ft_has(self, exchange_conf: ExchangeConfig) -> None:
"""
Deep merge ft_has with default ft_has options
@@ -897,9 +920,8 @@ class Exchange:
This is called on initialization of the exchange object.
It must be called before ft_has is used.
"""
self._ft_has = deep_merge_dicts(self._ft_has, deepcopy(self._ft_has_default))
if self.trading_mode == TradingMode.FUTURES:
self._ft_has = deep_merge_dicts(self._ft_has_futures, self._ft_has)
self._ft_has = self.combine_ft_has(include_futures=self.trading_mode == TradingMode.FUTURES)
if exchange_conf.get("_ft_has_params"):
self._ft_has = deep_merge_dicts(exchange_conf.get("_ft_has_params"), self._ft_has)
logger.info("Overriding exchange._ft_has with config params, result: %s", self._ft_has)
@@ -2001,6 +2023,30 @@ class Exchange:
except ccxt.BaseError as e:
raise OperationalException(e) from e
@retrier
def fetch_funding_rate(self, pair: str) -> FundingRate:
    """
    Get current Funding rate from exchange.
    On Futures markets, this is the interest rate for holding a position.
    Won't work for non-futures markets.
    :param pair: Pair to fetch the funding rate for
    :return: ccxt FundingRate dict
    :raises ExchangeError: if the pair is unknown or not active
    :raises OperationalException: if the exchange does not support funding rates
    :raises TemporaryError: on recoverable errors (retried via @retrier)
    """
    try:
        # NOTE(review): ccxt markets may carry "active": None (unknown state);
        # `is False` treats None as available - confirm this is intended.
        if pair not in self.markets or self.markets[pair].get("active", False) is False:
            raise ExchangeError(f"Pair {pair} not available")
        return self._api.fetch_funding_rate(pair)
    except ccxt.NotSupported as e:
        raise OperationalException(
            f"Exchange {self._api.name} does not support fetching funding rate. Message: {e}"
        ) from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not get funding rate due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
@staticmethod
def get_next_limit_in_list(
limit: int,
@@ -2456,7 +2502,14 @@ class Exchange:
data.extend(new_data)
# Sort data again after extending the result - above calls return in "async order"
data = sorted(data, key=lambda x: x[0])
return pair, timeframe, candle_type, data, self._ohlcv_partial_candle
return (
pair,
timeframe,
candle_type,
data,
# funding_rates are always complete, so never need to be dropped.
self._ohlcv_partial_candle if candle_type != CandleType.FUNDING_RATE else False,
)
def _try_build_from_websocket(
self, pair: str, timeframe: str, candle_type: CandleType
@@ -2566,14 +2619,24 @@ class Exchange:
input_coroutines: list[Coroutine[Any, Any, OHLCVResponse]] = []
cached_pairs = []
for pair, timeframe, candle_type in set(pair_list):
if timeframe not in self.timeframes and candle_type in (
invalid_funding = (
candle_type == CandleType.FUNDING_RATE
and timeframe != self.get_option("funding_fee_timeframe")
)
invalid_timeframe = timeframe not in self.timeframes and candle_type in (
CandleType.SPOT,
CandleType.FUTURES,
):
)
if invalid_timeframe or invalid_funding:
timeframes_ = (
", ".join(self.timeframes)
if candle_type != CandleType.FUNDING_RATE
else self.get_option("funding_fee_timeframe")
)
logger.warning(
f"Cannot download ({pair}, {timeframe}) combination as this timeframe is "
f"not available on {self.name}. Available timeframes are "
f"{', '.join(self.timeframes)}."
f"Cannot download ({pair}, {timeframe}, {candle_type}) combination as this "
f"timeframe is not available on {self.name}. Available timeframes are "
f"{timeframes_}."
)
continue
@@ -2756,7 +2819,7 @@ class Exchange:
timeframe, candle_type=candle_type, since_ms=since_ms
)
if candle_type and candle_type != CandleType.SPOT:
if candle_type and candle_type not in (CandleType.SPOT, CandleType.FUTURES):
params.update({"price": candle_type.value})
if candle_type != CandleType.FUNDING_RATE:
data = await self._api_async.fetch_ohlcv(
@@ -2771,8 +2834,6 @@ class Exchange:
since_ms=since_ms,
)
# Some exchanges sort OHLCV in ASC order and others in DESC.
# Ex: Bittrex returns the list of OHLCV in ASC order (oldest first, newest last)
# while GDAX returns the list of OHLCV in DESC order (newest first, oldest last)
# Only sort if necessary to save computing time
try:
if data and data[0][0] > data[-1][0]:
@@ -2781,7 +2842,14 @@ class Exchange:
logger.exception("Error loading %s. Result was %s.", pair, data)
return pair, timeframe, candle_type, [], self._ohlcv_partial_candle
logger.debug("Done fetching pair %s, %s interval %s...", pair, candle_type, timeframe)
return pair, timeframe, candle_type, data, self._ohlcv_partial_candle
return (
pair,
timeframe,
candle_type,
data,
# funding_rates are always complete, so never need to be dropped.
self._ohlcv_partial_candle if candle_type != CandleType.FUNDING_RATE else False,
)
except ccxt.NotSupported as e:
raise OperationalException(
@@ -3229,7 +3297,7 @@ class Exchange:
for sig in [signal.SIGINT, signal.SIGTERM]:
try:
self.loop.add_signal_handler(sig, task.cancel)
except NotImplementedError:
except (NotImplementedError, RuntimeError):
# Not all platforms implement signals (e.g. windows)
pass
return self.loop.run_until_complete(task)
@@ -3811,7 +3879,10 @@ class Exchange:
"""
market = self.markets[pair]
taker_fee_rate = market["taker"]
# default to some default fee if not available from exchange
taker_fee_rate = market["taker"] or self._api.describe().get("fees", {}).get(
"trading", {}
).get("taker", 0.001)
mm_ratio, _ = self.get_maintenance_ratio_and_amt(pair, stake_amount)
if self.trading_mode == TradingMode.FUTURES and self.margin_mode == MarginMode.ISOLATED:
@@ -3863,3 +3934,14 @@ class Exchange:
# describes the min amt for a tier, and the lowest tier will always go down to 0
else:
raise ExchangeError(f"Cannot get maintenance ratio using {self.name}")
def check_delisting_time(self, pair: str) -> datetime | None:
    """
    Check whether the pair is going to be delisted.
    This function should be overridden by the exchange class if the exchange
    provides such information.
    By default, it returns None.
    :param pair: Market symbol
    :return: Datetime when the pair will be delisted, None otherwise
    """
    return None

View File

@@ -1,5 +1,8 @@
from typing import Any, Literal, TypedDict
# Re-export for easier use
from ccxt.base.types import FundingRate # noqa: F401
from freqtrade.enums import CandleType
@@ -25,6 +28,8 @@ class FtHas(TypedDict, total=False):
ohlcv_volume_currency: str
ohlcv_candle_limit_per_timeframe: dict[str, int]
always_require_api_keys: bool
# allow disabling of parallel download-data for specific exchanges
download_data_parallel_quick: bool
# Tickers
tickers_have_quoteVolume: bool
tickers_have_percentage: bool
@@ -58,6 +63,9 @@ class FtHas(TypedDict, total=False):
# Websocket control
ws_enabled: bool
# Delisting check
has_delisting: bool
class Ticker(TypedDict):
symbol: str

View File

@@ -28,6 +28,7 @@ class Hyperliquid(Exchange):
"stoploss_on_exchange": False,
"exchange_has_overrides": {"fetchTrades": False},
"marketOrderRequiresPrice": True,
"download_data_parallel_quick": False,
"ws_enabled": True,
}
_ft_has_futures: FtHas = {
@@ -43,6 +44,7 @@ class Hyperliquid(Exchange):
_supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
(TradingMode.SPOT, MarginMode.NONE),
(TradingMode.FUTURES, MarginMode.ISOLATED),
(TradingMode.FUTURES, MarginMode.CROSS),
]
@property
@@ -98,7 +100,6 @@ class Hyperliquid(Exchange):
'SOL/USDC:USDC': 43}}
"""
# Defining/renaming variables to match the documentation
isolated_margin = wallet_balance
position_size = amount
price = open_rate
position_value = price * position_size
@@ -116,8 +117,14 @@ class Hyperliquid(Exchange):
# 3. Divide this by 2
maintenance_margin_required = position_value / max_leverage / 2
# Docs: margin_available (isolated) = isolated_margin - maintenance_margin_required
margin_available = isolated_margin - maintenance_margin_required
if self.margin_mode == MarginMode.ISOLATED:
# Docs: margin_available (isolated) = isolated_margin - maintenance_margin_required
margin_available = stake_amount - maintenance_margin_required
elif self.margin_mode == MarginMode.CROSS:
# Docs: margin_available (cross) = account_value - maintenance_margin_required
margin_available = wallet_balance - maintenance_margin_required
else:
raise OperationalException("Unsupported margin mode for liquidation price calculation")
# Docs: The maintenance margin is half of the initial margin at max leverage
# The docs don't explicitly specify maintenance leverage, but this works.

View File

@@ -65,15 +65,22 @@ class Okx(Exchange):
"""
Exchange ohlcv candle limit
OKX has the following behaviour:
* 300 candles for up-to-date data
* 100 candles for historic data
* 100 candles for additional candles (not futures or spot).
* spot and futures:
* 300 candles for regular candles
* mark and premium-index:
* 300 candles for up-to-date data
* 100 candles for historic data
* additional data:
* 100 candles for additional candles
:param timeframe: Timeframe to check
:param candle_type: Candle-type
:param since_ms: Starting timestamp
:return: Candle limit as integer
"""
if candle_type in (CandleType.FUTURES, CandleType.SPOT) and (
if candle_type in (CandleType.FUTURES, CandleType.SPOT):
return 300
if candle_type in (CandleType.MARK, CandleType.PREMIUMINDEX) and (
not since_ms or since_ms > (date_minus_candles(timeframe, 300).timestamp() * 1000)
):
return 300

View File

@@ -1617,7 +1617,9 @@ class FreqtradeBot(LoggingMixin):
f"Emergency exiting trade {trade}, as the exit order "
f"timed out {max_timeouts} times. force selling {order['amount']}."
)
self.emergency_exit(trade, order["price"], order["amount"])
# Trade.session.refresh(order_obj)
self.emergency_exit(trade, order["price"], order_obj.safe_remaining)
return canceled
def emergency_exit(

View File

@@ -5,14 +5,26 @@ from pydantic import TypeAdapter
from typing_extensions import TypedDict
class AnnotationType(TypedDict, total=False):
type: Required[Literal["area"]]
class _BaseAnnotationType(TypedDict, total=False):
start: str | datetime
end: str | datetime
y_start: float
y_end: float
color: str
label: str
z_level: int
AnnotationTypeTA = TypeAdapter(AnnotationType)
class AreaAnnotationType(_BaseAnnotationType, total=False):
type: Required[Literal["area"]]
class LineAnnotationType(_BaseAnnotationType, total=False):
type: Required[Literal["line"]]
width: int
line_style: Literal["solid", "dashed", "dotted"]
AnnotationType = AreaAnnotationType | LineAnnotationType
AnnotationTypeTA: TypeAdapter[AnnotationType] = TypeAdapter(AnnotationType)

View File

@@ -145,9 +145,19 @@ class LookaheadAnalysisSubFunctions:
config["enable_protections"] = False
logger.info(
"Protections were enabled. "
"Disabling protections now "
"since they could otherwise produce false positives."
"Disabling protections now since they can produce false positives."
)
if not config.get("lookahead_allow_limit_orders", False):
logger.info("Forced order_types to market orders.")
config["order_types"] = {
"entry": "market",
"exit": "market",
"stoploss": "market",
"stoploss_on_exchange": False,
}
else:
logger.info("Using configured order_types, skipping order_types override.")
if config["targeted_trade_amount"] < config["minimum_trade_amount"]:
# this combo doesn't make any sense.
raise OperationalException(

View File

@@ -37,10 +37,12 @@ class RecursiveAnalysis(BaseAnalysis):
self.dict_recursive: dict[str, Any] = dict()
self.pair_to_used: str | None = None
# For recursive bias check
# analyzes two data frames with processed indicators and shows differences between them.
def analyze_indicators(self):
pair_to_check = self.local_config["pairs"][0]
pair_to_check = self.pair_to_used
logger.info("Start checking for recursive bias")
# check and report signals
@@ -85,7 +87,7 @@ class RecursiveAnalysis(BaseAnalysis):
# For lookahead bias check
# analyzes two data frames with processed indicators and shows differences between them.
def analyze_indicators_lookahead(self):
pair_to_check = self.local_config["pairs"][0]
pair_to_check = self.pair_to_used
logger.info("Start checking for lookahead bias on indicators only")
part = self.partial_varHolder_lookahead_array[0]
@@ -138,7 +140,13 @@ class RecursiveAnalysis(BaseAnalysis):
backtesting = Backtesting(prepare_data_config, self.exchange)
self.exchange = backtesting.exchange
if self.pair_to_used is None:
self.pair_to_used = backtesting.pairlists.whitelist[0]
logger.info(
f"Using pair {self.pair_to_used} only for recursive analysis. Replacing whitelist."
)
self.local_config["candle_type_def"] = prepare_data_config["candle_type_def"]
backtesting.pairlists._whitelist = [self.pair_to_used]
backtesting._set_strategy(backtesting.strategylist[0])
strat = backtesting.strategy

View File

@@ -211,6 +211,7 @@ class Backtesting:
self._can_short = self.trading_mode != TradingMode.SPOT
self._position_stacking: bool = self.config.get("position_stacking", False)
self.enable_protections: bool = self.config.get("enable_protections", False)
self.dynamic_pairlist: bool = self.config.get("enable_dynamic_pairlist", False)
migrate_data(config, self.exchange)
self.init_backtest()
@@ -966,7 +967,7 @@ class Backtesting:
)
)
def get_valid_price_and_stake(
def get_valid_entry_price_and_stake(
self,
pair: str,
row: tuple,
@@ -1089,18 +1090,20 @@ class Backtesting:
stake_amount_ = stake_amount or (trade.stake_amount if trade else 0.0)
precision_price, precision_mode_price = self.get_pair_precision(pair, current_time)
propose_rate, stake_amount, leverage, min_stake_amount = self.get_valid_price_and_stake(
pair,
row,
row[OPEN_IDX],
stake_amount_,
direction,
current_time,
entry_tag,
trade,
order_type,
precision_price,
precision_mode_price,
propose_rate, stake_amount, leverage, min_stake_amount = (
self.get_valid_entry_price_and_stake(
pair,
row,
row[OPEN_IDX],
stake_amount_,
direction,
current_time,
entry_tag,
trade,
order_type,
precision_price,
precision_mode_price,
)
)
# replace proposed rate if another rate was requested
@@ -1582,6 +1585,11 @@ class Backtesting:
for current_time in self._time_generator(start_date, end_date):
# Loop for each main candle.
self.check_abort()
if self.dynamic_pairlist and self.pairlists:
self.pairlists.refresh_pairlist()
pairs = self.pairlists.whitelist
# Reset open trade count for this candle
# Critical to avoid exceeding max_open_trades in backtesting
# when timeframe-detail is used and trades close within the opening candle.

View File

@@ -0,0 +1,95 @@
"""
Delist pair list filter
"""
import logging
from datetime import UTC, datetime, timedelta
from freqtrade.exceptions import ConfigurationError
from freqtrade.exchange.exchange_types import Ticker
from freqtrade.plugins.pairlist.IPairList import IPairList, PairlistParameter, SupportsBacktesting
from freqtrade.util import format_date
logger = logging.getLogger(__name__)
class DelistFilter(IPairList):
supports_backtesting = SupportsBacktesting.NO
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self._max_days_from_now = self._pairlistconfig.get("max_days_from_now", 0)
if self._max_days_from_now < 0:
raise ConfigurationError("DelistFilter requires max_days_from_now to be >= 0")
if not self._exchange._ft_has["has_delisting"]:
raise ConfigurationError(
"DelistFilter doesn't support this exchange and trading mode combination.",
)
@property
def needstickers(self) -> bool:
"""
Boolean property defining if tickers are necessary.
If no Pairlist requires tickers, an empty Dict is passed
as tickers argument to filter_pairlist
"""
return False
def short_desc(self) -> str:
"""
Short whitelist method description - used for startup-messages
"""
return (
f"{self.name} - Filtering pairs that will be delisted"
+ (
f" in the next {self._max_days_from_now} days"
if self._max_days_from_now > 0
else ""
)
+ "."
)
@staticmethod
def description() -> str:
return "Filter pairs that will be delisted on exchange."
@staticmethod
def available_parameters() -> dict[str, PairlistParameter]:
return {
"max_days_from_now": {
"type": "number",
"default": 0,
"description": "Max days from now",
"help": (
"Remove pairs that will be delisted in the next X days. Set to 0 to remove all."
),
},
}
def _validate_pair(self, pair: str, ticker: Ticker | None) -> bool:
"""
Check if pair will be delisted.
:param pair: Pair that's currently validated
:param ticker: ticker dict as returned from ccxt.fetch_ticker
:return: True if the pair can stay, false if it should be removed
"""
delist_date = self._exchange.check_delisting_time(pair)
if delist_date is not None:
remove_pair = self._max_days_from_now == 0
if self._max_days_from_now > 0:
current_datetime = datetime.now(UTC)
max_delist_date = current_datetime + timedelta(days=self._max_days_from_now)
remove_pair = delist_date <= max_delist_date
if remove_pair:
self.log_once(
f"Removed {pair} from whitelist, because it will be delisted on "
f"{format_date(delist_date)}.",
logger.info,
)
return False
return True

View File

@@ -93,6 +93,8 @@ class ShuffleFilter(IPairList):
return pairlist_new
# Shuffle is done inplace
self._random.shuffle(pairlist)
self.__pairlist_cache[pairlist_bef] = pairlist
if self._config.get("runmode") in (RunMode.LIVE, RunMode.DRY_RUN):
self.__pairlist_cache[pairlist_bef] = pairlist
return pairlist

View File

@@ -7,6 +7,9 @@ Provides pair white list as it configured in config
import logging
from copy import deepcopy
from cachetools import LRUCache
from freqtrade.enums import RunMode
from freqtrade.exchange.exchange_types import Tickers
from freqtrade.plugins.pairlist.IPairList import IPairList, PairlistParameter, SupportsBacktesting
@@ -22,6 +25,8 @@ class StaticPairList(IPairList):
super().__init__(*args, **kwargs)
self._allow_inactive = self._pairlistconfig.get("allow_inactive", False)
# Pair cache - only used for optimize modes
self._bt_pair_cache: LRUCache = LRUCache(maxsize=1)
@property
def needstickers(self) -> bool:
@@ -60,15 +65,23 @@ class StaticPairList(IPairList):
:param tickers: Tickers (from exchange.get_tickers). May be cached.
:return: List of pairs
"""
wl = self.verify_whitelist(
self._config["exchange"]["pair_whitelist"], logger.info, keep_invalid=True
)
if self._allow_inactive:
return wl
else:
# Avoid implicit filtering of "verify_whitelist" to keep
# proper warnings in the log
return self._whitelist_for_active_markets(wl)
pairlist = self._bt_pair_cache.get("pairlist")
if not pairlist:
wl = self.verify_whitelist(
self._config["exchange"]["pair_whitelist"], logger.info, keep_invalid=True
)
if self._allow_inactive:
pairlist = wl
else:
# Avoid implicit filtering of "verify_whitelist" to keep
# proper warnings in the log
pairlist = self._whitelist_for_active_markets(wl)
if self._config["runmode"] in (RunMode.BACKTEST, RunMode.HYPEROPT):
self._bt_pair_cache["pairlist"] = pairlist.copy()
return pairlist
def filter_pairlist(self, pairlist: list[str], tickers: Tickers) -> list[str]:
"""

View File

@@ -247,7 +247,6 @@ class VolumePairList(IPairList):
* 1000
)
# todo: utc date output for starting date
self.log_once(
f"Using volume range of {self._lookback_period} candles, timeframe: "
f"{self._lookback_timeframe}, starting from {format_ms_time(since_ms)} "

View File

@@ -5,7 +5,7 @@ PairList manager class
import logging
from functools import partial
from cachetools import TTLCache, cached
from cachetools import LRUCache, TTLCache, cached
from freqtrade.constants import Config, ListPairsWithTimeframes
from freqtrade.data.dataprovider import DataProvider
@@ -56,6 +56,7 @@ class PairListManager(LoggingMixin):
)
self._check_backtest()
self._not_expiring_cache: LRUCache = LRUCache(maxsize=1)
refresh_period = config.get("pairlist_refresh_period", 3600)
LoggingMixin.__init__(self, logger, refresh_period)
@@ -109,7 +110,15 @@ class PairListManager(LoggingMixin):
@property
def expanded_blacklist(self) -> list[str]:
"""The expanded blacklist (including wildcard expansion)"""
return expand_pairlist(self._blacklist, self._exchange.get_markets().keys())
eblacklist = self._not_expiring_cache.get("eblacklist")
if not eblacklist:
eblacklist = expand_pairlist(self._blacklist, self._exchange.get_markets().keys())
if self._config["runmode"] in (RunMode.BACKTEST, RunMode.HYPEROPT):
self._not_expiring_cache["eblacklist"] = eblacklist.copy()
return eblacklist
@property
def name_list(self) -> list[str]:
@@ -157,16 +166,17 @@ class PairListManager(LoggingMixin):
:param logmethod: Function that'll be called, `logger.info` or `logger.warning`.
:return: pairlist - blacklisted pairs
"""
try:
blacklist = self.expanded_blacklist
except ValueError as err:
logger.error(f"Pair blacklist contains an invalid Wildcard: {err}")
return []
log_once = partial(self.log_once, logmethod=logmethod)
for pair in pairlist.copy():
if pair in blacklist:
log_once(f"Pair {pair} in your blacklist. Removing it from whitelist...")
pairlist.remove(pair)
if self._blacklist:
try:
blacklist = self.expanded_blacklist
except ValueError as err:
logger.error(f"Pair blacklist contains an invalid Wildcard: {err}")
return []
log_once = partial(self.log_once, logmethod=logmethod)
for pair in pairlist.copy():
if pair in blacklist:
log_once(f"Pair {pair} in your blacklist. Removing it from whitelist...")
pairlist.remove(pair)
return pairlist
def verify_whitelist(

View File

@@ -87,7 +87,7 @@ class StrategyResolver(IResolver):
# Loop this list again to have output combined
for attribute, _ in attributes:
if attribute in config:
logger.info("Strategy using %s: %s", attribute, config[attribute])
logger.info(f"Strategy using {attribute}: {config[attribute]}")
StrategyResolver._normalize_attributes(strategy)
@@ -109,9 +109,8 @@ class StrategyResolver(IResolver):
# Ensure Properties are not overwritten
setattr(strategy, attribute, config[attribute])
logger.info(
"Override strategy '%s' with value in config file: %s.",
attribute,
config[attribute],
f"Override strategy '{attribute}' with value from the configuration: "
f"{config[attribute]}.",
)
elif hasattr(strategy, attribute):
val = getattr(strategy, attribute)

View File

@@ -63,7 +63,7 @@ def __run_backtest_bg(btconfig: Config):
ApiBG.bt["bt"] = Backtesting(btconfig)
else:
ApiBG.bt["bt"].config = btconfig
ApiBG.bt["bt"].config = deep_merge_dicts(btconfig, ApiBG.bt["bt"].config)
ApiBG.bt["bt"].init_backtest()
# Only reload data if timeframe changed.
if (

View File

@@ -57,7 +57,8 @@ def pairlists_evaluate(
config_loc = deepcopy(config)
config_loc["stake_currency"] = ""
config_loc["pairs"] = payload.pairs
config_loc["timerange"] = payload.timerange
if payload.timerange:
config_loc["timerange"] = payload.timerange
config_loc["days"] = payload.days
config_loc["timeframes"] = payload.timeframes
config_loc["erase"] = payload.erase

View File

@@ -229,6 +229,7 @@ class ShowConfig(BaseModel):
api_version: float
dry_run: bool
trading_mode: str
margin_mode: str
short_allowed: bool
stake_currency: str
stake_amount: str

View File

@@ -136,6 +136,7 @@ class RPC:
"strategy_version": strategy_version,
"dry_run": config["dry_run"],
"trading_mode": config.get("trading_mode", "spot"),
"margin_mode": config.get("margin_mode", ""),
"short_allowed": config.get("trading_mode", "spot") != "spot",
"stake_currency": config["stake_currency"],
"stake_currency_decimals": decimals_per_coin(config["stake_currency"]),

View File

@@ -360,11 +360,9 @@ class Telegram(RPCHandler):
await asyncio.sleep(2)
if self._app.updater:
await self._app.updater.start_polling(
bootstrap_retries=-1,
bootstrap_retries=10,
timeout=20,
# read_latency=60, # Assumed transmission latency
drop_pending_updates=True,
# stop_signals=[], # Necessary as we don't run on the main thread
)
while True:
await asyncio.sleep(10)

View File

@@ -100,7 +100,7 @@ def _create_and_merge_informative_pair(
dataframe: DataFrame,
metadata: dict,
inf_data: InformativeData,
populate_indicators: PopulateIndicators,
populate_indicators_fn: PopulateIndicators,
):
asset = inf_data.asset or ""
timeframe = inf_data.timeframe
@@ -133,7 +133,12 @@ def _create_and_merge_informative_pair(
inf_metadata = {"pair": asset, "timeframe": timeframe}
inf_dataframe = strategy.dp.get_pair_dataframe(asset, timeframe, candle_type)
inf_dataframe = populate_indicators(strategy, inf_dataframe, inf_metadata)
if inf_dataframe.empty:
raise ValueError(
f"Informative dataframe for ({asset}, {timeframe}, {candle_type}) is empty. "
"Can't populate informative indicators."
)
inf_dataframe = populate_indicators_fn(strategy, inf_dataframe, inf_metadata)
formatter: Any = None
if callable(fmt):

View File

@@ -152,7 +152,7 @@ class IStrategy(ABC, HyperStrategyMixin):
def __init__(self, config: Config) -> None:
self.config = config
# Dict to determine if analysis is necessary
self._last_candle_seen_per_pair: dict[str, datetime] = {}
self.__last_candle_seen_per_pair: dict[str, datetime] = {}
super().__init__(config)
# Gather informative pairs from @informative-decorated methods.
@@ -1209,14 +1209,14 @@ class IStrategy(ABC, HyperStrategyMixin):
"""
pair = str(metadata.get("pair"))
new_candle = self._last_candle_seen_per_pair.get(pair, None) != dataframe.iloc[-1]["date"]
new_candle = self.__last_candle_seen_per_pair.get(pair, None) != dataframe.iloc[-1]["date"]
# Test if seen this pair and last candle before.
# always run if process_only_new_candles is set to false
if not self.process_only_new_candles or new_candle:
# Defs that only make change on new candle data.
dataframe = self.analyze_ticker(dataframe, metadata)
self._last_candle_seen_per_pair[pair] = dataframe.iloc[-1]["date"]
self.__last_candle_seen_per_pair[pair] = dataframe.iloc[-1]["date"]
candle_type = self.config.get("candle_type_def", CandleType.SPOT)
self.dp._set_cached_df(pair, self.timeframe, dataframe, candle_type=candle_type)

View File

@@ -78,5 +78,5 @@ def format_duration(td: timedelta) -> str:
"""
d = td.days
h, r = divmod(td.seconds, 3600)
m, s = divmod(r, 60)
m, _ = divmod(r, 60)
return f"{d}d {h:02d}:{m:02d}"

View File

@@ -1,7 +1,7 @@
from freqtrade_client.ft_rest_client import FtRestClient
__version__ = "2025.9-dev"
__version__ = "2025.10-dev"
if "dev" in __version__:
from pathlib import Path

View File

@@ -29,7 +29,7 @@ classifiers = [
dependencies = [
# from requirements.txt
"ccxt>=4.4.87",
"ccxt>=4.5.4",
"SQLAlchemy>=2.0.6",
"python-telegram-bot>=20.1",
"humanize>=4.0.0",

View File

@@ -6,17 +6,17 @@
-r requirements-freqai-rl.txt
-r docs/requirements-docs.txt
ruff==0.12.11
mypy==1.17.1
ruff==0.13.3
mypy==1.18.2
pre-commit==4.3.0
pytest==8.4.1
pytest-asyncio==1.1.0
pytest-cov==6.2.1
pytest-mock==3.14.1
pytest==8.4.2
pytest-asyncio==1.2.0
pytest-cov==7.0.0
pytest-mock==3.15.1
pytest-random-order==1.2.0
pytest-timeout==2.4.0
pytest-xdist==3.8.0
isort==6.0.1
isort==6.1.0
# For datetime mocking
time-machine==2.19.0
@@ -24,9 +24,9 @@ time-machine==2.19.0
nbconvert==7.16.6
# mypy types
scipy-stubs==1.16.1.1 # keep in sync with `scipy` in `requirements-hyperopt.txt`
scipy-stubs==1.16.2.0 # keep in sync with `scipy` in `requirements-hyperopt.txt`
types-cachetools==6.2.0.20250827
types-filelock==3.2.7
types-requests==2.32.4.20250809
types-requests==2.32.4.20250913
types-tabulate==0.9.0.20241207
types-python-dateutil==2.9.0.20250822

View File

@@ -3,10 +3,10 @@
-r requirements-plot.txt
# Required for freqai
scikit-learn==1.7.1
scikit-learn==1.7.2
joblib==1.5.2
catboost==1.2.8; 'arm' not in platform_machine
lightgbm==4.6.0
xgboost==3.0.4
xgboost==3.0.5
tensorboard==2.20.0
datasieve==0.1.9

View File

@@ -2,8 +2,8 @@
-r requirements.txt
# Required for hyperopt
scipy==1.16.1
scikit-learn==1.7.1
scipy==1.16.2
scikit-learn==1.7.2
filelock==3.19.1
optuna==4.5.0
cmaes==0.12.0

View File

@@ -1,4 +1,4 @@
# Include all requirements to run the bot.
-r requirements.txt
plotly==6.3.0
plotly==6.3.1

View File

@@ -1,26 +1,24 @@
numpy==2.3.2; platform_machine != 'armv7l'
numpy==2.2.4; platform_machine == 'armv7l'
pandas==2.3.2; platform_machine != 'armv7l'
pandas==2.2.3; platform_machine == 'armv7l'
bottleneck==1.5.0
numexpr==2.11.0
numpy==2.3.3
pandas==2.3.3
bottleneck==1.6.0
numexpr==2.13.1
# Indicator libraries
ft-pandas-ta==0.3.15
ta-lib==0.6.6
ft-pandas-ta==0.3.16
ta-lib==0.6.7
technical==1.5.3
ccxt==4.5.2
cryptography==45.0.6
ccxt==4.5.7
cryptography==46.0.2
aiohttp==3.12.15
SQLAlchemy==2.0.43
python-telegram-bot==22.3
python-telegram-bot==22.5
# can't be hard-pinned due to telegram-bot pinning httpx with ~
httpx>=0.24.1
humanize==4.13.0
cachetools==6.2.0
requests==2.32.5
urllib3==2.5.0
certifi==2025.8.3
certifi==2025.10.5
jsonschema==4.25.1
tabulate==0.9.0
pycoingecko==3.2.0
@@ -40,12 +38,12 @@ orjson==3.11.3
sdnotify==0.3.2
# API Server
fastapi==0.116.1
pydantic==2.11.7
uvicorn==0.35.0
fastapi==0.118.0
pydantic==2.11.10
uvicorn==0.37.0
pyjwt==2.10.1
aiofiles==24.1.0
psutil==7.0.0
psutil==7.1.0
# Building config files interactively
questionary==2.1.1

View File

@@ -658,7 +658,9 @@ def test_start_new_strategy_no_arg():
args = [
"new-strategy",
]
with pytest.raises(OperationalException, match="`new-strategy` requires --strategy to be set."):
with pytest.raises(
OperationalException, match=r"`new-strategy` requires --strategy to be set\."
):
start_new_strategy(get_args(args))
@@ -803,7 +805,7 @@ def test_get_ui_download_url_direct(mocker):
assert last_version == "0.0.1"
assert x == "http://download1.zip"
with pytest.raises(ValueError, match="UI-Version not found."):
with pytest.raises(ValueError, match=r"UI-Version not found\."):
x, last_version = get_ui_download_url("0.0.3", False)
@@ -1650,7 +1652,7 @@ def test_hyperopt_show(mocker, capsys):
pargs = get_args(args)
pargs["config"] = None
with pytest.raises(
OperationalException, match="The index of the epoch to show should be greater than -4."
OperationalException, match=r"The index of the epoch to show should be greater than -4\."
):
start_hyperopt_show(pargs)
@@ -1658,7 +1660,7 @@ def test_hyperopt_show(mocker, capsys):
pargs = get_args(args)
pargs["config"] = None
with pytest.raises(
OperationalException, match="The index of the epoch to show should be less than 4."
OperationalException, match=r"The index of the epoch to show should be less than 4\."
):
start_hyperopt_show(pargs)
@@ -2032,5 +2034,7 @@ def test_start_edge():
]
pargs = get_args(args)
with pytest.raises(OperationalException, match="The Edge module has been deprecated in 2023.9"):
with pytest.raises(
OperationalException, match=r"The Edge module has been deprecated in 2023\.9"
):
start_edge(pargs)

View File

@@ -75,7 +75,7 @@ def test_get_latest_hyperopt_file(testdatadir):
# Test with absolute path
with pytest.raises(
OperationalException,
match="--hyperopt-filename expects only the filename, not an absolute path.",
match=r"--hyperopt-filename expects only the filename, not an absolute path\.",
):
get_latest_hyperopt_file(str(testdatadir.parent), str(testdatadir.parent))
@@ -344,7 +344,7 @@ def test_create_cum_profit1(testdatadir):
assert cum_profits.iloc[0]["cum_profits"] == 0
assert pytest.approx(cum_profits.iloc[-1]["cum_profits"]) == 9.0225563e-05
with pytest.raises(ValueError, match="Trade dataframe empty."):
with pytest.raises(ValueError, match=r"Trade dataframe empty\."):
create_cum_profit(
df.set_index("date"),
bt_data[bt_data["pair"] == "NOTAPAIR"],
@@ -369,10 +369,10 @@ def test_calculate_max_drawdown(testdatadir):
underwater = calculate_underwater(bt_data)
assert isinstance(underwater, DataFrame)
with pytest.raises(ValueError, match="Trade dataframe empty."):
with pytest.raises(ValueError, match=r"Trade dataframe empty\."):
calculate_max_drawdown(DataFrame())
with pytest.raises(ValueError, match="Trade dataframe empty."):
with pytest.raises(ValueError, match=r"Trade dataframe empty\."):
calculate_underwater(DataFrame())
@@ -391,7 +391,7 @@ def test_calculate_csum(testdatadir):
assert csum_min1 == csum_min + 5
assert csum_max1 == csum_max + 5
with pytest.raises(ValueError, match="Trade dataframe empty."):
with pytest.raises(ValueError, match=r"Trade dataframe empty\."):
csum_min, csum_max = calculate_csum(DataFrame())

View File

@@ -14,6 +14,7 @@ from freqtrade.data.converter import (
convert_trades_to_ohlcv,
ohlcv_fill_up_missing_data,
ohlcv_to_dataframe,
order_book_to_dataframe,
reduce_dataframe_footprint,
trades_df_remove_duplicates,
trades_dict_to_list,
@@ -49,7 +50,7 @@ def test_ohlcv_to_dataframe(ohlcv_history_list, caplog):
def test_trades_to_ohlcv(trades_history_df, caplog):
caplog.set_level(logging.DEBUG)
with pytest.raises(ValueError, match="Trade-list empty."):
with pytest.raises(ValueError, match=r"Trade-list empty\."):
trades_to_ohlcv(pd.DataFrame(columns=trades_history_df.columns), "1m")
df = trades_to_ohlcv(trades_history_df, "1m")
@@ -588,3 +589,77 @@ def test_convert_trades_to_ohlcv(testdatadir, tmp_path, caplog):
candle_type=CandleType.SPOT,
)
assert log_has(msg, caplog)
def test_order_book_to_dataframe():
bids = [
[100.0, 5.0],
[99.5, 3.0],
[99.0, 2.0],
]
asks = [
[100.5, 4.0],
[101.0, 6.0],
[101.5, 1.0],
]
result = order_book_to_dataframe(bids, asks)
assert isinstance(result, pd.DataFrame)
expected_columns = ["b_sum", "b_size", "bids", "asks", "a_size", "a_sum"]
assert result.columns.tolist() == expected_columns
assert len(result) == max(len(bids), len(asks))
assert result["bids"].tolist() == [100.0, 99.5, 99.0]
assert result["b_size"].tolist() == [5.0, 3.0, 2.0]
assert result["b_sum"].tolist() == [5.0, 8.0, 10.0]
assert result["asks"].tolist() == [100.5, 101.0, 101.5]
assert result["a_size"].tolist() == [4.0, 6.0, 1.0]
assert result["a_sum"].tolist() == [4.0, 10.0, 11.0]
def test_order_book_to_dataframe_empty():
bids = []
asks = []
result = order_book_to_dataframe(bids, asks)
assert isinstance(result, pd.DataFrame)
expected_columns = ["b_sum", "b_size", "bids", "asks", "a_size", "a_sum"]
assert result.columns.tolist() == expected_columns
# Empty input should result in empty dataframe
assert len(result) == 0
def test_order_book_to_dataframe_unequal_lengths():
bids = [
[100.0, 5.0],
[99.5, 3.0],
[99.0, 2.0],
[98.5, 1.0],
]
asks = [
[100.5, 4.0],
[101.0, 6.0],
]
result = order_book_to_dataframe(bids, asks)
assert len(result) == max(len(bids), len(asks))
assert len(result) == 4
assert result["bids"].tolist() == [100.0, 99.5, 99.0, 98.5]
assert result["b_size"].tolist() == [5.0, 3.0, 2.0, 1.0]
assert result["b_sum"].tolist() == [5.0, 8.0, 10.0, 11.0]
assert result["asks"].tolist()[:2] == [100.5, 101.0]
# NA for missing asks
assert pd.isna(result["asks"].iloc[2])
assert pd.isna(result["asks"].iloc[3])
assert result["a_size"].tolist()[:2] == [4.0, 6.0]
assert result["a_sum"].tolist()[:2] == [4.0, 10.0]

View File

@@ -8,6 +8,7 @@ from freqtrade.data.dataprovider import DataProvider
from freqtrade.enums import CandleType, RunMode
from freqtrade.exceptions import ExchangeError, OperationalException
from freqtrade.plugins.pairlistmanager import PairListManager
from freqtrade.util import dt_utc
from tests.conftest import EXMS, generate_test_data, get_patched_exchange
@@ -449,6 +450,12 @@ def test_no_exchange_mode(default_conf):
with pytest.raises(OperationalException, match=message):
dp.available_pairs()
with pytest.raises(OperationalException, match=message):
dp.funding_rate("XRP/USDT:USDT")
with pytest.raises(OperationalException, match=message):
dp.check_delisting("XRP/USDT")
def test_dp_send_msg(default_conf):
default_conf["runmode"] = RunMode.DRY_RUN
@@ -612,3 +619,20 @@ def test_dp_get_required_startup(default_conf_usdt):
assert dp.get_required_startup("5m") == 51880
assert dp.get_required_startup("1h") == 4360
assert dp.get_required_startup("1d") == 220
def test_check_delisting(mocker, default_conf_usdt):
delist_mock = MagicMock(return_value=None)
exchange = get_patched_exchange(mocker, default_conf_usdt)
mocker.patch.object(exchange, "check_delisting_time", delist_mock)
dp = DataProvider(default_conf_usdt, exchange)
res = dp.check_delisting("ETH/USDT")
assert res is None
assert delist_mock.call_count == 1
delist_mock2 = MagicMock(return_value=dt_utc(2025, 10, 2))
mocker.patch.object(exchange, "check_delisting_time", delist_mock2)
res = dp.check_delisting("XRP/USDT")
assert res == dt_utc(2025, 10, 2)
assert delist_mock2.call_count == 1

View File

@@ -18,6 +18,7 @@ from freqtrade.data.converter import ohlcv_to_dataframe
from freqtrade.data.history import get_datahandler
from freqtrade.data.history.datahandlers.jsondatahandler import JsonDataHandler, JsonGzDataHandler
from freqtrade.data.history.history_utils import (
_download_all_pairs_history_parallel,
_download_pair_history,
_download_trades_history,
_load_cached_data_for_updating,
@@ -545,6 +546,14 @@ def test_refresh_backtest_ohlcv_data(
):
caplog.set_level(logging.DEBUG)
dl_mock = mocker.patch("freqtrade.data.history.history_utils._download_pair_history")
def parallel_mock(pairs, timeframe, candle_type, **kwargs):
return {(pair, timeframe, candle_type): DataFrame() for pair in pairs}
parallel_mock = mocker.patch(
"freqtrade.data.history.history_utils._download_all_pairs_history_parallel",
side_effect=parallel_mock,
)
mocker.patch(f"{EXMS}.markets", PropertyMock(return_value=markets))
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
@@ -559,10 +568,12 @@ def test_refresh_backtest_ohlcv_data(
timeframes=["1m", "5m"],
datadir=testdatadir,
timerange=timerange,
erase=True,
erase=False,
trading_mode=trademode,
)
# Called once per timeframe (as we return an empty dataframe)
assert parallel_mock.call_count == 2
assert dl_mock.call_count == callcount
assert dl_mock.call_args[1]["timerange"].starttype == "date"
@@ -699,3 +710,256 @@ def test_download_trades_history(
assert ght_mock.call_count == 0
_clean_test_file(file2)
def test_download_all_pairs_history_parallel(mocker, default_conf_usdt):
pairs = ["PAIR1/BTC", "PAIR2/USDT"]
timeframe = "5m"
candle_type = CandleType.SPOT
df1 = DataFrame(
{
"date": [1, 2],
"open": [1, 2],
"close": [1, 2],
"high": [1, 2],
"low": [1, 2],
"volume": [1, 2],
}
)
df2 = DataFrame(
{
"date": [3, 4],
"open": [3, 4],
"close": [3, 4],
"high": [3, 4],
"low": [3, 4],
"volume": [3, 4],
}
)
expected = {
("PAIR1/BTC", timeframe, candle_type): df1,
("PAIR2/USDT", timeframe, candle_type): df2,
}
# Mock exchange
mocker.patch.multiple(
EXMS,
exchange_has=MagicMock(return_value=True),
ohlcv_candle_limit=MagicMock(return_value=1000),
refresh_latest_ohlcv=MagicMock(return_value=expected),
)
exchange = get_patched_exchange(mocker, default_conf_usdt)
# timerange with starttype 'date' and startts far in the future to trigger parallel download
timerange = TimeRange("date", None, 9999999999, 0)
result = _download_all_pairs_history_parallel(
exchange=exchange,
pairs=pairs,
timeframe=timeframe,
candle_type=candle_type,
timerange=timerange,
)
assert result == expected
assert exchange.ohlcv_candle_limit.call_args[0] == (timeframe, candle_type)
assert exchange.refresh_latest_ohlcv.call_count == 1
# If since is not after one_call_min_time_dt, should not call refresh_latest_ohlcv
exchange.refresh_latest_ohlcv.reset_mock()
timerange2 = TimeRange("date", None, 0, 0)
result2 = _download_all_pairs_history_parallel(
exchange=exchange,
pairs=pairs,
timeframe=timeframe,
candle_type=candle_type,
timerange=timerange2,
)
assert result2 == {}
assert exchange.refresh_latest_ohlcv.call_count == 0
exchange.refresh_latest_ohlcv.reset_mock()
# Test without timerange
result3 = _download_all_pairs_history_parallel(
exchange=exchange,
pairs=pairs,
timeframe=timeframe,
candle_type=candle_type,
timerange=None,
)
assert result3 == {}
assert exchange.refresh_latest_ohlcv.call_count == 0
def test_download_pair_history_with_pair_candles(mocker, default_conf, tmp_path, caplog) -> None:
"""
Test _download_pair_history with pair_candles parameter (parallel method).
"""
exchange = get_patched_exchange(mocker, default_conf)
# Create test data for existing cached data
existing_data = DataFrame(
{
"date": [dt_utc(2018, 1, 10, 10, 0), dt_utc(2018, 1, 10, 10, 5)],
"open": [1.0, 1.15],
"high": [1.1, 1.2],
"low": [0.9, 1.1],
"close": [1.05, 1.15],
"volume": [100, 150],
}
)
# Create pair_candles data that will be used instead of exchange download
# This data should start before or at the same time as since_ms to trigger the else branch
pair_candles_data = DataFrame(
{
"date": [
dt_utc(2018, 1, 10, 10, 5),
dt_utc(2018, 1, 10, 10, 10),
dt_utc(2018, 1, 10, 10, 15),
],
"open": [1.15, 1.2, 1.25],
"high": [1.25, 1.3, 1.35],
"low": [1.1, 1.15, 1.2],
"close": [1.2, 1.25, 1.3],
"volume": [200, 250, 300],
}
)
# Mock the data handler to return existing cached data
data_handler_mock = MagicMock()
data_handler_mock.ohlcv_load.return_value = existing_data
data_handler_mock.ohlcv_store = MagicMock()
mocker.patch(
"freqtrade.data.history.history_utils.get_datahandler", return_value=data_handler_mock
)
# Mock _load_cached_data_for_updating to return existing data and since_ms
since_ms = dt_ts(dt_utc(2018, 1, 10, 10, 5)) # Time of last existing candle
mocker.patch(
"freqtrade.data.history.history_utils._load_cached_data_for_updating",
return_value=(existing_data, since_ms, None),
)
# Mock clean_ohlcv_dataframe to return concatenated data
expected_result = DataFrame(
{
"date": [
dt_utc(2018, 1, 10, 10, 0),
dt_utc(2018, 1, 10, 10, 5),
dt_utc(2018, 1, 10, 10, 10),
dt_utc(2018, 1, 10, 10, 15),
],
"open": [1.0, 1.15, 1.2, 1.25],
"high": [1.1, 1.25, 1.3, 1.35],
"low": [0.9, 1.1, 1.15, 1.2],
"close": [1.05, 1.2, 1.25, 1.3],
"volume": [100, 200, 250, 300],
}
)
get_historic_ohlcv_mock = MagicMock()
mocker.patch.object(exchange, "get_historic_ohlcv", get_historic_ohlcv_mock)
# Call _download_pair_history with pre-loaded pair_candles
result = _download_pair_history(
datadir=tmp_path,
exchange=exchange,
pair="TEST/BTC",
timeframe="5m",
candle_type=CandleType.SPOT,
pair_candles=pair_candles_data,
)
# Verify the function succeeded
assert result is True
# Verify that exchange.get_historic_ohlcv was NOT called (parallel method was used)
assert get_historic_ohlcv_mock.call_count == 0
# Verify the log message indicating parallel method was used (line 315-316)
assert log_has("Downloaded data for TEST/BTC with length 3. Parallel Method.", caplog)
# Verify data was stored
assert data_handler_mock.ohlcv_store.call_count == 1
stored_data = data_handler_mock.ohlcv_store.call_args_list[0][1]["data"]
assert stored_data.equals(expected_result)
assert len(stored_data) == 4
def test_download_pair_history_with_pair_candles_no_overlap(
    mocker, default_conf, tmp_path, caplog
) -> None:
    # _download_pair_history must fall back to the regular exchange download
    # when the pre-loaded pair_candles do NOT connect to the cached data
    # (they start after since_ms, leaving a gap that cannot be stitched).
    exchange = get_patched_exchange(mocker, default_conf)
    # Create test data for existing cached data
    existing_data = DataFrame(
        {
            "date": [dt_utc(2018, 1, 10, 10, 0), dt_utc(2018, 1, 10, 10, 5)],
            "open": [1.0, 1.1],
            "high": [1.1, 1.2],
            "low": [0.9, 1.0],
            "close": [1.05, 1.15],
            "volume": [100, 150],
        }
    )
    # Create pair_candles data that would normally replace the exchange download.
    # This data starts AFTER since_ms (10:10 > 10:05), so it cannot be used and
    # the regular download path (else branch) must be taken instead.
    pair_candles_data = DataFrame(
        {
            "date": [
                dt_utc(2018, 1, 10, 10, 10),
                dt_utc(2018, 1, 10, 10, 15),
                dt_utc(2018, 1, 10, 10, 20),
            ],
            "open": [1.15, 1.2, 1.25],
            "high": [1.25, 1.3, 1.35],
            "low": [1.1, 1.15, 1.2],
            "close": [1.2, 1.25, 1.3],
            "volume": [200, 250, 300],
        }
    )
    # Mock the data handler to return existing cached data
    data_handler_mock = MagicMock()
    data_handler_mock.ohlcv_load.return_value = existing_data
    data_handler_mock.ohlcv_store = MagicMock()
    mocker.patch(
        "freqtrade.data.history.history_utils.get_datahandler", return_value=data_handler_mock
    )
    # Mock _load_cached_data_for_updating to return existing data and since_ms
    since_ms = dt_ts(dt_utc(2018, 1, 10, 10, 5))  # Time of last existing candle
    mocker.patch(
        "freqtrade.data.history.history_utils._load_cached_data_for_updating",
        return_value=(existing_data, since_ms, None),
    )
    # Exchange download returns an empty frame - only the cached data remains.
    get_historic_ohlcv_mock = MagicMock(return_value=DataFrame())
    mocker.patch.object(exchange, "get_historic_ohlcv", get_historic_ohlcv_mock)
    # Call _download_pair_history with pre-loaded pair_candles
    result = _download_pair_history(
        datadir=tmp_path,
        exchange=exchange,
        pair="TEST/BTC",
        timeframe="5m",
        candle_type=CandleType.SPOT,
        pair_candles=pair_candles_data,
    )
    # Verify the function succeeded
    assert result is True
    # Verify that exchange.get_historic_ohlcv WAS called once
    # (non-overlapping pair_candles force the regular download path).
    assert get_historic_ohlcv_mock.call_count == 1
    # The "Parallel Method" log message must NOT appear.
    assert not log_has_re(r"Downloaded .* Parallel Method.", caplog)
    # Verify data was stored - only the cached data, since the download was empty.
    assert data_handler_mock.ohlcv_store.call_count == 1
    stored_data = data_handler_mock.ohlcv_store.call_args_list[0][1]["data"]
    assert stored_data.equals(existing_data)
    assert len(stored_data) == 2

View File

@@ -1,3 +1,4 @@
from copy import deepcopy
from datetime import datetime, timedelta
from random import randint
from unittest.mock import MagicMock, PropertyMock
@@ -7,7 +8,7 @@ import pandas as pd
import pytest
from freqtrade.data.converter.trade_converter import trades_dict_to_list
from freqtrade.enums import CandleType, MarginMode, TradingMode
from freqtrade.enums import CandleType, MarginMode, RunMode, TradingMode
from freqtrade.exceptions import DependencyException, InvalidOrderException, OperationalException
from freqtrade.exchange.exchange_utils_timeframe import timeframe_to_seconds
from freqtrade.persistence import Trade
@@ -1108,3 +1109,84 @@ async def test__async_get_trade_history_id_binance_fast(
# Clean up event loop to avoid warnings
exchange.close()
def test_check_delisting_time_binance(default_conf_usdt, mocker):
    """check_delisting_time dispatches to the spot/futures helpers based on run mode and trading mode."""
    exchange = get_patched_exchange(mocker, default_conf_usdt, exchange="binance")
    exchange._config["runmode"] = RunMode.BACKTEST
    spot_mock = MagicMock(return_value=None)
    futures_mock = MagicMock(return_value=None)
    mocker.patch.object(exchange, "_get_spot_pair_delist_time", spot_mock)
    mocker.patch.object(exchange, "_check_delisting_futures", futures_mock)

    # Backtest run mode: neither helper may be consulted.
    assert exchange.check_delisting_time("BTC/USDT") is None
    assert spot_mock.call_count == 0
    assert futures_mock.call_count == 0

    # Dry-run + spot: only the spot helper is consulted.
    exchange._config["runmode"] = RunMode.DRY_RUN
    assert exchange.check_delisting_time("BTC/USDT") is None
    assert spot_mock.call_count == 1
    assert futures_mock.call_count == 0
    spot_mock.reset_mock()

    # Futures trading mode: only the futures helper is consulted.
    exchange.trading_mode = TradingMode.FUTURES
    assert exchange.check_delisting_time("BTC/USDT:USDT") is None
    assert spot_mock.call_count == 0
    assert futures_mock.call_count == 1
def test__check_delisting_futures_binance(default_conf_usdt, mocker, markets):
    """_check_delisting_futures returns a delist date only for pairs with a real deliveryDate."""
    # Far-future placeholder deliveryDate (shared with BTC) marks a perpetual contract.
    far_future = 4133404800000
    markets["BTC/USDT:USDT"] = deepcopy(markets["SOL/BUSD:BUSD"])
    markets["BTC/USDT:USDT"]["info"]["deliveryDate"] = far_future
    markets["SOL/BUSD:BUSD"]["info"]["deliveryDate"] = far_future
    markets["ADA/USDT:USDT"]["info"]["deliveryDate"] = 1760745600000  # 2025-10-18
    exchange = get_patched_exchange(mocker, default_conf_usdt, exchange="binance")
    mocker.patch(f"{EXMS}.markets", PropertyMock(return_value=markets))

    # Delivery date equal to BTC's placeholder -> not considered a delisting.
    assert exchange._check_delisting_futures("SOL/BUSD:BUSD") is None
    # ADA carries an actual delisting date.
    assert exchange._check_delisting_futures("ADA/USDT:USDT") == dt_utc(2025, 10, 18)
def test__get_spot_delist_schedule_binance(default_conf_usdt, mocker):
    # Test _get_spot_pair_delist_time: run-mode gating, schedule parsing,
    # caching (refresh=False), and ccxt error handling of the underlying call.
    exchange = get_patched_exchange(mocker, default_conf_usdt, exchange="binance")
    ret_value = [{"delistTime": 1759114800000, "symbols": ["ETCBTC"]}]
    schedule_mock = mocker.patch.object(exchange, "_get_spot_delist_schedule", return_value=None)
    # None - mode is DRY
    assert exchange._get_spot_pair_delist_time("ETC/BTC") is None
    # Switch to live - schedule is consulted but yields None here.
    exchange._config["runmode"] = RunMode.LIVE
    assert exchange._get_spot_pair_delist_time("ETC/BTC") is None
    # Re-patch with a real payload; ETC/BTC now resolves to its delist time.
    # NOTE(review): this replaces schedule_mock - subsequent calls hit the new mock.
    mocker.patch.object(exchange, "_get_spot_delist_schedule", return_value=ret_value)
    resp = exchange._get_spot_pair_delist_time("ETC/BTC")
    assert resp == dt_utc(2025, 9, 29, 3, 0)
    # Exactly one of the two earlier lookups hit the schedule
    # (presumably the LIVE one, with dry-run gated out - confirm in implementation).
    assert schedule_mock.call_count == 1
    schedule_mock.reset_mock()
    # Caching - don't refresh: cached value returned without re-fetching the schedule.
    assert exchange._get_spot_pair_delist_time("ETC/BTC", refresh=False) == dt_utc(
        2025, 9, 29, 3, 0
    )
    assert schedule_mock.call_count == 0
    # ccxt errors from the underlying sapi endpoint are handled with 1 retry.
    api_mock = MagicMock()
    ccxt_exceptionhandlers(
        mocker,
        default_conf_usdt,
        api_mock,
        "binance",
        "_get_spot_delist_schedule",
        "sapi_get_spot_delist_schedule",
        retries=1,
    )

View File

@@ -28,6 +28,7 @@ from freqtrade.exchange import (
Bybit,
Exchange,
Kraken,
date_minus_candles,
market_is_active,
timeframe_to_prev_date,
)
@@ -858,7 +859,7 @@ def test_validate_pricing(default_conf, mocker):
default_conf["exchange"]["name"] = "binance"
ExchangeResolver.load_exchange(default_conf)
has.update({"fetchTicker": False})
with pytest.raises(OperationalException, match="Ticker pricing not available for .*"):
with pytest.raises(OperationalException, match=r"Ticker pricing not available for .*"):
ExchangeResolver.load_exchange(default_conf)
has.update({"fetchTicker": True})
@@ -867,7 +868,7 @@ def test_validate_pricing(default_conf, mocker):
ExchangeResolver.load_exchange(default_conf)
has.update({"fetchL2OrderBook": False})
with pytest.raises(OperationalException, match="Orderbook not available for .*"):
with pytest.raises(OperationalException, match=r"Orderbook not available for .*"):
ExchangeResolver.load_exchange(default_conf)
has.update({"fetchL2OrderBook": True})
@@ -876,7 +877,7 @@ def test_validate_pricing(default_conf, mocker):
default_conf["trading_mode"] = TradingMode.FUTURES
default_conf["margin_mode"] = MarginMode.ISOLATED
with pytest.raises(OperationalException, match="Ticker pricing not available for .*"):
with pytest.raises(OperationalException, match=r"Ticker pricing not available for .*"):
ExchangeResolver.load_exchange(default_conf)
@@ -2144,7 +2145,7 @@ def test___now_is_time_to_refresh(default_conf, mocker, exchange_name, time_mach
assert exchange._now_is_time_to_refresh(pair, "1d", candle_type) is True
@pytest.mark.parametrize("candle_type", ["mark", ""])
@pytest.mark.parametrize("candle_type", ["mark", "spot", "futures"])
@pytest.mark.parametrize("exchange_name", EXCHANGES)
def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_type):
caplog.set_level(logging.DEBUG)
@@ -2171,24 +2172,24 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_
exchange._async_get_candle_history = Mock(wraps=mock_candle_hist)
# one_call calculation * 1.8 should do 2 calls
candle_limit = exchange.ohlcv_candle_limit("5m", candle_type)
since = date_minus_candles("5m", candle_limit)
ret = exchange.get_historic_ohlcv(pair, "5m", dt_ts(since), candle_type=candle_type)
since = 5 * 60 * exchange.ohlcv_candle_limit("5m", candle_type) * 1.8
ret = exchange.get_historic_ohlcv(
pair, "5m", dt_ts(dt_now() - timedelta(seconds=since)), candle_type=candle_type
)
assert exchange._async_get_candle_history.call_count == 2
if exchange_name == "okx" and candle_type == "mark":
expected = 4
else:
expected = 2
assert exchange._async_get_candle_history.call_count == expected
# Returns twice the above OHLCV data after truncating the open candle.
assert len(ret) == 2
assert len(ret) == expected
assert log_has_re(r"Downloaded data for .* from ccxt with length .*\.", caplog)
caplog.clear()
exchange._async_get_candle_history = get_mock_coro(side_effect=TimeoutError())
with pytest.raises(TimeoutError):
exchange.get_historic_ohlcv(
pair, "5m", dt_ts(dt_now() - timedelta(seconds=since)), candle_type=candle_type
)
exchange.get_historic_ohlcv(pair, "5m", dt_ts(since), candle_type=candle_type)
assert log_has_re(r"Async code raised an exception: .*", caplog)
@@ -2335,7 +2336,7 @@ def test_refresh_latest_ohlcv(mocker, default_conf_usdt, caplog, candle_type) ->
if candle_type != CandleType.MARK:
assert not res
assert len(res) == 0
assert log_has_re(r"Cannot download \(IOTA\/USDT, 3m\).*", caplog)
assert log_has_re(r"Cannot download \(IOTA\/USDT, 3m, \S+\).*", caplog)
else:
assert len(res) == 1
@@ -3555,7 +3556,7 @@ def test_get_historic_trades_notsupported(
pair = "ETH/BTC"
with pytest.raises(
OperationalException, match="This exchange does not support downloading Trades."
OperationalException, match=r"This exchange does not support downloading Trades\."
):
exchange.get_historic_trades(pair, since=trades_history[0][0], until=trades_history[-1][0])
@@ -4441,7 +4442,7 @@ def test_get_markets(
def test_get_markets_error(default_conf, mocker):
ex = get_patched_exchange(mocker, default_conf)
mocker.patch(f"{EXMS}.markets", PropertyMock(return_value=None))
with pytest.raises(OperationalException, match="Markets were not loaded."):
with pytest.raises(OperationalException, match=r"Markets were not loaded\."):
ex.get_markets("LTC", "USDT", True, False)
@@ -4455,8 +4456,7 @@ def test_ohlcv_candle_limit(default_conf, mocker, exchange_name):
for timeframe in timeframes:
# if 'ohlcv_candle_limit_per_timeframe' in exchange._ft_has:
# expected = exchange._ft_has['ohlcv_candle_limit_per_timeframe'][timeframe]
# This should only run for bittrex
# assert exchange_name == 'bittrex'
# This should only run for htx
assert exchange.ohlcv_candle_limit(timeframe, CandleType.SPOT) == expected
@@ -5243,7 +5243,7 @@ def test__fetch_and_calculate_funding_fees(
# Return empty "refresh_latest"
mocker.patch(f"{EXMS}.refresh_latest_ohlcv", return_value={})
ex = get_patched_exchange(mocker, default_conf, api_mock, exchange=exchange)
with pytest.raises(ExchangeError, match="Could not find funding rates."):
with pytest.raises(ExchangeError, match=r"Could not find funding rates\."):
ex._fetch_and_calculate_funding_fees(
pair="ADA/USDT:USDT", amount=amount, is_short=False, open_date=d1, close_date=d2
)
@@ -6319,3 +6319,39 @@ def test_exchange_features(default_conf, mocker):
assert exchange.features("futures", "fetchOHLCV", "limit", 500) == 997
# Fall back to default
assert exchange.features("futures", "fetchOHLCV_else", "limit", 601) == 601
@pytest.mark.parametrize("exchange_name", EXCHANGES)
def test_fetch_funding_rate(default_conf, mocker, exchange_name):
    """
    fetch_funding_rate must pass through the ccxt payload, wrap ccxt errors,
    tolerate empty responses, and raise DependencyException for unknown pairs.
    """
    api_mock = MagicMock()
    # Keep the expected payload under its own name - the original test assigned
    # the function's return value back to `funding_rate`, turning every
    # assertion below into a self-comparison that could never fail.
    expected_rate = {
        "symbol": "ETH/BTC",
        "fundingRate": 5.652e-05,
        "fundingTimestamp": 1757174400000,
        "fundingDatetime": "2025-09-06T16:00:00.000Z",
    }
    api_mock.fetch_funding_rate = MagicMock(return_value=expected_rate)
    api_mock.markets = {"ETH/BTC": {"active": True}}
    exchange = get_patched_exchange(mocker, default_conf, api_mock, exchange=exchange_name)
    # The returned funding rate must match what ccxt provided.
    result = exchange.fetch_funding_rate(pair="ETH/BTC")
    assert result["fundingRate"] == expected_rate["fundingRate"]
    assert result["fundingTimestamp"] == expected_rate["fundingTimestamp"]
    assert result["fundingDatetime"] == expected_rate["fundingDatetime"]
    # ccxt exceptions raised by fetch_funding_rate are wrapped appropriately.
    ccxt_exceptionhandlers(
        mocker,
        default_conf,
        api_mock,
        exchange_name,
        "fetch_funding_rate",
        "fetch_funding_rate",
        pair="ETH/BTC",
    )
    # An empty ccxt response must not raise.
    api_mock.fetch_funding_rate = MagicMock(return_value={})
    exchange = get_patched_exchange(mocker, default_conf, api_mock, exchange=exchange_name)
    exchange.fetch_funding_rate(pair="ETH/BTC")
    # Pairs missing from exchange.markets must raise a DependencyException.
    with pytest.raises(DependencyException, match=r"Pair XRP/ETH not available"):
        exchange.fetch_funding_rate(pair="XRP/ETH")

View File

@@ -6,7 +6,8 @@ import pytest
from tests.conftest import EXMS, get_mock_coro, get_patched_exchange
def test_hyperliquid_dry_run_liquidation_price(default_conf, mocker):
@pytest.mark.parametrize("margin_mode", ["isolated", "cross"])
def test_hyperliquid_dry_run_liquidation_price(default_conf, mocker, margin_mode):
# test if liq price calculated by dry_run_liquidation_price() is close to ccxt liq price
# testing different pairs with large/small prices, different leverages, long, short
markets = {
@@ -281,7 +282,7 @@ def test_hyperliquid_dry_run_liquidation_price(default_conf, mocker):
api_mock = MagicMock()
default_conf["trading_mode"] = "futures"
default_conf["margin_mode"] = "isolated"
default_conf["margin_mode"] = margin_mode
default_conf["stake_currency"] = "USDC"
api_mock.load_markets = get_mock_coro()
api_mock.markets = markets
@@ -299,11 +300,32 @@ def test_hyperliquid_dry_run_liquidation_price(default_conf, mocker):
position["contracts"],
position["collateral"],
position["leverage"],
position["collateral"],
[],
# isolated doesn't use wallet-balance
wallet_balance=0.0 if margin_mode == "isolated" else position["collateral"],
open_trades=[],
)
# Assume full position size is the wallet balance
assert pytest.approx(liq_price_returned, rel=0.0001) == liq_price_calculated
if margin_mode == "cross":
# test with larger wallet balance
liq_price_calculated_cross = exchange.dry_run_liquidation_price(
position["symbol"],
position["entryPrice"],
is_short,
position["contracts"],
position["collateral"],
position["leverage"],
wallet_balance=position["collateral"] * 2,
open_trades=[],
)
# Assume full position size is the wallet balance
# This
if position["side"] == "long":
assert liq_price_returned > liq_price_calculated_cross < position["entryPrice"]
else:
assert liq_price_returned < liq_price_calculated_cross > position["entryPrice"]
def test_hyperliquid_get_funding_fees(default_conf, mocker):
now = datetime.now(UTC)

View File

@@ -20,11 +20,11 @@ def test_okx_ohlcv_candle_limit(default_conf, mocker):
for timeframe in timeframes:
assert exchange.ohlcv_candle_limit(timeframe, CandleType.SPOT) == 300
assert exchange.ohlcv_candle_limit(timeframe, CandleType.FUTURES) == 300
assert exchange.ohlcv_candle_limit(timeframe, CandleType.MARK) == 100
assert exchange.ohlcv_candle_limit(timeframe, CandleType.MARK) == 300
assert exchange.ohlcv_candle_limit(timeframe, CandleType.FUNDING_RATE) == 100
assert exchange.ohlcv_candle_limit(timeframe, CandleType.SPOT, start_time) == 100
assert exchange.ohlcv_candle_limit(timeframe, CandleType.FUTURES, start_time) == 100
assert exchange.ohlcv_candle_limit(timeframe, CandleType.SPOT, start_time) == 300
assert exchange.ohlcv_candle_limit(timeframe, CandleType.FUTURES, start_time) == 300
assert exchange.ohlcv_candle_limit(timeframe, CandleType.MARK, start_time) == 100
assert exchange.ohlcv_candle_limit(timeframe, CandleType.FUNDING_RATE, start_time) == 100
one_call = int(
@@ -36,6 +36,7 @@ def test_okx_ohlcv_candle_limit(default_conf, mocker):
assert exchange.ohlcv_candle_limit(timeframe, CandleType.SPOT, one_call) == 300
assert exchange.ohlcv_candle_limit(timeframe, CandleType.FUTURES, one_call) == 300
assert exchange.ohlcv_candle_limit(timeframe, CandleType.MARK, one_call) == 300
one_call = int(
(
@@ -43,8 +44,9 @@ def test_okx_ohlcv_candle_limit(default_conf, mocker):
).timestamp()
* 1000
)
assert exchange.ohlcv_candle_limit(timeframe, CandleType.SPOT, one_call) == 100
assert exchange.ohlcv_candle_limit(timeframe, CandleType.FUTURES, one_call) == 100
assert exchange.ohlcv_candle_limit(timeframe, CandleType.SPOT, one_call) == 300
assert exchange.ohlcv_candle_limit(timeframe, CandleType.FUTURES, one_call) == 300
assert exchange.ohlcv_candle_limit(timeframe, CandleType.MARK, one_call) == 100
def test_get_maintenance_ratio_and_amt_okx(

View File

@@ -28,7 +28,11 @@ EXCHANGES = {
"leverage_tiers_public": False,
"leverage_in_spot_market": False,
"trades_lookback_hours": 4,
"private_methods": ["fapiPrivateGetPositionSideDual", "fapiPrivateGetMultiAssetsMargin"],
"private_methods": [
"fapiPrivateGetPositionSideDual",
"fapiPrivateGetMultiAssetsMargin",
"sapi_get_spot_delist_schedule",
],
"sample_order": [
{
"exchange_response": {
@@ -149,6 +153,8 @@ EXCHANGES = {
"ADA.F": {"balance": "2.00000000", "hold_trade": "0.00000000"},
"XBT": {"balance": "0.00060000", "hold_trade": "0.00000000"},
"XBT.F": {"balance": "0.00100000", "hold_trade": "0.00000000"},
"ZEUR": {"balance": "1000.00000000", "hold_trade": "0.00000000"},
"ZUSD": {"balance": "1000.00000000", "hold_trade": "0.00000000"},
}
},
"expected": {
@@ -157,6 +163,8 @@ EXCHANGES = {
"BTC": {"free": 0.0006, "total": 0.0006, "used": 0.0},
# XBT.F should be mapped to BTC.F
"BTC.F": {"free": 0.001, "total": 0.001, "used": 0.0},
"EUR": {"free": 1000.0, "total": 1000.0, "used": 0.0},
"USD": {"free": 1000.0, "total": 1000.0, "used": 0.0},
},
},
},
@@ -415,6 +423,14 @@ EXCHANGES = {
"timeframe": "1h",
"candle_count": 1000,
},
"coinex": {
"pair": "BTC/USDT",
"stake_currency": "USDT",
"hasQuoteVolume": False,
"timeframe": "1h",
"candle_count": 1000,
"orderbook_max_entries": 50,
},
# TODO: re-enable htx once certificates work again
# "htx": {
# "pair": "ETH/BTC",

View File

@@ -986,7 +986,7 @@ def test_execute_entry(
# Fail to get price...
mocker.patch(f"{EXMS}.get_rate", MagicMock(return_value=0.0))
with pytest.raises(PricingError, match="Could not determine entry price."):
with pytest.raises(PricingError, match=r"Could not determine entry price\."):
freqtrade.execute_entry(pair, stake_amount, is_short=is_short)
# In case of custom entry price
@@ -2267,6 +2267,18 @@ def test_manage_open_orders_exit_usercustom(
freqtrade.manage_open_orders()
assert log_has_re("Emergency exiting trade.*", caplog)
assert et_mock.call_count == 1
# Full exit
assert et_mock.call_args_list[0][1]["sub_trade_amt"] == 30
et_mock.reset_mock()
# Full partially filled order
# Only places the order for the remaining amount
limit_sell_order_old["remaining"] = open_trade_usdt.amount - 10
freqtrade.manage_open_orders()
assert log_has_re("Emergency exiting trade.*", caplog)
assert et_mock.call_count == 1
assert et_mock.call_args_list[0][1]["sub_trade_amt"] == 20.0
@pytest.mark.parametrize("is_short", [False, True])

View File

@@ -18,7 +18,7 @@ from tests.optimize import (
)
# Test 0: Sell with signal sell in candle 3
# Test 0: exit with exit signal in candle 3
# Test with Stop-loss at 1%
tc0 = BTContainer(
data=[
@@ -279,7 +279,7 @@ tc12 = BTContainer(
trades=[BTrade(exit_reason=ExitType.TRAILING_STOP_LOSS, open_tick=1, close_tick=2)],
)
# Test 13: Buy and sell ROI on same candle
# Test 13: Enter and exit ROI on same candle
# stop-loss: 10% (should not apply), ROI: 1%
tc13 = BTContainer(
data=[
@@ -296,7 +296,7 @@ tc13 = BTContainer(
trades=[BTrade(exit_reason=ExitType.ROI, open_tick=1, close_tick=1)],
)
# Test 14 - Buy and Stoploss on same candle
# Test 14 - Enter and Stoploss on same candle
# stop-loss: 5%, ROI: 10% (should not apply)
tc14 = BTContainer(
data=[
@@ -314,7 +314,7 @@ tc14 = BTContainer(
)
# Test 15 - Buy and ROI on same candle, followed by buy and Stoploss on next candle
# Test 15 - Enter and ROI on same candle, followed by entry and Stoploss on next candle
# stop-loss: 5%, ROI: 10% (should not apply)
tc15 = BTContainer(
data=[
@@ -334,8 +334,8 @@ tc15 = BTContainer(
],
)
# Test 16: Buy, hold for 65 min, then forceexit using roi=-1
# Causes negative profit even though sell-reason is ROI.
# Test 16: Enter, hold for 65 min, then forceexit using roi=-1
# Causes negative profit even though exit-reason is ROI.
# stop-loss: 10%, ROI: 10% (should not apply), -100% after 65 minutes (limits trade duration)
tc16 = BTContainer(
data=[
@@ -353,10 +353,10 @@ tc16 = BTContainer(
trades=[BTrade(exit_reason=ExitType.ROI, open_tick=1, close_tick=3)],
)
# Test 17: Buy, hold for 120 mins, then forceexit using roi=-1
# Causes negative profit even though sell-reason is ROI.
# Test 17: Enter, hold for 120 mins, then forceexit using roi=-1
# Causes negative profit even though exit-reason is ROI.
# stop-loss: 10%, ROI: 10% (should not apply), -100% after 100 minutes (limits trade duration)
# Uses open as sell-rate (special case) - since the roi-time is a multiple of the timeframe.
# Uses open as exit-rate (special case) - since the roi-time is a multiple of the timeframe.
tc17 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
@@ -374,16 +374,16 @@ tc17 = BTContainer(
)
# Test 18: Buy, hold for 120 mins, then drop ROI to 1%, causing a sell in candle 3.
# Test 18: Enter, hold for 120 mins, then drop ROI to 1%, causing an exit in candle 3.
# stop-loss: 10%, ROI: 10% (should not apply), -100% after 100 minutes (limits trade duration)
# uses open_rate as sell-price
# uses open_rate as exit price
tc18 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
[0, 5000, 5025, 4975, 4987, 6172, 1, 0],
[1, 5000, 5025, 4975, 4987, 6172, 0, 0],
[2, 4987, 5300, 4950, 5200, 6172, 0, 0],
[3, 5200, 5220, 4940, 4962, 6172, 0, 0], # Sell on ROI (sells on open)
[3, 5200, 5220, 4940, 4962, 6172, 0, 0], # Exit on ROI (exits on open)
[4, 4962, 4987, 4950, 4950, 6172, 0, 0],
[5, 4950, 4975, 4925, 4950, 6172, 0, 0],
],
@@ -393,16 +393,16 @@ tc18 = BTContainer(
trades=[BTrade(exit_reason=ExitType.ROI, open_tick=1, close_tick=3)],
)
# Test 19: Buy, hold for 119 mins, then drop ROI to 1%, causing a sell in candle 3.
# Test 19: Enter, hold for 119 mins, then drop ROI to 1%, causing an exit in candle 3.
# stop-loss: 10%, ROI: 10% (should not apply), -100% after 100 minutes (limits trade duration)
# uses calculated ROI (1%) as sell rate, otherwise identical to tc18
# uses calculated ROI (1%) as exit rate, otherwise identical to tc18
tc19 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
[0, 5000, 5025, 4975, 4987, 6172, 1, 0],
[1, 5000, 5025, 4975, 4987, 6172, 0, 0],
[2, 4987, 5300, 4950, 5200, 6172, 0, 0],
[3, 5000, 5300, 4940, 4962, 6172, 0, 0], # Sell on ROI
[3, 5000, 5300, 4940, 4962, 6172, 0, 0], # Exit on ROI
[4, 4962, 4987, 4950, 4950, 6172, 0, 0],
[5, 4550, 4975, 4550, 4950, 6172, 0, 0],
],
@@ -412,16 +412,16 @@ tc19 = BTContainer(
trades=[BTrade(exit_reason=ExitType.ROI, open_tick=1, close_tick=3)],
)
# Test 20: Buy, hold for 119 mins, then drop ROI to 1%, causing a sell in candle 3.
# Test 20: Enter, hold for 119 mins, then drop ROI to 1%, causing an exit in candle 3.
# stop-loss: 10%, ROI: 10% (should not apply), -100% after 100 minutes (limits trade duration)
# uses calculated ROI (1%) as sell rate, otherwise identical to tc18
# uses calculated ROI (1%) as exit rate, otherwise identical to tc18
tc20 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
[0, 5000, 5025, 4975, 4987, 6172, 1, 0],
[1, 5000, 5025, 4975, 4987, 6172, 0, 0],
[2, 4987, 5300, 4950, 5200, 6172, 0, 0],
[3, 5200, 5300, 4940, 4962, 6172, 0, 0], # Sell on ROI
[3, 5200, 5300, 4940, 4962, 6172, 0, 0], # Exit on ROI
[4, 4962, 4987, 4950, 4950, 6172, 0, 0],
[5, 4925, 4975, 4925, 4950, 6172, 0, 0],
],
@@ -434,7 +434,7 @@ tc20 = BTContainer(
# Test 21: trailing_stop ROI collision.
# Roi should trigger before Trailing stop - otherwise Trailing stop profits can be > ROI
# which cannot happen in reality
# stop-loss: 10%, ROI: 4%, Trailing stop adjusted at the sell candle
# stop-loss: 10%, ROI: 4%, Trailing stop adjusted at the exit candle
tc21 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
@@ -501,10 +501,10 @@ tc23 = BTContainer(
# Test 24: trailing_stop Raises in candle 2 (does not trigger)
# applying a positive trailing stop of 3% since stop_positive_offset is reached.
# ROI is changed after this to 4%, dropping ROI below trailing_stop_positive, causing a sell
# ROI is changed after this to 4%, dropping ROI below trailing_stop_positive, causing an exit
# in the candle after the raised stoploss candle with ROI reason.
# Stoploss would trigger in this candle too, but it's no longer relevant.
# stop-loss: 10%, ROI: 4%, stoploss adjusted candle 2, ROI adjusted in candle 3 (causing the sell)
# stop-loss: 10%, ROI: 4%, stoploss adjusted candle 2, ROI adjusted in candle 3 (causing the exit)
tc24 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
@@ -524,16 +524,16 @@ tc24 = BTContainer(
trades=[BTrade(exit_reason=ExitType.ROI, open_tick=1, close_tick=3)],
)
# Test 25: Sell with signal sell in candle 3 (stoploss also triggers on this candle)
# Test 25: Exit with exit signal in candle 3 (stoploss also triggers on this candle)
# Stoploss at 1%.
# Stoploss wins over Sell-signal (because sell-signal is acted on in the next candle)
# Stoploss wins over exit-signal (because exit-signal is acted on in the next candle)
tc25 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
[0, 5000, 5025, 4975, 4987, 6172, 1, 0],
[1, 5000, 5025, 4975, 4987, 6172, 0, 0], # enter trade (signal on last candle)
[2, 4987, 5012, 4986, 4986, 6172, 0, 0],
[3, 5010, 5010, 4855, 5010, 6172, 0, 1], # Triggers stoploss + sellsignal
[3, 5010, 5010, 4855, 5010, 6172, 0, 1], # Triggers stoploss + exit-signal
[4, 5010, 5010, 4977, 4995, 6172, 0, 0],
[5, 4995, 4995, 4950, 4950, 6172, 0, 0],
],
@@ -544,9 +544,9 @@ tc25 = BTContainer(
trades=[BTrade(exit_reason=ExitType.STOP_LOSS, open_tick=1, close_tick=3)],
)
# Test 26: Sell with signal sell in candle 3 (stoploss also triggers on this candle)
# Test 26: Exit with exit signal in candle 3 (stoploss also triggers on this candle)
# Stoploss at 1%.
# Sell-signal wins over stoploss
# Exit-signal wins over stoploss
tc26 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
@@ -554,7 +554,7 @@ tc26 = BTContainer(
[1, 5000, 5025, 4975, 4987, 6172, 0, 0], # enter trade (signal on last candle)
[2, 4987, 5012, 4986, 4986, 6172, 0, 0],
[3, 5010, 5010, 4986, 5010, 6172, 0, 1],
[4, 5010, 5010, 4855, 4995, 6172, 0, 0], # Triggers stoploss + sellsignal acted on
[4, 5010, 5010, 4855, 4995, 6172, 0, 0], # Triggers stoploss + exit-signal acted on
[5, 4995, 4995, 4950, 4950, 6172, 0, 0],
],
stop_loss=-0.01,
@@ -565,9 +565,9 @@ tc26 = BTContainer(
)
# Test 27: (copy of test26 with leverage)
# Sell with signal sell in candle 3 (stoploss also triggers on this candle)
# Exit with exit signal in candle 3 (stoploss also triggers on this candle)
# Stoploss at 1%.
# Sell-signal wins over stoploss
# exit-signal wins over stoploss
tc27 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
@@ -575,7 +575,7 @@ tc27 = BTContainer(
[1, 5000, 5025, 4975, 4987, 6172, 0, 0], # enter trade (signal on last candle)
[2, 4987, 5012, 4986, 4986, 6172, 0, 0],
[3, 5010, 5010, 4986, 5010, 6172, 0, 1],
[4, 5010, 5010, 4855, 4995, 6172, 0, 0], # Triggers stoploss + sellsignal acted on
[4, 5010, 5010, 4855, 4995, 6172, 0, 0], # Triggers stoploss + exit-signal acted on
[5, 4995, 4995, 4950, 4950, 6172, 0, 0],
],
stop_loss=-0.05,
@@ -587,9 +587,9 @@ tc27 = BTContainer(
)
# Test 28: (copy of test26 with leverage and as short)
# Sell with signal sell in candle 3 (stoploss also triggers on this candle)
# Exit with exit signal in candle 3 (stoploss also triggers on this candle)
# Stoploss at 1%.
# Sell-signal wins over stoploss
# Exit-signal wins over stoploss
tc28 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
@@ -597,7 +597,7 @@ tc28 = BTContainer(
[1, 5000, 5025, 4975, 4987, 6172, 0, 0, 0, 0], # enter trade (signal on last candle)
[2, 4987, 5012, 4986, 4986, 6172, 0, 0, 0, 0],
[3, 5010, 5010, 4986, 5010, 6172, 0, 0, 0, 1],
[4, 4990, 5010, 4855, 4995, 6172, 0, 0, 0, 0], # Triggers stoploss + sellsignal acted on
[4, 4990, 5010, 4855, 4995, 6172, 0, 0, 0, 0], # Triggers stoploss + exit-signal acted on
[5, 4995, 4995, 4950, 4950, 6172, 0, 0, 0, 0],
],
stop_loss=-0.05,
@@ -607,16 +607,16 @@ tc28 = BTContainer(
leverage=5.0,
trades=[BTrade(exit_reason=ExitType.EXIT_SIGNAL, open_tick=1, close_tick=4, is_short=True)],
)
# Test 29: Sell with signal sell in candle 3 (ROI at signal candle)
# Test 29: Exit with exit signal in candle 3 (ROI at signal candle)
# Stoploss at 10% (irrelevant), ROI at 5% (will trigger)
# Sell-signal wins over stoploss
# Exit-signal wins over stoploss
tc29 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
[0, 5000, 5025, 4975, 4987, 6172, 1, 0],
[1, 5000, 5025, 4975, 4987, 6172, 0, 0], # enter trade (signal on last candle)
[2, 4987, 5012, 4986, 4986, 6172, 0, 0],
[3, 5010, 5251, 4986, 5010, 6172, 0, 1], # Triggers ROI, sell-signal
[3, 5010, 5251, 4986, 5010, 6172, 0, 1], # Triggers ROI, exit-signal
[4, 5010, 5010, 4855, 4995, 6172, 0, 0],
[5, 4995, 4995, 4950, 4950, 6172, 0, 0],
],
@@ -627,16 +627,16 @@ tc29 = BTContainer(
trades=[BTrade(exit_reason=ExitType.ROI, open_tick=1, close_tick=3)],
)
# Test 30: Sell with signal sell in candle 3 (ROI at signal candle)
# Stoploss at 10% (irrelevant), ROI at 5% (will trigger) - Wins over Sell-signal
# Test 30: Exit with exit signal in candle 3 (ROI at signal candle)
# Stoploss at 10% (irrelevant), ROI at 5% (will trigger) - Wins over exit-signal
tc30 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
[0, 5000, 5025, 4975, 4987, 6172, 1, 0],
[1, 5000, 5025, 4975, 4987, 6172, 0, 0], # enter trade (signal on last candle)
[2, 4987, 5012, 4986, 4986, 6172, 0, 0],
[3, 5010, 5012, 4986, 5010, 6172, 0, 1], # sell-signal
[4, 5010, 5251, 4855, 4995, 6172, 0, 0], # Triggers ROI, sell-signal acted on
[3, 5010, 5012, 4986, 5010, 6172, 0, 1], # exit-signal
[4, 5010, 5251, 4855, 4995, 6172, 0, 0], # Triggers ROI, exit-signal acted on
[5, 4995, 4995, 4950, 4950, 6172, 0, 0],
],
stop_loss=-0.10,
@@ -888,7 +888,7 @@ tc41 = BTContainer(
# Test 42: Custom-entry-price around candle low
# Would cause immediate ROI exit, but since the trade was entered
# below open, we treat this as cheating, and delay the sell by 1 candle.
# below open, we treat this as cheating, and delay the exit by 1 candle.
# details: https://github.com/freqtrade/freqtrade/issues/6261
tc42 = BTContainer(
data=[
@@ -945,7 +945,7 @@ tc44 = BTContainer(
)
# Test 45: Custom exit price above all candles
# causes sell signal timeout
# causes exit signal timeout
tc45 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT
@@ -964,7 +964,7 @@ tc45 = BTContainer(
)
# Test 46: (Short of tc45) Custom short exit price above below candles
# causes sell signal timeout
# causes exit signal timeout
tc46 = BTContainer(
data=[
# D O H L C V EL XL ES Xs BT

Some files were not shown because too many files have changed in this diff Show More