Merge branch 'develop' into add-current-drawdown-in-telegram-profit-command
29 .github/workflows/ci.yml vendored
@@ -25,7 +25,7 @@ jobs:
strategy:
matrix:
os: [ "ubuntu-22.04", "ubuntu-24.04" ]
python-version: ["3.10", "3.11", "3.12", "3.13"]
python-version: ["3.11", "3.12", "3.13"]
steps:
- uses: actions/checkout@v4

@@ -38,7 +38,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
with:
activate-environment: true
enable-cache: true

@@ -148,7 +148,7 @@ jobs:
mypy freqtrade scripts tests
- name: Discord notification
uses: rjstone/discord-webhook-notify@a975c85e53c8ea07b0b10f8461b0a90059816dcf #v2.1.1
uses: rjstone/discord-webhook-notify@c2597273488aeda841dd1e891321952b51f7996f #v2.2.1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: error

@@ -159,11 +159,8 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ "macos-13", "macos-14", "macos-15" ]
python-version: ["3.10", "3.11", "3.12", "3.13"]
exclude:
- os: macos-13
python-version: "3.13"
os: [ "macos-14", "macos-15" ]
python-version: ["3.11", "3.12", "3.13"]
steps:
- uses: actions/checkout@v4

@@ -177,7 +174,7 @@ jobs:
check-latest: true
- name: Install uv
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
with:
activate-environment: true
enable-cache: true

@@ -278,7 +275,7 @@ jobs:
mypy freqtrade scripts
- name: Discord notification
uses: rjstone/discord-webhook-notify@a975c85e53c8ea07b0b10f8461b0a90059816dcf #v2.1.1
uses: rjstone/discord-webhook-notify@c2597273488aeda841dd1e891321952b51f7996f #v2.2.1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: info

@@ -291,7 +288,7 @@ jobs:
strategy:
matrix:
os: [ windows-latest ]
python-version: ["3.10", "3.11", "3.12", "3.13"]
python-version: ["3.11", "3.12", "3.13"]
steps:
- uses: actions/checkout@v4

@@ -304,7 +301,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
with:
activate-environment: true
enable-cache: true

@@ -372,7 +369,7 @@ jobs:
shell: powershell
- name: Discord notification
uses: rjstone/discord-webhook-notify@a975c85e53c8ea07b0b10f8461b0a90059816dcf #v2.1.1
uses: rjstone/discord-webhook-notify@c2597273488aeda841dd1e891321952b51f7996f #v2.2.1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: error

@@ -430,7 +427,7 @@ jobs:
mkdocs build
- name: Discord notification
uses: rjstone/discord-webhook-notify@a975c85e53c8ea07b0b10f8461b0a90059816dcf #v2.1.1
uses: rjstone/discord-webhook-notify@c2597273488aeda841dd1e891321952b51f7996f #v2.2.1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: error

@@ -452,7 +449,7 @@ jobs:
python-version: "3.12"
- name: Install uv
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
with:
activate-environment: true
enable-cache: true

@@ -518,7 +515,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Discord notification
uses: rjstone/discord-webhook-notify@a975c85e53c8ea07b0b10f8461b0a90059816dcf #v2.1.1
uses: rjstone/discord-webhook-notify@c2597273488aeda841dd1e891321952b51f7996f #v2.2.1
if: always() && steps.check.outputs.has-permission && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: info
12 .github/workflows/docker-build.yml vendored
@@ -55,20 +55,12 @@ jobs:
run: |
echo "${DOCKER_PASSWORD}" | docker login --username ${DOCKER_USERNAME} --password-stdin

# We need docker experimental to pull the ARM image.
- name: Switch docker to experimental
run: |
docker version -f '{{.Server.Experimental}}'
echo $'{\n "experimental": true\n}' | sudo tee /etc/docker/daemon.json
sudo systemctl restart docker
docker version -f '{{.Server.Experimental}}'

- name: Set up QEMU
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0

- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 #v3.10.0
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #v3.11.1

- name: Available platforms
run: echo ${PLATFORMS}

@@ -124,7 +116,7 @@ jobs:
build_helpers/publish_docker_arm64.sh
- name: Discord notification
uses: rjstone/discord-webhook-notify@a975c85e53c8ea07b0b10f8461b0a90059816dcf #v2.1.1
uses: rjstone/discord-webhook-notify@c2597273488aeda841dd1e891321952b51f7996f #v2.2.1
if: always() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false) && (github.event_name != 'schedule')
with:
severity: info
29 .github/workflows/zizmor.yml vendored (new file)
@@ -0,0 +1,29 @@
name: GitHub Actions Security Analysis with zizmor 🌈

on:
push:
branches:
- develop
- stable
pull_request:
branches:
- develop
- stable

permissions: {}

jobs:
zizmor:
runs-on: ubuntu-latest
permissions:
security-events: write
# contents: read # only needed for private repos
# actions: read # only needed for private repos
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false

- name: Run zizmor 🌈
uses: zizmorcore/zizmor-action@f52a838cfabf134edcbaa7c8b3677dde20045018 # v0.1.1
@@ -14,14 +14,14 @@ repos:
additional_dependencies: ["python-rapidjson", "jsonschema"]

- repo: https://github.com/pycqa/flake8
rev: "7.2.0"
rev: "7.3.0"
hooks:
- id: flake8
additional_dependencies: [Flake8-pyproject]
# stages: [push]

- repo: https://github.com/pre-commit/mirrors-mypy
rev: "v1.16.0"
rev: "v1.16.1"
hooks:
- id: mypy
exclude: build_helpers

@@ -31,6 +31,7 @@ repos:
- types-requests==2.32.4.20250611
- types-tabulate==0.9.0.20241207
- types-python-dateutil==2.9.0.20250516
- scipy-stubs==1.16.0.2
- SQLAlchemy==2.0.41
# stages: [push]

@@ -43,7 +44,7 @@ repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.11.13'
rev: 'v0.12.2'
hooks:
- id: ruff
- id: ruff-format

@@ -82,6 +83,6 @@ repos:
# Ensure github actions remain safe
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.9.0
rev: v1.11.0
hooks:
- id: zizmor
18 Dockerfile
@@ -1,10 +1,10 @@
FROM python:3.13.5-slim-bookworm as base
FROM python:3.13.5-slim-bookworm AS base

# Setup env
ENV LANG C.UTF-8
ENV LC_ALL C.UTF-8
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONFAULTHANDLER 1
ENV LANG=C.UTF-8
ENV LC_ALL=C.UTF-8
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONFAULTHANDLER=1
ENV PATH=/home/ftuser/.local/bin:$PATH
ENV FT_APP_ENV="docker"

@@ -21,7 +21,7 @@ RUN mkdir /freqtrade \
WORKDIR /freqtrade

# Install dependencies
FROM base as python-deps
FROM base AS python-deps
RUN apt-get update \
&& apt-get -y install build-essential libssl-dev git libffi-dev libgfortran5 pkg-config cmake gcc \
&& apt-get clean \

@@ -30,7 +30,7 @@ RUN apt-get update \
# Install TA-lib
COPY build_helpers/* /tmp/
RUN cd /tmp && /tmp/install_ta-lib.sh && rm -r /tmp/*ta-lib*
ENV LD_LIBRARY_PATH /usr/local/lib
ENV LD_LIBRARY_PATH=/usr/local/lib

# Install dependencies
COPY --chown=ftuser:ftuser requirements.txt requirements-hyperopt.txt /freqtrade/

@@ -39,9 +39,9 @@ RUN pip install --user --no-cache-dir "numpy<3.0" \
&& pip install --user --no-cache-dir -r requirements-hyperopt.txt

# Copy dependencies to runtime-image
FROM base as runtime-image
FROM base AS runtime-image
COPY --from=python-deps /usr/local/lib /usr/local/lib
ENV LD_LIBRARY_PATH /usr/local/lib
ENV LD_LIBRARY_PATH=/usr/local/lib

COPY --from=python-deps --chown=ftuser:ftuser /home/ftuser/.local /home/ftuser/.local
@@ -64,7 +64,7 @@ Please find the complete documentation on the [freqtrade website](https://www.fr

## Features

- [x] **Based on Python 3.10+**: For botting on any operating system - Windows, macOS and Linux.
- [x] **Based on Python 3.11+**: For botting on any operating system - Windows, macOS and Linux.
- [x] **Persistence**: Persistence is achieved through sqlite.
- [x] **Dry-run**: Run the bot without paying money.
- [x] **Backtesting**: Run a simulation of your buy/sell strategy.

@@ -219,7 +219,7 @@ To run this bot we recommend you a cloud instance with a minimum of:

### Software requirements

- [Python >= 3.10](http://docs.python-guide.org/en/latest/starting/installation/)
- [Python >= 3.11](http://docs.python-guide.org/en/latest/starting/installation/)
- [pip](https://pip.pypa.io/en/stable/installing/)
- [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
- [TA-Lib](https://ta-lib.github.io/ta-lib-python/)
@@ -16,10 +16,12 @@ with require_dev.open("r") as rfile:
with require.open("r") as rfile:
requirements.extend(rfile.readlines())

# Extract types only
type_reqs = [
r.strip("\n") for r in requirements if r.startswith("types-") or r.startswith("SQLAlchemy")
]
# Extract relevant types only
supported = ("types-", "SQLAlchemy", "scipy-stubs")

# Find relevant dependencies
# Only keep the first part of the line up to the first space
type_reqs = [r.strip("\n").split()[0] for r in requirements if r.startswith(supported)]

with pre_commit_file.open("r") as file:
f = yaml.load(file, Loader=yaml.SafeLoader)
@@ -2,7 +2,7 @@
"$schema": "https://schema.freqtrade.io/schema.json",
"max_open_trades": 3,
"stake_currency": "USDT",
"stake_amount": 0.05,
"stake_amount": 30,
"tradable_balance_ratio": 0.99,
"fiat_display_currency": "USD",
"timeframe": "5m",
@@ -1,10 +1,10 @@
FROM python:3.11.13-slim-bookworm as base
FROM python:3.11.13-slim-bookworm AS base

# Setup env
ENV LANG C.UTF-8
ENV LC_ALL C.UTF-8
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONFAULTHANDLER 1
ENV LANG=C.UTF-8
ENV LC_ALL=C.UTF-8
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONFAULTHANDLER=1
ENV PATH=/home/ftuser/.local/bin:$PATH
ENV FT_APP_ENV="docker"

@@ -22,7 +22,7 @@ RUN mkdir /freqtrade \
WORKDIR /freqtrade

# Install dependencies
FROM base as python-deps
FROM base AS python-deps
RUN apt-get update \
&& apt-get -y install build-essential libssl-dev libffi-dev libgfortran5 pkg-config cmake gcc \
&& apt-get clean \

@@ -39,9 +39,9 @@ RUN pip install --user --no-cache-dir "numpy<3.0" \
&& pip install --user --no-cache-dir -r requirements.txt

# Copy dependencies to runtime-image
FROM base as runtime-image
FROM base AS runtime-image
COPY --from=python-deps /usr/local/lib /usr/local/lib
ENV LD_LIBRARY_PATH /usr/local/lib
ENV LD_LIBRARY_PATH=/usr/local/lib

COPY --from=python-deps --chown=ftuser:ftuser /home/ftuser/.local /home/ftuser/.local
@@ -304,6 +304,13 @@ The `IProtection` parent class provides a helper method for this in `calculate_l

Most exchanges supported by CCXT should work out of the box.

If you need to implement a specific exchange class, these are found in the `freqtrade/exchange` source folder. You'll also need to add the import to `freqtrade/exchange/__init__.py` to make the loading logic aware of the new exchange.
We recommend looking at existing exchange implementations to get an idea of what might be required.

!!! Warning
Implementing and testing an exchange can be a lot of trial and error, so please bear this in mind.
You should also have some development experience, as this is not a beginner task.

To quickly test the public endpoints of an exchange, add a configuration for your exchange to `tests/exchange_online/conftest.py` and run these tests with `pytest --longrun tests/exchange_online/test_ccxt_compat.py`.
Completing these tests successfully is a good starting point (it's a requirement, actually); however, they won't guarantee that the exchange works correctly, as they only cover public endpoints, not private endpoints (like order creation or similar).
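To make the paragraph above concrete: a new exchange class usually amounts to a small `Exchange` subclass with a `_ft_has` capability override, plus the matching import in `freqtrade/exchange/__init__.py` (the `Luno` class added later in this diff follows exactly this pattern). A minimal hypothetical sketch, with an invented exchange name and assumed capability flags:

```python
# freqtrade/exchange/myexchange.py  (hypothetical file name)
import logging

from freqtrade.exchange import Exchange
from freqtrade.exchange.exchange_types import FtHas

logger = logging.getLogger(__name__)


class Myexchange(Exchange):
    """
    Hypothetical exchange subclass; the capability flags below are assumptions
    for illustration, not values verified against a real exchange.
    """

    _ft_has: FtHas = {
        "ohlcv_has_history": False,   # assumed: only recent candles available
        "trades_has_history": False,  # assumed: no deep trade history
    }
```

The loading logic would then need `from freqtrade.exchange.myexchange import Myexchange` added to `freqtrade/exchange/__init__.py`, mirroring the `Luno` import shown further down in this diff.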
@@ -5,10 +5,10 @@
[](https://coveralls.io/github/freqtrade/freqtrade?branch=develop)
[](https://codeclimate.com/github/freqtrade/freqtrade/maintainability)

<!-- Place this tag where you want the button to render. -->
<a class="github-button" href="https://github.com/freqtrade/freqtrade" data-icon="octicon-star" data-size="large" aria-label="Star freqtrade/freqtrade on GitHub">Star</a>
<a class="github-button" href="https://github.com/freqtrade/freqtrade/fork" data-icon="octicon-repo-forked" data-size="large" aria-label="Fork freqtrade/freqtrade on GitHub">Fork</a>
<a class="github-button" href="https://github.com/freqtrade/freqtrade/archive/stable.zip" data-icon="octicon-cloud-download" data-size="large" aria-label="Download freqtrade/freqtrade on GitHub">Download</a>
<!-- GitHub action buttons -->
[:octicons-star-16: Star](https://github.com/freqtrade/freqtrade){ .md-button .md-button--sm }
[:octicons-repo-forked-16: Fork](https://github.com/freqtrade/freqtrade/fork){ .md-button .md-button--sm }
[:octicons-download-16: Download](https://github.com/freqtrade/freqtrade/archive/stable.zip){ .md-button .md-button--sm }

## Introduction

@@ -87,7 +87,7 @@ To run this bot we recommend you a linux cloud instance with a minimum of:

Alternatively

- Python 3.10+
- Python 3.11+
- pip (pip3)
- git
- TA-Lib
@@ -24,7 +24,7 @@ The easiest way to install and run Freqtrade is to clone the bot Github reposito
The `stable` branch contains the code of the last release (done usually once per month on an approximately one week old snapshot of the `develop` branch to prevent packaging bugs, so potentially it's more stable).

!!! Note
Python3.10 or higher and the corresponding `pip` are assumed to be available. The install-script will warn you and stop if that's not the case. `git` is also needed to clone the Freqtrade repository.
Python3.11 or higher and the corresponding `pip` are assumed to be available. The install-script will warn you and stop if that's not the case. `git` is also needed to clone the Freqtrade repository.
Also, python headers (`python<yourversion>-dev` / `python<yourversion>-devel`) must be available for the installation to complete successfully.

!!! Warning "Up-to-date clock"

@@ -42,7 +42,7 @@ These requirements apply to both [Script Installation](#script-installation) and

### Install guide

* [Python >= 3.10](http://docs.python-guide.org/en/latest/starting/installation/)
* [Python >= 3.11](http://docs.python-guide.org/en/latest/starting/installation/)
* [pip](https://pip.pypa.io/en/stable/installing/)
* [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
* [virtualenv](https://virtualenv.pypa.io/en/stable/installation.html) (Recommended)

@@ -54,7 +54,7 @@ We've included/collected install instructions for Ubuntu, MacOS, and Windows. Th
OS Specific steps are listed first, the common section below is necessary for all systems.

!!! Note
Python3.10 or higher and the corresponding pip are assumed to be available.
Python3.11 or higher and the corresponding pip are assumed to be available.

=== "Debian/Ubuntu"
#### Install necessary dependencies

@@ -179,7 +179,7 @@ You can as well update, configure and reset the codebase of your bot with `./scr
** --install **

With this option, the script will install the bot and most dependencies:
You will need to have git and python3.10+ installed beforehand for this to work.
You will need to have git and python3.11+ installed beforehand for this to work.

* Mandatory software as: `ta-lib`
* Setup your virtualenv under `.venv/`
@@ -37,7 +37,6 @@
{{ super() }}

<!-- Place this tag in your head or just before your close body tag. -->
<script async defer src="https://buttons.github.io/buttons.js"></script>
<script src="https://code.jquery.com/jquery-3.4.1.min.js"
integrity="sha256-CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo=" crossorigin="anonymous"></script>
@@ -1,7 +1,7 @@
markdown==3.8
markdown==3.8.2
mkdocs==1.6.1
mkdocs-material==9.6.14
mkdocs-material==9.6.15
mdx_truly_sane_lists==1.3
pymdown-extensions==10.15
pymdown-extensions==10.16
jinja2==3.1.6
mike==2.1.3
@@ -174,17 +174,27 @@ class AwesomeStrategy(IStrategy):

## Enter Tag

When your strategy has multiple buy signals, you can name the signal that triggered.
Then you can access your buy signal on `custom_exit`
When your strategy has multiple entry signals, you can name the signal that triggered.
Then you can access your entry signal on `custom_exit`

```python
def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
dataframe["enter_tag"] = ""
signal_rsi = (qtpylib.crossed_above(dataframe["rsi"], 35))
signal_bblower = (dataframe["bb_lowerband"] < dataframe["close"])
# Additional conditions
dataframe.loc[
(
(dataframe['rsi'] < 35) &
(dataframe['volume'] > 0)
),
['enter_long', 'enter_tag']] = (1, 'buy_signal_rsi')
signal_rsi
| signal_bblower
# ... additional signals to enter a long position
)
& (dataframe["volume"] > 0)
, "enter_long"
] = 1
# Concatenate the tags so all signals are kept
dataframe.loc[signal_rsi, "enter_tag"] += "long_signal_rsi "
dataframe.loc[signal_bblower, "enter_tag"] += "long_signal_bblower "

return dataframe

@@ -192,14 +202,17 @@ def custom_exit(self, pair: str, trade: Trade, current_time: datetime, current_r
current_profit: float, **kwargs):
dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
last_candle = dataframe.iloc[-1].squeeze()
if trade.enter_tag == 'buy_signal_rsi' and last_candle['rsi'] > 80:
return 'sell_signal_rsi'
if "long_signal_rsi" in trade.enter_tag and last_candle["rsi"] > 80:
return "exit_signal_rsi"
if "long_signal_bblower" in trade.enter_tag and last_candle["high"] > last_candle["bb_upperband"]:
return "exit_signal_bblower"
# ...
return None

```

!!! Note
`enter_tag` is limited to 100 characters, remaining data will be truncated.
`enter_tag` is limited to 255 characters, remaining data will be truncated.

!!! Warning
There is only one `enter_tag` column, which is used for both long and short trades.
@@ -213,17 +226,27 @@ Similar to [Entry Tagging](#enter-tag), you can also specify an exit tag.

``` python
def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
dataframe["exit_tag"] = ""
rsi_exit_signal = (dataframe["rsi"] > 70)
ema_exit_signal = (dataframe["ema20"] < dataframe["ema50"])
# Additional conditions
dataframe.loc[
(
(dataframe['rsi'] > 70) &
(dataframe['volume'] > 0)
),
['exit_long', 'exit_tag']] = (1, 'exit_rsi')
rsi_exit_signal
| ema_exit_signal
# ... additional signals to exit a long position
) &
(dataframe["volume"] > 0)
,
"exit_long"] = 1
# Concatenate the tags so all signals are kept
dataframe.loc[rsi_exit_signal, "exit_tag"] += "exit_signal_rsi "
dataframe.loc[ema_exit_signal, "exit_tag"] += "exit_signal_ema "

return dataframe
```

The provided exit-tag is then used as sell-reason - and shown as such in backtest results.
The provided exit-tag is then used as exit-reason - and shown as such in backtest results.

!!! Note
`exit_reason` is limited to 100 characters, remaining data will be truncated.
@@ -19,3 +19,31 @@
#available-endpoints ~ .md-typeset__scrollwrap .md-typeset__table th:first-of-type {
width: 35% !important;
}

.md-typeset .md-button--sm {
padding: 0.2em 1em;
font-size: 12px;
font-weight: 600;
background-color: #f6f8fa;
color: #24292f;
border: 1px solid #d0d7de;
border-radius: 0.25em;
text-decoration: none;
display: inline-block;
transition: all 0.2s ease;
cursor: pointer;
}

.md-typeset .md-button--sm:hover {
background-color: #e5eaee;
border-color: #d1d9e0;
text-decoration: none;
color: #24292f;
}

.md-typeset .md-button--sm:active {
background-color: #ebecf0;
border-color: #afb8c1;
box-shadow: inset 0 1px 0 rgba(175, 184, 193, 0.2);
}
@@ -5,7 +5,7 @@ We **strongly** recommend that Windows users use [Docker](docker_quickstart.md)
If that is not possible, try using the Windows Linux subsystem (WSL) - for which the Ubuntu instructions should work.
Otherwise, please follow the instructions below.

All instructions assume that python 3.10+ is installed and available.
All instructions assume that python 3.11+ is installed and available.

## Clone the git repository

@@ -42,7 +42,7 @@ cd freqtrade

Install ta-lib according to the [ta-lib documentation](https://github.com/TA-Lib/ta-lib-python#windows).

As compiling from source on windows has heavy dependencies (requires a partial visual studio installation), Freqtrade provides these dependencies (in the binary wheel format) for the latest 3 Python versions (3.10, 3.11, 3.12 and 3.13) and for 64bit Windows.
As compiling from source on windows has heavy dependencies (requires a partial visual studio installation), Freqtrade provides these dependencies (in the binary wheel format) for the latest 3 Python versions (3.11, 3.12 and 3.13) and for 64bit Windows.
These Wheels are also used by CI running on windows, and are therefore tested together with freqtrade.

Other versions must be downloaded from the above link.
@@ -1,6 +1,6 @@
"""Freqtrade bot"""

__version__ = "2025.6-dev"
__version__ = "2025.7-dev"

if "dev" in __version__:
from pathlib import Path

@@ -3,7 +3,7 @@
__main__.py for Freqtrade
To launch Freqtrade as a module

> python -m freqtrade (with Python >= 3.10)
> python -m freqtrade (with Python >= 3.11)
"""

from freqtrade import main
@@ -1,6 +1,6 @@
# flake8: noqa: F401

from freqtrade.configuration.config_secrets import sanitize_config
from freqtrade.configuration.config_secrets import remove_exchange_credentials, sanitize_config
from freqtrade.configuration.config_setup import setup_utils_configuration
from freqtrade.configuration.config_validation import validate_config_consistency
from freqtrade.configuration.configuration import Configuration
@@ -1,6 +1,27 @@
from copy import deepcopy

from freqtrade.constants import Config
from freqtrade.constants import Config, ExchangeConfig


_SENSITIVE_KEYS = [
"exchange.key",
"exchange.api_key",
"exchange.apiKey",
"exchange.secret",
"exchange.password",
"exchange.uid",
"exchange.account_id",
"exchange.accountId",
"exchange.wallet_address",
"exchange.walletAddress",
"exchange.private_key",
"exchange.privateKey",
"telegram.token",
"telegram.chat_id",
"discord.webhook_url",
"api_server.password",
"webhook.url",
]


def sanitize_config(config: Config, *, show_sensitive: bool = False) -> Config:

@@ -12,27 +33,8 @@ def sanitize_config(config: Config, *, show_sensitive: bool = False) -> Config:
"""
if show_sensitive:
return config
keys_to_remove = [
"exchange.key",
"exchange.api_key",
"exchange.apiKey",
"exchange.secret",
"exchange.password",
"exchange.uid",
"exchange.account_id",
"exchange.accountId",
"exchange.wallet_address",
"exchange.walletAddress",
"exchange.private_key",
"exchange.privateKey",
"telegram.token",
"telegram.chat_id",
"discord.webhook_url",
"api_server.password",
"webhook.url",
]
config = deepcopy(config)
for key in keys_to_remove:
for key in _SENSITIVE_KEYS:
if "." in key:
nested_keys = key.split(".")
nested_config = config

@@ -45,3 +47,21 @@ def sanitize_config(config: Config, *, show_sensitive: bool = False) -> Config:
config[key] = "REDACTED"

return config


def remove_exchange_credentials(exchange_config: ExchangeConfig, dry_run: bool) -> None:
"""
Removes exchange keys from the configuration and specifies dry-run
Used for backtesting / hyperopt and utils.
Modifies the input dict!
:param exchange_config: Exchange configuration
:param dry_run: If True, remove sensitive keys from the exchange configuration
"""
if not dry_run:
return

for key in [k for k in _SENSITIVE_KEYS if k.startswith("exchange.")]:
if "." in key:
key1 = key.removeprefix("exchange.")
if key1 in exchange_config:
exchange_config[key1] = ""
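A short usage sketch of the two helpers above, assuming the re-exports from `freqtrade.configuration` shown in the `__init__.py` hunk; the config values are invented placeholders:

```python
from freqtrade.configuration import remove_exchange_credentials, sanitize_config

config = {
    "dry_run": True,
    "exchange": {"name": "binance", "key": "dummy-key", "secret": "dummy-secret"},
    "telegram": {"token": "dummy-token", "chat_id": "12345"},
}

# sanitize_config works on a deep copy and masks every _SENSITIVE_KEYS entry.
masked = sanitize_config(config)
print(masked["exchange"]["key"], masked["telegram"]["token"])  # REDACTED REDACTED

# remove_exchange_credentials mutates the exchange section in place,
# but only when dry_run is True.
remove_exchange_credentials(config["exchange"], dry_run=config["dry_run"])
print(config["exchange"]["key"])  # "" (emptied for dry-run)
```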
@@ -4,9 +4,8 @@ This module contains the argument manager class

import logging
import re
from datetime import datetime, timezone

from typing_extensions import Self
from datetime import UTC, datetime
from typing import Self

from freqtrade.constants import DATETIME_PRINT_FORMAT
from freqtrade.exceptions import ConfigurationError

@@ -151,9 +150,7 @@ class TimeRange:
starts = rvals[index]
if stype[0] == "date" and len(starts) == 8:
start = int(
datetime.strptime(starts, "%Y%m%d")
.replace(tzinfo=timezone.utc)
.timestamp()
datetime.strptime(starts, "%Y%m%d").replace(tzinfo=UTC).timestamp()
)
elif len(starts) == 13:
start = int(starts) // 1000

@@ -164,9 +161,7 @@ class TimeRange:
stops = rvals[index]
if stype[1] == "date" and len(stops) == 8:
stop = int(
datetime.strptime(stops, "%Y%m%d")
.replace(tzinfo=timezone.utc)
.timestamp()
datetime.strptime(stops, "%Y%m%d").replace(tzinfo=UTC).timestamp()
)
elif len(stops) == 13:
stop = int(stops) // 1000
@@ -5,7 +5,7 @@ Helpers when analyzing backtest data
import logging
import zipfile
from copy import copy
from datetime import datetime, timezone
from datetime import UTC, datetime
from io import BytesIO, StringIO
from pathlib import Path
from typing import Any, Literal

@@ -324,7 +324,7 @@ def find_existing_backtest_stats(

if min_backtest_date is not None:
backtest_date = strategy_metadata["backtest_start_time"]
backtest_date = datetime.fromtimestamp(backtest_date, tz=timezone.utc)
backtest_date = datetime.fromtimestamp(backtest_date, tz=UTC)
if backtest_date < min_backtest_date:
# Do not use a cached result for this strategy as first result is too old.
del run_ids[strategy_name]
@@ -7,7 +7,7 @@ Common Interface for bot and strategy to access data.

import logging
from collections import deque
from datetime import datetime, timezone
from datetime import UTC, datetime
from typing import Any

from pandas import DataFrame, Timedelta, Timestamp, to_timedelta

@@ -98,7 +98,7 @@ class DataProvider:
:param candle_type: Any of the enum CandleType (must match trading mode!)
"""
pair_key = (pair, timeframe, candle_type)
self.__cached_pairs[pair_key] = (dataframe, datetime.now(timezone.utc))
self.__cached_pairs[pair_key] = (dataframe, datetime.now(UTC))

# For multiple producers we will want to merge the pairlists instead of overwriting
def _set_producer_pairs(self, pairlist: list[str], producer_name: str = "default"):

@@ -131,7 +131,7 @@ class DataProvider:
"data": {
"key": pair_key,
"df": dataframe.tail(1),
"la": datetime.now(timezone.utc),
"la": datetime.now(UTC),
},
}
self.__rpc.send_msg(msg)

@@ -164,7 +164,7 @@ class DataProvider:
if producer_name not in self.__producer_pairs_df:
self.__producer_pairs_df[producer_name] = {}

_last_analyzed = datetime.now(timezone.utc) if not last_analyzed else last_analyzed
_last_analyzed = datetime.now(UTC) if not last_analyzed else last_analyzed

self.__producer_pairs_df[producer_name][pair_key] = (dataframe, _last_analyzed)
logger.debug(f"External DataFrame for {pair_key} from {producer_name} added.")

@@ -275,12 +275,12 @@ class DataProvider:
# If we have no data from this Producer yet
if producer_name not in self.__producer_pairs_df:
# We don't have this data yet, return empty DataFrame and datetime (01-01-1970)
return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
return (DataFrame(), datetime.fromtimestamp(0, tz=UTC))

# If we do have data from that Producer, but no data on this pair_key
if pair_key not in self.__producer_pairs_df[producer_name]:
# We don't have this data yet, return empty DataFrame and datetime (01-01-1970)
return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
return (DataFrame(), datetime.fromtimestamp(0, tz=UTC))

# We have it, return this data
df, la = self.__producer_pairs_df[producer_name][pair_key]

@@ -396,10 +396,10 @@ class DataProvider:
if (max_index := self.__slice_index.get(pair)) is not None:
df = df.iloc[max(0, max_index - MAX_DATAFRAME_CANDLES) : max_index]
else:
return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
return (DataFrame(), datetime.fromtimestamp(0, tz=UTC))
return df, date
else:
return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
return (DataFrame(), datetime.fromtimestamp(0, tz=UTC))

@property
def runmode(self) -> RunMode:
@@ -8,7 +8,7 @@ import logging
import re
from abc import ABC, abstractmethod
from copy import deepcopy
from datetime import datetime, timezone
from datetime import UTC, datetime
from pathlib import Path

from pandas import DataFrame, to_datetime

@@ -118,8 +118,8 @@ class IDataHandler(ABC):
df = self._ohlcv_load(pair, timeframe, None, candle_type)
if df.empty:
return (
datetime.fromtimestamp(0, tz=timezone.utc),
datetime.fromtimestamp(0, tz=timezone.utc),
datetime.fromtimestamp(0, tz=UTC),
datetime.fromtimestamp(0, tz=UTC),
0,
)
return df.iloc[0]["date"].to_pydatetime(), df.iloc[-1]["date"].to_pydatetime(), len(df)

@@ -201,8 +201,8 @@ class IDataHandler(ABC):
df = self._trades_load(pair, trading_mode)
if df.empty:
return (
datetime.fromtimestamp(0, tz=timezone.utc),
datetime.fromtimestamp(0, tz=timezone.utc),
datetime.fromtimestamp(0, tz=UTC),
datetime.fromtimestamp(0, tz=UTC),
0,
)
return (
@@ -13,4 +13,4 @@ class MarginMode(str, Enum):
NONE = ""

def __str__(self):
return f"{self.name.lower()}"
return f"{self.value.lower()}"
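For context on why the one-line hunk above matters: on a `str`-based enum, `.name` and `.value` can differ, most visibly for the empty `NONE` member. A small illustrative snippet using a stand-in enum (not the freqtrade class itself):

```python
from enum import Enum


class DemoMarginMode(str, Enum):
    # Mirrors the shape of freqtrade's MarginMode for illustration only.
    ISOLATED = "isolated"
    NONE = ""


# Old, name-based __str__ behaviour vs. the new, value-based one:
print(DemoMarginMode.ISOLATED.name.lower())   # "isolated"
print(DemoMarginMode.NONE.name.lower())       # "none"  (old rendering)
print(DemoMarginMode.NONE.value.lower())      # ""      (new rendering)
```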
@@ -1,6 +1,6 @@
# flake8: noqa: F401
# isort: off
from freqtrade.exchange.common import remove_exchange_credentials, MAP_EXCHANGE_CHILDCLASS
from freqtrade.exchange.common import MAP_EXCHANGE_CHILDCLASS
from freqtrade.exchange.exchange import Exchange

# isort: on

@@ -43,4 +43,5 @@ from freqtrade.exchange.idex import Idex
from freqtrade.exchange.kraken import Kraken
from freqtrade.exchange.kucoin import Kucoin
from freqtrade.exchange.lbank import Lbank
from freqtrade.exchange.luno import Luno
from freqtrade.exchange.okx import Okx
@@ -1,7 +1,7 @@
"""Binance exchange subclass"""

import logging
from datetime import datetime, timezone
from datetime import UTC, datetime
from pathlib import Path

import ccxt

@@ -160,7 +160,7 @@ class Binance(Exchange):
since_ms = x[3][0][0]
logger.info(
f"Candle-data for {pair} available starting with "
f"{datetime.fromtimestamp(since_ms // 1000, tz=timezone.utc).isoformat()}."
f"{datetime.fromtimestamp(since_ms // 1000, tz=UTC).isoformat()}."
)
if until_ms and since_ms >= until_ms:
logger.warning(

@@ -399,7 +399,7 @@ class Binance(Exchange):
trades = await self._api_async.fetch_trades(
pair,
params={
self._trades_pagination_arg: "0",
self._ft_has["trades_pagination_arg"]: "0",
},
limit=5,
)
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
"""Bitpanda exchange subclass"""

import logging
from datetime import datetime, timezone
from datetime import UTC, datetime

from freqtrade.exchange import Exchange

@@ -34,5 +34,5 @@ class Bitpanda(Exchange):
:param pair: Pair the order is for
:param since: datetime object of the order creation time. Assumes object is in UTC.
"""
params = {"to": int(datetime.now(timezone.utc).timestamp() * 1000)}
params = {"to": int(datetime.now(UTC).timestamp() * 1000)}
return super().get_trades_for_order(order_id, pair, since, params)
@@ -5,7 +5,6 @@ from collections.abc import Callable
from functools import wraps
from typing import Any, TypeVar, cast, overload

from freqtrade.constants import ExchangeConfig
from freqtrade.exceptions import DDosProtection, RetryableOrderError, TemporaryError
from freqtrade.mixins import LoggingMixin

@@ -104,20 +103,6 @@ EXCHANGE_HAS_OPTIONAL = [
]


def remove_exchange_credentials(exchange_config: ExchangeConfig, dry_run: bool) -> None:
"""
Removes exchange keys from the configuration and specifies dry-run
Used for backtesting / hyperopt and utils.
Modifies the input dict!
"""
if dry_run:
exchange_config["key"] = ""
exchange_config["apiKey"] = ""
exchange_config["secret"] = ""
exchange_config["password"] = ""
exchange_config["uid"] = ""


def calculate_backoff(retrycount, max_retries):
"""
Calculate backoff
@@ -9,7 +9,7 @@ import logging
import signal
from collections.abc import Coroutine, Generator
from copy import deepcopy
from datetime import datetime, timedelta, timezone
from datetime import UTC, datetime, timedelta
from math import floor, isnan
from threading import Lock
from typing import Any, Literal, TypeGuard, TypeVar

@@ -21,6 +21,7 @@ from ccxt import TICK_SIZE
from dateutil import parser
from pandas import DataFrame, concat

from freqtrade.configuration import remove_exchange_credentials
from freqtrade.constants import (
DEFAULT_AMOUNT_RESERVE_PERCENT,
DEFAULT_TRADES_COLUMNS,

@@ -64,7 +65,6 @@ from freqtrade.exceptions import (
)
from freqtrade.exchange.common import (
API_FETCH_ORDER_RETRY_COUNT,
remove_exchange_credentials,
retrier,
retrier_async,
)

@@ -137,6 +137,7 @@ class Exchange:
"ohlcv_has_history": True, # Some exchanges (Kraken) don't provide history via ohlcv
"ohlcv_partial_candle": True,
"ohlcv_require_since": False,
"always_require_api_keys": False, # purge API keys for Dry-run. Must default to false.
# Check https://github.com/ccxt/ccxt/issues/10767 for removal of ohlcv_volume_currency
"ohlcv_volume_currency": "base", # "base" or "quote"
"tickers_have_quoteVolume": True,

@@ -199,6 +200,19 @@ class Exchange:

self._config.update(config)

# Leverage properties
self.trading_mode: TradingMode = config.get("trading_mode", TradingMode.SPOT)
self.margin_mode: MarginMode = (
MarginMode(config.get("margin_mode")) if config.get("margin_mode") else MarginMode.NONE
)
self.liquidation_buffer = config.get("liquidation_buffer", 0.05)

exchange_conf: ExchangeConfig = exchange_config if exchange_config else config["exchange"]

# Deep merge ft_has with default ft_has options
# Must be called before ft_has is used.
self.build_ft_has(exchange_conf)

# Holds last candle refreshed time of each pair
self._pairs_last_refresh_time: dict[PairWithTimeframe, int] = {}
# Timestamp of last markets refresh

@@ -227,33 +241,17 @@ class Exchange:
if config["dry_run"]:
logger.info("Instance is running with dry_run enabled")
logger.info(f"Using CCXT {ccxt.__version__}")
exchange_conf: dict[str, Any] = exchange_config if exchange_config else config["exchange"]
remove_exchange_credentials(exchange_conf, config.get("dry_run", False))
self.log_responses = exchange_conf.get("log_responses", False)

# Leverage properties
self.trading_mode: TradingMode = config.get("trading_mode", TradingMode.SPOT)
self.margin_mode: MarginMode = (
MarginMode(config.get("margin_mode")) if config.get("margin_mode") else MarginMode.NONE
# Don't remove exchange credentials for dry-run or if always_require_api_keys is set
remove_exchange_credentials(
exchange_conf,
not self._ft_has["always_require_api_keys"] and config.get("dry_run", False),
)
self.liquidation_buffer = config.get("liquidation_buffer", 0.05)

# Deep merge ft_has with default ft_has options
self._ft_has = deep_merge_dicts(self._ft_has, deepcopy(self._ft_has_default))
if self.trading_mode == TradingMode.FUTURES:
self._ft_has = deep_merge_dicts(self._ft_has_futures, self._ft_has)
if exchange_conf.get("_ft_has_params"):
self._ft_has = deep_merge_dicts(exchange_conf.get("_ft_has_params"), self._ft_has)
logger.info("Overriding exchange._ft_has with config params, result: %s", self._ft_has)
self.log_responses = exchange_conf.get("log_responses", False)

# Assign this directly for easy access
self._ohlcv_partial_candle = self._ft_has["ohlcv_partial_candle"]

self._max_trades_limit = self._ft_has["trades_limit"]

self._trades_pagination = self._ft_has["trades_pagination"]
self._trades_pagination_arg = self._ft_has["trades_pagination_arg"]

# Initialize ccxt objects
ccxt_config = self._ccxt_config
ccxt_config = deep_merge_dicts(exchange_conf.get("ccxt_config", {}), ccxt_config)

@@ -657,7 +655,7 @@ class Exchange:
if isinstance(markets, Exception):
raise markets
return None
except asyncio.TimeoutError as e:
except TimeoutError as e:
logger.warning("Could not load markets. Reason: %s", e)
raise TemporaryError from e

@@ -877,10 +875,24 @@ class Exchange:
(trading_mode, margin_mode) not in self._supported_trading_mode_margin_pairs
):
mm_value = margin_mode and margin_mode.value
raise OperationalException(
f"Freqtrade does not support {mm_value} {trading_mode} on {self.name}"
raise ConfigurationError(
f"Freqtrade does not support '{mm_value}' '{trading_mode}' on {self.name}."
)

def build_ft_has(self, exchange_conf: ExchangeConfig) -> None:
"""
Deep merge ft_has with default ft_has options
and with exchange_conf._ft_has_params if available.
This is called on initialization of the exchange object.
It must be called before ft_has is used.
"""
self._ft_has = deep_merge_dicts(self._ft_has, deepcopy(self._ft_has_default))
if self.trading_mode == TradingMode.FUTURES:
self._ft_has = deep_merge_dicts(self._ft_has_futures, self._ft_has)
if exchange_conf.get("_ft_has_params"):
self._ft_has = deep_merge_dicts(exchange_conf.get("_ft_has_params"), self._ft_has)
logger.info("Overriding exchange._ft_has with config params, result: %s", self._ft_has)

def get_option(self, param: str, default: Any | None = None) -> Any:
"""
Get parameter value from _ft_has

@@ -2208,7 +2220,7 @@ class Exchange:
_params = params if params else {}
my_trades = self._api.fetch_my_trades(
pair,
int((since.replace(tzinfo=timezone.utc).timestamp() - 5) * 1000),
int((since.replace(tzinfo=UTC).timestamp() - 5) * 1000),
params=_params,
)
matched_trades = [trade for trade in my_trades if trade["order"] == order_id]

@@ -2995,7 +3007,7 @@ class Exchange:
returns: List of dicts containing trades, the next iteration value (new "since" or trade_id)
"""
try:
trades_limit = self._max_trades_limit
trades_limit = self._ft_has["trades_limit"]
# fetch trades asynchronously
if params:
logger.debug("Fetching trades for pair %s, params: %s ", pair, params)

@@ -3039,7 +3051,7 @@ class Exchange:
"""
if not trades:
return None
if self._trades_pagination == "id":
if self._ft_has["trades_pagination"] == "id":
return trades[-1].get("id")
else:
return trades[-1].get("timestamp")

@@ -3057,7 +3069,7 @@ class Exchange:
) -> tuple[str, list[list]]:
"""
Asynchronously gets trade history using fetch_trades
use this when exchange uses id-based iteration (check `self._trades_pagination`)
use this when exchange uses id-based iteration (check `self._ft_has["trades_pagination"]`)
:param pair: Pair to fetch trade data for
:param since: Since as integer timestamp in milliseconds
:param until: Until as integer timestamp in milliseconds

@@ -3083,7 +3095,7 @@ class Exchange:
while True:
try:
t, from_id_next = await self._async_fetch_trades(
pair, params={self._trades_pagination_arg: from_id}
pair, params={self._ft_has["trades_pagination_arg"]: from_id}
)
if t:
trades.extend(t[x])

@@ -3111,7 +3123,7 @@ class Exchange:
) -> tuple[str, list[list]]:
"""
Asynchronously gets trade history using fetch_trades,
when the exchange uses time-based iteration (check `self._trades_pagination`)
when the exchange uses time-based iteration (check `self._ft_has["trades_pagination"]`)
:param pair: Pair to fetch trade data for
:param since: Since as integer timestamp in milliseconds
:param until: Until as integer timestamp in milliseconds

@@ -3165,9 +3177,9 @@ class Exchange:
until = ccxt.Exchange.milliseconds()
logger.debug(f"Exchange milliseconds: {until}")

if self._trades_pagination == "time":
if self._ft_has["trades_pagination"] == "time":
return await self._async_get_trade_history_time(pair=pair, since=since, until=until)
elif self._trades_pagination == "id":
elif self._ft_has["trades_pagination"] == "id":
return await self._async_get_trade_history_id(
pair=pair, since=since, until=until, from_id=from_id
)

@@ -3335,7 +3347,7 @@ class Exchange:
if not filename.parent.is_dir():
filename.parent.mkdir(parents=True)
data = {
"updated": datetime.now(timezone.utc),
"updated": datetime.now(UTC),
"data": tiers,
}
file_dump_json(filename, data)

@@ -3357,7 +3369,7 @@ class Exchange:
updated = tiers.get("updated")
if updated:
updated_dt = parser.parse(updated)
if updated_dt < datetime.now(timezone.utc) - cache_time:
if updated_dt < datetime.now(UTC) - cache_time:
logger.info("Cached leverage tiers are outdated. Will update.")
return None
return tiers.get("data")

@@ -3416,20 +3428,30 @@ class Exchange:
# Find the appropriate tier based on stake_amount
prior_max_lev = None
for tier in pair_tiers:
# Adjust notional by leverage to do a proper comparison
min_stake = tier["minNotional"] / (prior_max_lev or tier["maxLeverage"])
max_stake = tier["maxNotional"] / tier["maxLeverage"]
prior_max_lev = tier["maxLeverage"]
# Adjust notional by leverage to do a proper comparison
if min_stake <= stake_amount <= max_stake:
return tier["maxLeverage"]
if stake_amount < min_stake and stake_amount <= max_stake:
# TODO: Remove this warning eventually
# Code could be simplified by removing the check for min-stake in the above
# condition, making this branch unnecessary.
logger.warning(
f"Fallback to next higher leverage tier for {pair}, stake: {stake_amount}, "
f"min_stake: {min_stake}."
)
return tier["maxLeverage"]

# else: # if on the last tier
if stake_amount > max_stake:
# If stake is > than max tradeable amount
raise InvalidOrderException(f"Amount {stake_amount} too high for {pair}")
raise InvalidOrderException(f"Stake amount {stake_amount} too high for {pair}")

raise OperationalException(
"Looped through all tiers without finding a max leverage. Should never be reached"
f"Looped through all tiers without finding a max leverage for {pair}. "
"Should never be reached."
)

elif self.trading_mode == TradingMode.MARGIN: # Search markets.limits for max lev

@@ -3571,7 +3593,7 @@ class Exchange:
mark_price_type = CandleType.from_string(self._ft_has["mark_ohlcv_price"])

if not close_date:
close_date = datetime.now(timezone.utc)
close_date = datetime.now(UTC)
since_ms = dt_ts(timeframe_to_prev_date(timeframe, open_date))

mark_comb: PairWithTimeframe = (pair, timeframe, mark_price_type)
@@ -24,6 +24,7 @@ class FtHas(TypedDict, total=False):
ohlcv_require_since: bool
ohlcv_volume_currency: str
ohlcv_candle_limit_per_timeframe: dict[str, int]
always_require_api_keys: bool
# Tickers
tickers_have_quoteVolume: bool
tickers_have_percentage: bool
@@ -3,7 +3,7 @@ Exchange support utils
"""

import inspect
from datetime import datetime, timedelta, timezone
from datetime import UTC, datetime, timedelta
from math import ceil, floor, isnan
from typing import Any

@@ -148,7 +148,7 @@ def date_minus_candles(timeframe: str, candle_count: int, date: datetime | None

"""
if not date:
date = datetime.now(timezone.utc)
date = datetime.now(UTC)

tf_min = timeframe_to_minutes(timeframe)
new_date = timeframe_to_prev_date(timeframe, date) - timedelta(minutes=tf_min * candle_count)
@@ -1,4 +1,4 @@
from datetime import datetime, timezone
from datetime import UTC, datetime

import ccxt
from ccxt import ROUND_DOWN, ROUND_UP

@@ -59,7 +59,7 @@ def timeframe_to_prev_date(timeframe: str, date: datetime | None = None) -> date
:returns: date of previous candle (with utc timezone)
"""
if not date:
date = datetime.now(timezone.utc)
date = datetime.now(UTC)

new_timestamp = ccxt.Exchange.round_timeframe(timeframe, dt_ts(date), ROUND_DOWN) // 1000
return dt_from_ts(new_timestamp)

@@ -73,6 +73,6 @@ def timeframe_to_next_date(timeframe: str, date: datetime | None = None) -> date
:returns: date of next candle (with utc timezone)
"""
if not date:
date = datetime.now(timezone.utc)
date = datetime.now(UTC)
new_timestamp = ccxt.Exchange.round_timeframe(timeframe, dt_ts(date), ROUND_UP) // 1000
return dt_from_ts(new_timestamp)
24 freqtrade/exchange/luno.py (new file)
@@ -0,0 +1,24 @@
import logging

from freqtrade.exchange import Exchange
from freqtrade.exchange.exchange_types import FtHas


logger = logging.getLogger(__name__)


class Luno(Exchange):
"""
Luno exchange class. Contains adjustments needed for Freqtrade to work
with this exchange.

Please note that this exchange is not included in the list of exchanges
officially supported by the Freqtrade development team. So some features
may still not work as expected.
"""

_ft_has: FtHas = {
"ohlcv_has_history": False, # Only provides the last 1000 candles
"always_require_api_keys": True, # Requires API keys to fetch candles
"trades_has_history": False, # Only the last 24h are available
}
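Tying this back to the `always_require_api_keys` flag introduced in `exchange_types.py` and honoured in `Exchange.__init__` above: Luno opts out of the dry-run credential purge because it needs API keys even for public candle data. A tiny illustrative sketch of that decision (the helper name and shape are assumptions, not freqtrade API):

```python
def should_purge_credentials(ft_has: dict, dry_run: bool) -> bool:
    # Credentials are only blanked for dry-run, and never for exchanges that
    # declare always_require_api_keys (such as the Luno class above).
    return dry_run and not ft_has.get("always_require_api_keys", False)


assert should_purge_credentials({"always_require_api_keys": True}, dry_run=True) is False
assert should_purge_credentials({}, dry_run=True) is True
assert should_purge_credentials({}, dry_run=False) is False
```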
@@ -3,7 +3,7 @@ import importlib
import logging
from abc import abstractmethod
from collections.abc import Callable
from datetime import datetime, timezone
from datetime import UTC, datetime
from pathlib import Path
from typing import Any

@@ -239,7 +239,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
pair, refresh=False, side="exit", is_short=trade.is_short
)

now = datetime.now(timezone.utc).timestamp()
now = datetime.now(UTC).timestamp()
trade_duration = int((now - trade.open_date_utc.timestamp()) / self.base_tf_seconds)
current_profit = trade.calc_profit_ratio(current_rate)
if trade.is_short:
@@ -5,7 +5,7 @@ import re
import shutil
import threading
import warnings
from datetime import datetime, timedelta, timezone
from datetime import UTC, datetime, timedelta
from pathlib import Path
from typing import Any, TypedDict

@@ -116,7 +116,7 @@ class FreqaiDataDrawer:
if metric not in self.metric_tracker[pair]:
self.metric_tracker[pair][metric] = {"timestamp": [], "value": []}

timestamp = int(datetime.now(timezone.utc).timestamp())
timestamp = int(datetime.now(UTC).timestamp())
self.metric_tracker[pair][metric]["value"].append(value)
self.metric_tracker[pair][metric]["timestamp"].append(timestamp)
@@ -3,7 +3,7 @@ import inspect
import logging
import random
import shutil
from datetime import datetime, timezone
from datetime import UTC, datetime
from pathlib import Path
from typing import Any

@@ -341,7 +341,7 @@ class FreqaiDataKitchen:
full_timerange = TimeRange.parse_timerange(tr)
config_timerange = TimeRange.parse_timerange(self.config["timerange"])
if config_timerange.stopts == 0:
config_timerange.stopts = int(datetime.now(tz=timezone.utc).timestamp())
config_timerange.stopts = int(datetime.now(tz=UTC).timestamp())
timerange_train = copy.deepcopy(full_timerange)
timerange_backtest = copy.deepcopy(full_timerange)

@@ -525,7 +525,7 @@ class FreqaiDataKitchen:
:return:
bool = If the model is expired or not.
"""
time = datetime.now(tz=timezone.utc).timestamp()
time = datetime.now(tz=UTC).timestamp()
elapsed_time = (time - trained_timestamp) / 3600 # hours
max_time = self.freqai_config.get("expiration_hours", 0)
if max_time > 0:

@@ -536,7 +536,7 @@ class FreqaiDataKitchen:
def check_if_new_training_required(
self, trained_timestamp: int
) -> tuple[bool, TimeRange, TimeRange]:
time = datetime.now(tz=timezone.utc).timestamp()
time = datetime.now(tz=UTC).timestamp()
trained_timerange = TimeRange()
data_load_timerange = TimeRange()
@@ -3,7 +3,7 @@ import threading
import time
from abc import ABC, abstractmethod
from collections import deque
from datetime import datetime, timezone
from datetime import UTC, datetime
from pathlib import Path
from typing import Any, Literal

@@ -76,7 +76,7 @@ class IFreqaiModel(ABC):

self.dd = FreqaiDataDrawer(Path(self.full_path), self.config)
# set current candle to arbitrary historical date
self.current_candle: datetime = datetime.fromtimestamp(637887600, tz=timezone.utc)
self.current_candle: datetime = datetime.fromtimestamp(637887600, tz=UTC)
self.dd.current_candle = self.current_candle
self.scanning = False
self.ft_params = self.freqai_info["feature_parameters"]
@@ -1,5 +1,5 @@
 import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from pathlib import Path
 from typing import Any

@@ -64,7 +64,7 @@ def get_required_data_timerange(config: Config) -> TimeRange:
 Used to compute the required data download time range
 for auto data-download in FreqAI
 """
-time = datetime.now(tz=timezone.utc).timestamp()
+time = datetime.now(tz=UTC).timestamp()

 timeframes = config["freqai"]["feature_parameters"].get("include_timeframes")
@@ -5,7 +5,7 @@ Freqtrade is the main module of this bot. It contains the class Freqtrade()
 import logging
 import traceback
 from copy import deepcopy
-from datetime import datetime, time, timedelta, timezone
+from datetime import UTC, datetime, time, timedelta
 from math import isclose
 from threading import Lock
 from time import sleep
@@ -14,7 +14,7 @@ from typing import Any
 from schedule import Scheduler

 from freqtrade import constants
-from freqtrade.configuration import validate_config_consistency
+from freqtrade.configuration import remove_exchange_credentials, validate_config_consistency
 from freqtrade.constants import BuySell, Config, EntryExecuteMode, ExchangeConfig, LongShort
 from freqtrade.data.converter import order_book_to_dataframe
 from freqtrade.data.dataprovider import DataProvider
@@ -37,7 +37,6 @@ from freqtrade.exceptions import (
 from freqtrade.exchange import (
 ROUND_DOWN,
 ROUND_UP,
-remove_exchange_credentials,
 timeframe_to_minutes,
 timeframe_to_next_date,
 timeframe_to_seconds,
@@ -267,7 +266,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
)
|
||||
|
||||
strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)(
|
||||
current_time=datetime.now(timezone.utc)
|
||||
current_time=datetime.now(UTC)
|
||||
)
|
||||
|
||||
with self._measure_execution:
|
||||
@@ -297,7 +296,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
self._schedule.run_pending()
|
||||
Trade.commit()
|
||||
self.rpc.process_msg_queue(self.dataprovider._msg_queue)
|
||||
self.last_process = datetime.now(timezone.utc)
|
||||
self.last_process = datetime.now(UTC)
|
||||
|
||||
def process_stopped(self) -> None:
|
||||
"""
|
||||
@@ -422,7 +421,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
|
||||
except InvalidOrderException as e:
|
||||
logger.warning(f"Error updating Order {order.order_id} due to {e}.")
|
||||
if order.order_date_utc - timedelta(days=5) < datetime.now(timezone.utc):
|
||||
if order.order_date_utc - timedelta(days=5) < datetime.now(UTC):
|
||||
logger.warning(
|
||||
"Order is older than 5 days. Assuming order was fully cancelled."
|
||||
)
|
||||
@@ -756,7 +755,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
logger.debug(f"Calling adjust_trade_position for pair {trade.pair}")
|
||||
stake_amount, order_tag = self.strategy._adjust_trade_position_internal(
|
||||
trade=trade,
|
||||
current_time=datetime.now(timezone.utc),
|
||||
current_time=datetime.now(UTC),
|
||||
current_rate=current_entry_rate,
|
||||
current_profit=current_entry_profit,
|
||||
min_stake=min_entry_stake,
|
||||
@@ -917,7 +916,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
amount=amount,
|
||||
rate=enter_limit_requested,
|
||||
time_in_force=time_in_force,
|
||||
current_time=datetime.now(timezone.utc),
|
||||
current_time=datetime.now(UTC),
|
||||
entry_tag=enter_tag,
|
||||
side=trade_side,
|
||||
):
|
||||
@@ -988,7 +987,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
# Fee is applied twice because we make a LIMIT_BUY and LIMIT_SELL
|
||||
fee = self.exchange.get_fee(symbol=pair, taker_or_maker="maker")
|
||||
base_currency = self.exchange.get_pair_base_currency(pair)
|
||||
open_date = datetime.now(timezone.utc)
|
||||
open_date = datetime.now(UTC)
|
||||
|
||||
funding_fees = self.exchange.get_funding_fees(
|
||||
pair=pair,
|
||||
@@ -1107,7 +1106,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
)(
|
||||
pair=pair,
|
||||
trade=trade,
|
||||
current_time=datetime.now(timezone.utc),
|
||||
current_time=datetime.now(UTC),
|
||||
proposed_rate=enter_limit_requested,
|
||||
entry_tag=entry_tag,
|
||||
side=trade_side,
|
||||
@@ -1125,7 +1124,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
else:
|
||||
leverage = strategy_safe_wrapper(self.strategy.leverage, default_retval=1.0)(
|
||||
pair=pair,
|
||||
current_time=datetime.now(timezone.utc),
|
||||
current_time=datetime.now(UTC),
|
||||
current_rate=enter_limit_requested,
|
||||
proposed_leverage=1.0,
|
||||
max_leverage=max_leverage,
|
||||
@@ -1158,7 +1157,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
self.strategy.custom_stake_amount, default_retval=stake_amount
|
||||
)(
|
||||
pair=pair,
|
||||
current_time=datetime.now(timezone.utc),
|
||||
current_time=datetime.now(UTC),
|
||||
current_rate=enter_limit_requested,
|
||||
proposed_stake=stake_amount,
|
||||
min_stake=min_stake_amount,
|
||||
@@ -1223,7 +1222,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
"quote_currency": self.exchange.get_pair_quote_currency(trade.pair),
|
||||
"fiat_currency": self.config.get("fiat_display_currency", None),
|
||||
"amount": order.safe_amount_after_fee if fill else (order.safe_amount or trade.amount),
|
||||
"open_date": trade.open_date_utc or datetime.now(timezone.utc),
|
||||
"open_date": trade.open_date_utc or datetime.now(UTC),
|
||||
"current_rate": current_rate,
|
||||
"sub_trade": sub_trade,
|
||||
}
|
||||
@@ -1362,7 +1361,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
exits: list[ExitCheckTuple] = self.strategy.should_exit(
|
||||
trade,
|
||||
exit_rate,
|
||||
datetime.now(timezone.utc),
|
||||
datetime.now(UTC),
|
||||
enter=enter,
|
||||
exit_=exit_,
|
||||
force_stoploss=0,
|
||||
@@ -1497,7 +1496,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
if self.exchange.stoploss_adjust(stoploss_norm, order, side=trade.exit_side):
|
||||
# we check if the update is necessary
|
||||
update_beat = self.strategy.order_types.get("stoploss_on_exchange_interval", 60)
|
||||
upd_req = datetime.now(timezone.utc) - timedelta(seconds=update_beat)
|
||||
upd_req = datetime.now(UTC) - timedelta(seconds=update_beat)
|
||||
if trade.stoploss_last_update_utc and upd_req >= trade.stoploss_last_update_utc:
|
||||
# cancelling the current stoploss on exchange first
|
||||
logger.info(
|
||||
@@ -1584,9 +1583,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
if not_closed:
|
||||
if fully_cancelled or (
|
||||
open_order
|
||||
and self.strategy.ft_check_timed_out(
|
||||
trade, open_order, datetime.now(timezone.utc)
|
||||
)
|
||||
and self.strategy.ft_check_timed_out(trade, open_order, datetime.now(UTC))
|
||||
):
|
||||
self.handle_cancel_order(
|
||||
order, open_order, trade, constants.CANCEL_REASON["TIMEOUT"]
|
||||
@@ -1684,7 +1681,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
trade=trade,
|
||||
order=order_obj,
|
||||
pair=trade.pair,
|
||||
current_time=datetime.now(timezone.utc),
|
||||
current_time=datetime.now(UTC),
|
||||
proposed_rate=proposed_rate,
|
||||
current_order_rate=order_obj.safe_placement_price,
|
||||
entry_tag=trade.enter_tag,
|
||||
@@ -2076,7 +2073,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
)(
|
||||
pair=trade.pair,
|
||||
trade=trade,
|
||||
current_time=datetime.now(timezone.utc),
|
||||
current_time=datetime.now(UTC),
|
||||
proposed_rate=proposed_limit_rate,
|
||||
current_profit=current_profit,
|
||||
exit_tag=exit_reason,
|
||||
@@ -2107,7 +2104,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
time_in_force=time_in_force,
|
||||
exit_reason=exit_reason,
|
||||
sell_reason=exit_reason, # sellreason -> compatibility
|
||||
current_time=datetime.now(timezone.utc),
|
||||
current_time=datetime.now(UTC),
|
||||
)
|
||||
):
|
||||
logger.info(f"User denied exit for {trade.pair}.")
|
||||
@@ -2203,7 +2200,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
"enter_tag": trade.enter_tag,
|
||||
"exit_reason": trade.exit_reason,
|
||||
"open_date": trade.open_date_utc,
|
||||
"close_date": trade.close_date_utc or datetime.now(timezone.utc),
|
||||
"close_date": trade.close_date_utc or datetime.now(UTC),
|
||||
"stake_amount": trade.stake_amount,
|
||||
"stake_currency": self.config["stake_currency"],
|
||||
"base_currency": self.exchange.get_pair_base_currency(trade.pair),
|
||||
@@ -2258,7 +2255,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
"enter_tag": trade.enter_tag,
|
||||
"exit_reason": trade.exit_reason,
|
||||
"open_date": trade.open_date,
|
||||
"close_date": trade.close_date or datetime.now(timezone.utc),
|
||||
"close_date": trade.close_date or datetime.now(UTC),
|
||||
"stake_currency": self.config["stake_currency"],
|
||||
"base_currency": self.exchange.get_pair_base_currency(trade.pair),
|
||||
"quote_currency": self.exchange.get_pair_quote_currency(trade.pair),
|
||||
@@ -2339,7 +2336,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
def _update_trade_after_fill(self, trade: Trade, order: Order, send_msg: bool) -> Trade:
|
||||
if order.status in constants.NON_OPEN_EXCHANGE_STATES:
|
||||
strategy_safe_wrapper(self.strategy.order_filled, default_retval=None)(
|
||||
pair=trade.pair, trade=trade, order=order, current_time=datetime.now(timezone.utc)
|
||||
pair=trade.pair, trade=trade, order=order, current_time=datetime.now(UTC)
|
||||
)
|
||||
# If a entry order was closed, force update on stoploss on exchange
|
||||
if order.ft_order_side == trade.entry_side:
|
||||
@@ -2372,7 +2369,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
)
|
||||
profit = trade.calc_profit_ratio(current_rate)
|
||||
self.strategy.ft_stoploss_adjust(
|
||||
current_rate, trade, datetime.now(timezone.utc), profit, 0, after_fill=True
|
||||
current_rate, trade, datetime.now(UTC), profit, 0, after_fill=True
|
||||
)
|
||||
# Updating wallets when order is closed
|
||||
self.wallets.update()
|
||||
@@ -2398,7 +2395,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
|
||||
def handle_protections(self, pair: str, side: LongShort) -> None:
|
||||
# Lock pair for one candle to prevent immediate re-entries
|
||||
self.strategy.lock_pair(pair, datetime.now(timezone.utc), reason="Auto lock", side=side)
|
||||
self.strategy.lock_pair(pair, datetime.now(UTC), reason="Auto lock", side=side)
|
||||
prot_trig = self.protections.stop_per_pair(pair, side=side)
|
||||
if prot_trig:
|
||||
msg: RPCProtectionMsg = {
|
||||
|
||||
@@ -1,8 +1,8 @@
 from datetime import datetime
-from typing import Literal
+from typing import Literal, Required

 from pydantic import TypeAdapter
-from typing_extensions import Required, TypedDict
+from typing_extensions import TypedDict


 class AnnotationType(TypedDict, total=False):
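Required (and its counterpart NotRequired, moved the same way in the websocket RPC types further below) graduated from typing_extensions to the standard typing module in Python 3.11 (PEP 655), while TypedDict itself is still imported from typing_extensions here. A short sketch of how the markers read on a TypedDict; the class and field names are made up for illustration and are not the freqtrade schema:

    from typing import NotRequired, Required, TypedDict

    class Annotation(TypedDict, total=False):
        # With total=False every key is optional unless marked Required.
        kind: Required[str]       # must always be present
        label: NotRequired[str]   # may be omitted

    ok: Annotation = {"kind": "area"}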
@@ -58,7 +58,7 @@ def setup_logging_pre() -> None:
 FT_LOGGING_CONFIG = {
 "version": 1,
 # "incremental": True,
-# "disable_existing_loggers": False,
+"disable_existing_loggers": False,
 "formatters": {
 "basic": {"format": "%(message)s"},
 "standard": {
@@ -223,7 +223,7 @@ def setup_logging(config: Config) -> None:
 logger.info("Enabling colorized output.")
 error_console._color_system = error_console._detect_color_system()

-logging.info("Logfile configured")
+logger.info("Logfile configured")

 # Set verbosity levels
 logging.root.setLevel(logging.INFO if verbosity < 1 else logging.DEBUG)
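Two small logging fixes sit in this file: the dict-based config now sets disable_existing_loggers explicitly, and the "Logfile configured" message goes through the module's named logger rather than the root logging module. With logging.config.dictConfig, disable_existing_loggers defaults to True, which silently disables any logger created before the config is applied; setting it to False keeps those loggers working. A minimal standalone sketch of that effect (not the freqtrade config itself):

    import logging
    import logging.config

    early = logging.getLogger("created.before.dictConfig")

    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,   # keep loggers such as early enabled
        "root": {"level": "INFO", "handlers": []},
    })

    assert not early.disabled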
@@ -10,8 +10,8 @@ from typing import Any


 # check min. python version
-if sys.version_info < (3, 10):  # pragma: no cover  # noqa: UP036
-    sys.exit("Freqtrade requires Python version >= 3.10")
+if sys.version_info < (3, 11):  # pragma: no cover  # noqa: UP036
+    sys.exit("Freqtrade requires Python version >= 3.11")

 from freqtrade import __version__
 from freqtrade.commands import Arguments
@@ -1,6 +1,6 @@
|
||||
import logging
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
from pandas import DataFrame
|
||||
@@ -38,7 +38,7 @@ class BaseAnalysis:
|
||||
|
||||
@staticmethod
|
||||
def dt_to_timestamp(dt: datetime):
|
||||
timestamp = int(dt.replace(tzinfo=timezone.utc).timestamp())
|
||||
timestamp = int(dt.replace(tzinfo=UTC).timestamp())
|
||||
return timestamp
|
||||
|
||||
def fill_full_varholder(self):
|
||||
@@ -48,12 +48,12 @@ class BaseAnalysis:
|
||||
parsed_timerange = TimeRange.parse_timerange(self.local_config["timerange"])
|
||||
|
||||
if parsed_timerange.startdt is None:
|
||||
self.full_varHolder.from_dt = datetime.fromtimestamp(0, tz=timezone.utc)
|
||||
self.full_varHolder.from_dt = datetime.fromtimestamp(0, tz=UTC)
|
||||
else:
|
||||
self.full_varHolder.from_dt = parsed_timerange.startdt
|
||||
|
||||
if parsed_timerange.stopdt is None:
|
||||
self.full_varHolder.to_dt = datetime.now(timezone.utc)
|
||||
self.full_varHolder.to_dt = datetime.now(UTC)
|
||||
else:
|
||||
self.full_varHolder.to_dt = parsed_timerange.stopdt
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ and will be sent to the hyperopt worker processes.
|
||||
import logging
|
||||
import sys
|
||||
import warnings
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
@@ -273,7 +273,7 @@ class HyperOptimizer:
|
||||
Keep this function as optimized as possible!
|
||||
"""
|
||||
HyperoptStateContainer.set_state(HyperoptState.OPTIMIZE)
|
||||
backtest_start_time = datetime.now(timezone.utc)
|
||||
backtest_start_time = datetime.now(UTC)
|
||||
|
||||
# Apply parameters
|
||||
if HyperoptTools.has_space(self.config, "buy"):
|
||||
@@ -330,7 +330,7 @@ class HyperOptimizer:
|
||||
bt_results = self.backtesting.backtest(
|
||||
processed=processed, start_date=self.min_date, end_date=self.max_date
|
||||
)
|
||||
backtest_end_time = datetime.now(timezone.utc)
|
||||
backtest_end_time = datetime.now(UTC)
|
||||
bt_results.update(
|
||||
{
|
||||
"backtest_start_time": int(backtest_start_time.timestamp()),
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import logging
|
||||
from collections.abc import Iterator
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
@@ -71,7 +71,7 @@ class HyperoptTools:
|
||||
"strategy_name": strategy_name,
|
||||
"params": final_params,
|
||||
"ft_stratparam_v": 1,
|
||||
"export_time": datetime.now(timezone.utc),
|
||||
"export_time": datetime.now(UTC),
|
||||
}
|
||||
logger.info(f"Dumping parameters to {filename}")
|
||||
with filename.open("w") as f:
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import logging
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import Any, Literal
|
||||
|
||||
import numpy as np
|
||||
@@ -652,9 +652,9 @@ def generate_strategy_stats(
|
||||
"max_drawdown_abs": 0.0,
|
||||
"max_drawdown_low": 0.0,
|
||||
"max_drawdown_high": 0.0,
|
||||
"drawdown_start": datetime(1970, 1, 1, tzinfo=timezone.utc),
|
||||
"drawdown_start": datetime(1970, 1, 1, tzinfo=UTC),
|
||||
"drawdown_start_ts": 0,
|
||||
"drawdown_end": datetime(1970, 1, 1, tzinfo=timezone.utc),
|
||||
"drawdown_end": datetime(1970, 1, 1, tzinfo=UTC),
|
||||
"drawdown_end_ts": 0,
|
||||
"csum_min": 0,
|
||||
"csum_max": 0,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
from enum import Enum
|
||||
from typing import ClassVar, Literal
|
||||
|
||||
@@ -114,7 +114,7 @@ class KeyValueStore:
|
||||
if kv.value_type == ValueTypesEnum.STRING:
|
||||
return kv.string_value
|
||||
if kv.value_type == ValueTypesEnum.DATETIME and kv.datetime_value is not None:
|
||||
return kv.datetime_value.replace(tzinfo=timezone.utc)
|
||||
return kv.datetime_value.replace(tzinfo=UTC)
|
||||
if kv.value_type == ValueTypesEnum.FLOAT:
|
||||
return kv.float_value
|
||||
if kv.value_type == ValueTypesEnum.INT:
|
||||
@@ -156,7 +156,7 @@ class KeyValueStore:
|
||||
)
|
||||
if kv is None or kv.datetime_value is None:
|
||||
return None
|
||||
return kv.datetime_value.replace(tzinfo=timezone.utc)
|
||||
return kv.datetime_value.replace(tzinfo=UTC)
|
||||
|
||||
@staticmethod
|
||||
def get_float_value(key: KeyStoreKeys) -> float | None:
|
||||
@@ -207,5 +207,5 @@ def set_startup_time() -> None:
|
||||
if t is not None:
|
||||
KeyValueStore.store_value("bot_start_time", t.open_date_utc)
|
||||
else:
|
||||
KeyValueStore.store_value("bot_start_time", datetime.now(timezone.utc))
|
||||
KeyValueStore.store_value("startup_time", datetime.now(timezone.utc))
|
||||
KeyValueStore.store_value("bot_start_time", datetime.now(UTC))
|
||||
KeyValueStore.store_value("startup_time", datetime.now(UTC))
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any, ClassVar
|
||||
|
||||
from sqlalchemy import ScalarResult, String, or_, select
|
||||
@@ -69,11 +69,9 @@ class PairLock(ModelBase):
|
||||
"id": self.id,
|
||||
"pair": self.pair,
|
||||
"lock_time": self.lock_time.strftime(DATETIME_PRINT_FORMAT),
|
||||
"lock_timestamp": int(self.lock_time.replace(tzinfo=timezone.utc).timestamp() * 1000),
|
||||
"lock_timestamp": int(self.lock_time.replace(tzinfo=UTC).timestamp() * 1000),
|
||||
"lock_end_time": self.lock_end_time.strftime(DATETIME_PRINT_FORMAT),
|
||||
"lock_end_timestamp": int(
|
||||
self.lock_end_time.replace(tzinfo=timezone.utc).timestamp() * 1000
|
||||
),
|
||||
"lock_end_timestamp": int(self.lock_end_time.replace(tzinfo=UTC).timestamp() * 1000),
|
||||
"reason": self.reason,
|
||||
"side": self.side,
|
||||
"active": self.active,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import logging
|
||||
from collections.abc import Sequence
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
@@ -52,7 +52,7 @@ class PairLocks:
|
||||
"""
|
||||
lock = PairLock(
|
||||
pair=pair,
|
||||
lock_time=now or datetime.now(timezone.utc),
|
||||
lock_time=now or datetime.now(UTC),
|
||||
lock_end_time=timeframe_to_next_date(PairLocks.timeframe, until),
|
||||
reason=reason,
|
||||
side=side,
|
||||
@@ -77,7 +77,7 @@ class PairLocks:
|
||||
:param side: Side get locks for, can be 'long', 'short', '*' or None
|
||||
"""
|
||||
if not now:
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
|
||||
if PairLocks.use_db:
|
||||
return PairLock.query_pair_locks(pair, now, side).all()
|
||||
@@ -114,7 +114,7 @@ class PairLocks:
|
||||
defaults to datetime.now(timezone.utc)
|
||||
"""
|
||||
if not now:
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
|
||||
logger.info(f"Releasing all locks for {pair}.")
|
||||
locks = PairLocks.get_pair_locks(pair, now, side=side)
|
||||
@@ -132,7 +132,7 @@ class PairLocks:
|
||||
defaults to datetime.now(timezone.utc)
|
||||
"""
|
||||
if not now:
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
|
||||
if PairLocks.use_db:
|
||||
# used in live modes
|
||||
@@ -161,7 +161,7 @@ class PairLocks:
|
||||
defaults to datetime.now(timezone.utc)
|
||||
"""
|
||||
if not now:
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
|
||||
return len(PairLocks.get_pair_locks("*", now, side)) > 0
|
||||
|
||||
@@ -173,7 +173,7 @@ class PairLocks:
|
||||
defaults to datetime.now(timezone.utc)
|
||||
"""
|
||||
if not now:
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
|
||||
return len(PairLocks.get_pair_locks(pair, now, side)) > 0 or PairLocks.is_global_lock(
|
||||
now, side
|
||||
|
||||
@@ -6,9 +6,9 @@ import logging
|
||||
from collections import defaultdict
|
||||
from collections.abc import Sequence
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
from math import isclose
|
||||
from typing import Any, ClassVar, Optional, cast
|
||||
from typing import Any, ClassVar, Optional, Self, cast
|
||||
|
||||
from sqlalchemy import (
|
||||
Enum,
|
||||
@@ -25,7 +25,6 @@ from sqlalchemy import (
|
||||
select,
|
||||
)
|
||||
from sqlalchemy.orm import Mapped, lazyload, mapped_column, relationship, validates
|
||||
from typing_extensions import Self
|
||||
|
||||
from freqtrade.constants import (
|
||||
CANCELED_EXCHANGE_STATES,
|
||||
@@ -121,14 +120,12 @@ class Order(ModelBase):
|
||||
@property
|
||||
def order_date_utc(self) -> datetime:
|
||||
"""Order-date with UTC timezoneinfo"""
|
||||
return self.order_date.replace(tzinfo=timezone.utc)
|
||||
return self.order_date.replace(tzinfo=UTC)
|
||||
|
||||
@property
|
||||
def order_filled_utc(self) -> datetime | None:
|
||||
"""last order-date with UTC timezoneinfo"""
|
||||
return (
|
||||
self.order_filled_date.replace(tzinfo=timezone.utc) if self.order_filled_date else None
|
||||
)
|
||||
return self.order_filled_date.replace(tzinfo=UTC) if self.order_filled_date else None
|
||||
|
||||
@property
|
||||
def safe_amount(self) -> float:
|
||||
@@ -229,7 +226,7 @@ class Order(ModelBase):
|
||||
self.order_filled_date = dt_from_ts(
|
||||
safe_value_fallback(order, "lastTradeTimestamp", default_value=dt_ts())
|
||||
)
|
||||
self.order_update_date = datetime.now(timezone.utc)
|
||||
self.order_update_date = datetime.now(UTC)
|
||||
|
||||
def to_ccxt_object(self, stopPriceName: str = "stopPrice") -> dict[str, Any]:
|
||||
order: dict[str, Any] = {
|
||||
@@ -286,7 +283,7 @@ class Order(ModelBase):
|
||||
self.order_date.strftime(DATETIME_PRINT_FORMAT) if self.order_date else None
|
||||
),
|
||||
"order_timestamp": (
|
||||
int(self.order_date.replace(tzinfo=timezone.utc).timestamp() * 1000)
|
||||
int(self.order_date.replace(tzinfo=UTC).timestamp() * 1000)
|
||||
if self.order_date
|
||||
else None
|
||||
),
|
||||
@@ -533,7 +530,7 @@ class LocalTrade:
|
||||
|
||||
@property
|
||||
def open_date_utc(self):
|
||||
return self.open_date.replace(tzinfo=timezone.utc)
|
||||
return self.open_date.replace(tzinfo=UTC)
|
||||
|
||||
@property
|
||||
def stoploss_last_update_utc(self):
|
||||
@@ -543,7 +540,7 @@ class LocalTrade:
|
||||
|
||||
@property
|
||||
def close_date_utc(self):
|
||||
return self.close_date.replace(tzinfo=timezone.utc) if self.close_date else None
|
||||
return self.close_date.replace(tzinfo=UTC) if self.close_date else None
|
||||
|
||||
@property
|
||||
def entry_side(self) -> str:
|
||||
@@ -1056,7 +1053,7 @@ class LocalTrade:
|
||||
return zero
|
||||
|
||||
open_date = self.open_date.replace(tzinfo=None)
|
||||
now = (self.close_date or datetime.now(timezone.utc)).replace(tzinfo=None)
|
||||
now = (self.close_date or datetime.now(UTC)).replace(tzinfo=None)
|
||||
sec_per_hour = FtPrecise(3600)
|
||||
total_seconds = FtPrecise((now - open_date).total_seconds())
|
||||
hours = total_seconds / sec_per_hour or zero
|
||||
@@ -1572,12 +1569,12 @@ class LocalTrade:
|
||||
fee_close=data["fee_close"],
|
||||
fee_close_cost=data.get("fee_close_cost"),
|
||||
fee_close_currency=data.get("fee_close_currency"),
|
||||
open_date=datetime.fromtimestamp(data["open_timestamp"] // 1000, tz=timezone.utc),
|
||||
open_date=datetime.fromtimestamp(data["open_timestamp"] // 1000, tz=UTC),
|
||||
open_rate=data["open_rate"],
|
||||
open_rate_requested=data.get("open_rate_requested", data["open_rate"]),
|
||||
open_trade_value=data.get("open_trade_value"),
|
||||
close_date=(
|
||||
datetime.fromtimestamp(data["close_timestamp"] // 1000, tz=timezone.utc)
|
||||
datetime.fromtimestamp(data["close_timestamp"] // 1000, tz=UTC)
|
||||
if data["close_timestamp"]
|
||||
else None
|
||||
),
|
||||
@@ -1622,7 +1619,7 @@ class LocalTrade:
|
||||
if order.get("order_date")
|
||||
else None,
|
||||
order_filled_date=(
|
||||
datetime.fromtimestamp(order["order_filled_timestamp"] // 1000, tz=timezone.utc)
|
||||
datetime.fromtimestamp(order["order_filled_timestamp"] // 1000, tz=UTC)
|
||||
if order["order_filled_timestamp"]
|
||||
else None
|
||||
),
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
|
||||
import pandas as pd
|
||||
@@ -638,7 +638,7 @@ def load_and_plot_trades(config: Config):
|
||||
exchange = ExchangeResolver.load_exchange(config)
|
||||
IStrategy.dp = DataProvider(config, exchange)
|
||||
strategy.ft_bot_start()
|
||||
strategy_safe_wrapper(strategy.bot_loop_start)(current_time=datetime.now(timezone.utc))
|
||||
strategy_safe_wrapper(strategy.bot_loop_start)(current_time=datetime.now(UTC))
|
||||
plot_elements = init_plotscript(config, list(exchange.markets), strategy.startup_candle_count)
|
||||
timerange = plot_elements["timerange"]
|
||||
trades = plot_elements["trades"]
|
||||
|
||||
@@ -3,7 +3,7 @@ Protection manager class
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.constants import Config, LongShort
|
||||
@@ -49,7 +49,7 @@ class ProtectionManager:
|
||||
|
||||
def global_stop(self, now: datetime | None = None, side: LongShort = "long") -> PairLock | None:
|
||||
if not now:
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
result = None
|
||||
for protection_handler in self._protection_handlers:
|
||||
if protection_handler.has_global_stop:
|
||||
@@ -65,7 +65,7 @@ class ProtectionManager:
|
||||
self, pair, now: datetime | None = None, side: LongShort = "long"
|
||||
) -> PairLock | None:
|
||||
if not now:
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
result = None
|
||||
for protection_handler in self._protection_handlers:
|
||||
if protection_handler.has_local_stop:
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.constants import Config, LongShort
|
||||
@@ -127,7 +127,7 @@ class IProtection(LoggingMixin, ABC):
|
||||
max_date: datetime = max([trade.close_date for trade in trades if trade.close_date])
|
||||
# coming from Database, tzinfo is not set.
|
||||
if max_date.tzinfo is None:
|
||||
max_date = max_date.replace(tzinfo=timezone.utc)
|
||||
max_date = max_date.replace(tzinfo=UTC)
|
||||
|
||||
if self._unlock_at is not None:
|
||||
# unlock_at case with fixed hour of the day
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import logging
|
||||
import secrets
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import Any
|
||||
|
||||
import jwt
|
||||
@@ -89,15 +89,15 @@ async def validate_ws_token(
|
||||
def create_token(data: dict, secret_key: str, token_type: str = "access") -> str: # noqa: S107
|
||||
to_encode = data.copy()
|
||||
if token_type == "access": # noqa: S105
|
||||
expire = datetime.now(timezone.utc) + timedelta(minutes=15)
|
||||
expire = datetime.now(UTC) + timedelta(minutes=15)
|
||||
elif token_type == "refresh": # noqa: S105
|
||||
expire = datetime.now(timezone.utc) + timedelta(days=30)
|
||||
expire = datetime.now(UTC) + timedelta(days=30)
|
||||
else:
|
||||
raise ValueError()
|
||||
to_encode.update(
|
||||
{
|
||||
"exp": expire,
|
||||
"iat": datetime.now(timezone.utc),
|
||||
"iat": datetime.now(UTC),
|
||||
"type": token_type,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -8,6 +8,7 @@ from typing import Any
 from fastapi import APIRouter, BackgroundTasks, Depends
 from fastapi.exceptions import HTTPException

+from freqtrade.configuration import remove_exchange_credentials
 from freqtrade.configuration.config_validation import validate_config_consistency
 from freqtrade.constants import Config
 from freqtrade.data.btanalysis import (
@@ -20,7 +21,6 @@ from freqtrade.data.btanalysis import (
 )
 from freqtrade.enums import BacktestState
 from freqtrade.exceptions import ConfigurationError, DependencyException, OperationalException
-from freqtrade.exchange.common import remove_exchange_credentials
 from freqtrade.ft_types import get_BacktestResultType_default
 from freqtrade.misc import deep_merge_dicts, is_file_in_dir
 from freqtrade.rpc.api_server.api_schemas import (
@@ -1,7 +1,7 @@
-from typing import Any, Literal
+from typing import Any, Literal, NotRequired
 from uuid import uuid4

-from typing_extensions import NotRequired, TypedDict
+from typing_extensions import TypedDict

 from freqtrade.exchange.exchange import Exchange
@@ -102,7 +102,7 @@ class WebSocketChannel:
 self._send_times.append(total_time)

 self._calc_send_limit()
-except asyncio.TimeoutError:
+except TimeoutError:
 logger.info(f"Connection for {self} timed out, disconnecting")
 raise

@@ -201,8 +201,8 @@ class WebSocketChannel:
 try:
 await task
 except (
+TimeoutError,
 asyncio.CancelledError,
-asyncio.TimeoutError,
 WebSocketDisconnect,
 ConnectionClosed,
 RuntimeError,

@@ -266,7 +266,7 @@ class ExternalMessageConsumer:
 except Exception as e:
 logger.exception(f"Error handling producer message: {e}")

-except (asyncio.TimeoutError, websockets.exceptions.ConnectionClosed):
+except (TimeoutError, websockets.exceptions.ConnectionClosed):
 # We haven't received data yet. Check the connection and continue.
 try:
 # ping
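The asyncio.TimeoutError changes in this file (and in the websocket client script further down) lean on another Python 3.11 cleanup: asyncio.TimeoutError became an alias of the built-in TimeoutError (socket.timeout already was), so catching the built-in name covers all of them. A minimal sketch:

    import asyncio
    import socket

    # On Python 3.11+ these are the same class.
    assert asyncio.TimeoutError is TimeoutError
    assert socket.timeout is TimeoutError

    async def main() -> None:
        try:
            await asyncio.wait_for(asyncio.sleep(1), timeout=0.01)
        except TimeoutError:   # also catches what used to be asyncio.TimeoutError
            print("timed out")

    asyncio.run(main())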
@@ -5,7 +5,7 @@ This module contains class to define a RPC communications
|
||||
import logging
|
||||
from abc import abstractmethod
|
||||
from collections.abc import Generator, Sequence
|
||||
from datetime import date, datetime, timedelta, timezone
|
||||
from datetime import UTC, date, datetime, timedelta
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import psutil
|
||||
@@ -375,7 +375,7 @@ class RPC:
|
||||
"""
|
||||
:param timeunit: Valid entries are 'days', 'weeks', 'months'
|
||||
"""
|
||||
start_date = datetime.now(timezone.utc).date()
|
||||
start_date = datetime.now(UTC).date()
|
||||
if timeunit == "weeks":
|
||||
# weekly
|
||||
start_date = start_date - timedelta(days=start_date.weekday()) # Monday
|
||||
@@ -1099,7 +1099,7 @@ class RPC:
|
||||
trade = Trade.get_trades(trade_filter=[Trade.id == trade_id]).first()
|
||||
if not trade:
|
||||
logger.warning("delete trade: Invalid argument received")
|
||||
raise RPCException("invalid argument")
|
||||
raise RPCException(f"Trade with id '{trade_id}' not found.")
|
||||
|
||||
# Try cancelling regular order if that exists
|
||||
for open_order in trade.open_orders:
|
||||
@@ -1120,13 +1120,16 @@ class RPC:
|
||||
c_count += 1
|
||||
except ExchangeError:
|
||||
pass
|
||||
|
||||
trade_pair = trade.pair
|
||||
trade.delete()
|
||||
self._freqtrade.wallets.update()
|
||||
return {
|
||||
"result": "success",
|
||||
"trade_id": trade_id,
|
||||
"result_msg": f"Deleted trade {trade_id}. Closed {c_count} open orders.",
|
||||
"result_msg": (
|
||||
f"Deleted trade #{trade_id} for pair {trade_pair}. "
|
||||
f"Closed {c_count} open orders."
|
||||
),
|
||||
"cancel_order_count": c_count,
|
||||
}
|
||||
|
||||
@@ -1264,7 +1267,7 @@ class RPC:
|
||||
|
||||
for lock in locks:
|
||||
lock.active = False
|
||||
lock.lock_end_time = datetime.now(timezone.utc)
|
||||
lock.lock_end_time = datetime.now(UTC)
|
||||
|
||||
Trade.commit()
|
||||
|
||||
|
||||
@@ -1488,7 +1488,7 @@ class Telegram(RPCHandler):
 trade_id = int(context.args[0])
 msg = self._rpc._rpc_delete(trade_id)
 await self._send_msg(
-f"`{msg['result_msg']}`\n"
+f"{msg['result_msg']}\n"
 "Please make sure to take care of this asset on the exchange manually."
 )
@@ -5,7 +5,7 @@ This module defines the interface to apply for strategies
|
||||
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from math import isinf, isnan
|
||||
|
||||
from pandas import DataFrame
|
||||
@@ -1149,7 +1149,7 @@ class IStrategy(ABC, HyperStrategyMixin):
|
||||
manually from within the strategy, to allow an easy way to unlock pairs.
|
||||
:param pair: Unlock pair to allow trading again
|
||||
"""
|
||||
PairLocks.unlock_pair(pair, datetime.now(timezone.utc))
|
||||
PairLocks.unlock_pair(pair, datetime.now(UTC))
|
||||
|
||||
def unlock_reason(self, reason: str) -> None:
|
||||
"""
|
||||
@@ -1158,7 +1158,7 @@ class IStrategy(ABC, HyperStrategyMixin):
|
||||
manually from within the strategy, to allow an easy way to unlock pairs.
|
||||
:param reason: Unlock pairs to allow trading again
|
||||
"""
|
||||
PairLocks.unlock_reason(reason, datetime.now(timezone.utc))
|
||||
PairLocks.unlock_reason(reason, datetime.now(UTC))
|
||||
|
||||
def is_pair_locked(
|
||||
self, pair: str, *, candle_date: datetime | None = None, side: str = "*"
|
||||
|
||||
@@ -1,5 +1,5 @@
 import re
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from time import time

 import humanize
@@ -9,7 +9,7 @@ from freqtrade.constants import DATETIME_PRINT_FORMAT

 def dt_now() -> datetime:
 """Return the current datetime in UTC."""
-return datetime.now(timezone.utc)
+return datetime.now(UTC)


 def dt_utc(
@@ -22,7 +22,7 @@ def dt_utc(
 microsecond: int = 0,
 ) -> datetime:
 """Return a datetime in UTC."""
-return datetime(year, month, day, hour, minute, second, microsecond, tzinfo=timezone.utc)
+return datetime(year, month, day, hour, minute, second, microsecond, tzinfo=UTC)


 def dt_ts(dt: datetime | None = None) -> int:
@@ -68,7 +68,7 @@ def dt_from_ts(timestamp: float) -> datetime:
 if timestamp > 1e10:
 # Timezone in ms - convert to seconds
 timestamp /= 1000
-return datetime.fromtimestamp(timestamp, tz=timezone.utc)
+return datetime.fromtimestamp(timestamp, tz=UTC)


 def shorten_date(_date: str) -> str:
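dt_from_ts above accepts either second- or millisecond-based timestamps and normalises them with a simple magnitude check: anything above 1e10 (roughly the year 2286 when read as seconds) is treated as milliseconds. A standalone sketch showing both inputs resolve to the same instant:

    from datetime import UTC, datetime

    def from_ts(timestamp: float) -> datetime:
        # Values above 1e10 are assumed to be milliseconds.
        if timestamp > 1e10:
            timestamp /= 1000
        return datetime.fromtimestamp(timestamp, tz=UTC)

    assert from_ts(1_700_000_000) == from_ts(1_700_000_000_000)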
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime

 from cachetools import TTLCache

@@ -11,7 +11,7 @@ class PeriodicCache(TTLCache):

 def __init__(self, maxsize, ttl, getsizeof=None):
 def local_timer():
-ts = datetime.now(timezone.utc).timestamp()
+ts = datetime.now(UTC).timestamp()
 offset = ts % ttl
 return ts - offset
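The local_timer above replaces TTLCache's wall-clock timer with one that rounds down to the start of the current ttl-sized period, so every entry written inside a period expires together at the period boundary (for example on the turn of each hour) rather than ttl seconds after its own insertion. A hedged sketch of that rounding, separate from the cachetools machinery:

    from datetime import UTC, datetime

    def period_start(ts: float, ttl: int) -> float:
        # Round a timestamp down to the start of its ttl-sized period.
        return ts - (ts % ttl)

    ttl = 3600  # one-hour periods
    now = datetime.now(UTC).timestamp()
    expires_at = period_start(now, ttl) + ttl   # all keys written this hour expire here
    print(datetime.fromtimestamp(expires_at, tz=UTC))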
@@ -1,7 +1,7 @@
 from freqtrade_client.ft_rest_client import FtRestClient


-__version__ = "2025.6-dev"
+__version__ = "2025.7-dev"

 if "dev" in __version__:
 from pathlib import Path
@@ -13,14 +13,13 @@ authors = [
|
||||
|
||||
description = "Freqtrade - Client scripts"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.10"
|
||||
requires-python = ">=3.11"
|
||||
license = {text = "GPLv3"}
|
||||
# license = "GPLv3"
|
||||
classifiers = [
|
||||
"Environment :: Console",
|
||||
"Intended Audience :: Science/Research",
|
||||
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
|
||||
@@ -61,11 +61,14 @@ theme:
|
||||
name: material
|
||||
logo: "images/logo.png"
|
||||
favicon: "images/logo.png"
|
||||
icon:
|
||||
repo: fontawesome/brands/github
|
||||
custom_dir: "docs/overrides"
|
||||
features:
|
||||
- content.code.annotate
|
||||
- search.share
|
||||
- content.code.copy
|
||||
- content.action.edit
|
||||
- navigation.top
|
||||
- navigation.footer
|
||||
palette:
|
||||
@@ -114,6 +117,9 @@ markdown_extensions:
|
||||
custom_checkbox: true
|
||||
- pymdownx.tilde
|
||||
- mdx_truly_sane_lists
|
||||
- pymdownx.emoji:
|
||||
emoji_index: !!python/name:material.extensions.emoji.twemoji
|
||||
emoji_generator: !!python/name:material.extensions.emoji.to_svg
|
||||
extra:
|
||||
version:
|
||||
provider: mike
|
||||
|
||||
@@ -13,13 +13,12 @@ authors = [
|
||||
|
||||
description = "Freqtrade - Crypto Trading Bot"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.10"
|
||||
requires-python = ">=3.11"
|
||||
license = {text = "GPLv3"}
|
||||
classifiers = [
|
||||
"Environment :: Console",
|
||||
"Intended Audience :: Science/Research",
|
||||
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
@@ -111,6 +110,7 @@ develop = [
|
||||
"pytest-xdist",
|
||||
"pytest",
|
||||
"ruff",
|
||||
"scipy-stubs",
|
||||
"time-machine",
|
||||
"types-cachetools",
|
||||
"types-filelock",
|
||||
@@ -290,6 +290,7 @@ extend-ignore = [
|
||||
"RUF010", # Use explicit conversion flag
|
||||
"RUF012", # mutable-class-default
|
||||
"RUF022", # unsorted-dunder-all
|
||||
"RUF005", # list concatenation
|
||||
]
|
||||
|
||||
[tool.ruff.lint.mccabe]
|
||||
|
||||
@@ -6,16 +6,16 @@
 -r requirements-freqai-rl.txt
 -r docs/requirements-docs.txt

-ruff==0.11.13
-mypy==1.16.0
+ruff==0.12.2
+mypy==1.16.1
 pre-commit==4.2.0
-pytest==8.4.0
+pytest==8.4.1
 pytest-asyncio==1.0.0
 pytest-cov==6.2.1
 pytest-mock==3.14.1
-pytest-random-order==1.1.1
+pytest-random-order==1.2.0
 pytest-timeout==2.4.0
-pytest-xdist==3.7.0
+pytest-xdist==3.8.0
 isort==6.0.1
 # For datetime mocking
 time-machine==2.16.0
@@ -24,6 +24,7 @@ time-machine==2.16.0
 nbconvert==7.16.6

 # mypy types
+scipy-stubs==1.16.0.2 # keep in sync with `scipy` in `requirements-hyperopt.txt`
 types-cachetools==6.0.0.20250525
 types-filelock==3.2.7
 types-requests==2.32.4.20250611
@@ -2,8 +2,8 @@
 -r requirements.txt

 # Required for hyperopt
-scipy==1.15.3
+scipy==1.16.0
 scikit-learn==1.7.0
 filelock==3.18.0
-optuna==4.3.0
+optuna==4.4.0
 cmaes==0.11.1
@@ -1,4 +1,4 @@
 # Include all requirements to run the bot.
 -r requirements.txt

-plotly==6.1.2
+plotly==6.2.0
@@ -1,4 +1,4 @@
|
||||
numpy==2.2.6
|
||||
numpy==2.3.1
|
||||
pandas==2.3.0
|
||||
bottleneck==1.5.0
|
||||
numexpr==2.11.0
|
||||
@@ -7,15 +7,15 @@ ft-pandas-ta==0.3.15
|
||||
ta-lib==0.5.5
|
||||
technical==1.5.1
|
||||
|
||||
ccxt==4.4.89
|
||||
cryptography==45.0.4
|
||||
ccxt==4.4.92
|
||||
cryptography==45.0.5
|
||||
aiohttp==3.12.13
|
||||
SQLAlchemy==2.0.41
|
||||
python-telegram-bot==22.1
|
||||
python-telegram-bot==22.2
|
||||
# can't be hard-pinned due to telegram-bot pinning httpx with ~
|
||||
httpx>=0.24.1
|
||||
humanize==4.12.3
|
||||
cachetools==6.0.0
|
||||
cachetools==6.1.0
|
||||
requests==2.32.4
|
||||
urllib3==2.5.0
|
||||
certifi==2025.6.15
|
||||
@@ -36,9 +36,9 @@ orjson==3.10.18
|
||||
sdnotify==0.3.2
|
||||
|
||||
# API Server
|
||||
fastapi==0.115.12
|
||||
fastapi==0.115.14
|
||||
pydantic==2.11.7
|
||||
uvicorn==0.34.3
|
||||
uvicorn==0.35.0
|
||||
pyjwt==2.10.1
|
||||
aiofiles==24.1.0
|
||||
psutil==7.0.0
|
||||
@@ -57,5 +57,5 @@ schedule==1.2.2
|
||||
websockets==15.0.1
|
||||
janus==2.0.0
|
||||
|
||||
ast-comments==1.2.2
|
||||
ast-comments==1.2.3
|
||||
packaging==25.0
|
||||
|
||||
@@ -234,7 +234,7 @@ async def create_client(
|
||||
|
||||
await protocol.on_message(ws, name, message)
|
||||
|
||||
except (asyncio.TimeoutError, websockets.exceptions.WebSocketException):
|
||||
except (TimeoutError, websockets.exceptions.WebSocketException):
|
||||
# Try pinging
|
||||
try:
|
||||
pong = await ws.ping()
|
||||
@@ -244,7 +244,7 @@ async def create_client(
|
||||
|
||||
continue
|
||||
|
||||
except asyncio.TimeoutError:
|
||||
except TimeoutError:
|
||||
logger.error(f"Ping timed out, retrying in {sleep_time}s")
|
||||
await asyncio.sleep(sleep_time)
|
||||
|
||||
|
||||
@@ -153,16 +153,13 @@ function Find-PythonExecutable {
|
||||
"python3.13",
|
||||
"python3.12",
|
||||
"python3.11",
|
||||
"python3.10",
|
||||
"python3",
|
||||
"C:\Users\$env:USERNAME\AppData\Local\Programs\Python\Python313\python.exe",
|
||||
"C:\Users\$env:USERNAME\AppData\Local\Programs\Python\Python312\python.exe",
|
||||
"C:\Users\$env:USERNAME\AppData\Local\Programs\Python\Python311\python.exe",
|
||||
"C:\Users\$env:USERNAME\AppData\Local\Programs\Python\Python310\python.exe",
|
||||
"C:\Python313\python.exe",
|
||||
"C:\Python312\python.exe",
|
||||
"C:\Python311\python.exe",
|
||||
"C:\Python310\python.exe"
|
||||
"C:\Python311\python.exe"
|
||||
)
|
||||
|
||||
|
||||
@@ -178,10 +175,10 @@ function Main {
|
||||
"Starting the operations..." | Out-File $LogFilePath -Append
|
||||
"Current directory: $(Get-Location)" | Out-File $LogFilePath -Append
|
||||
|
||||
# Exit on lower versions than Python 3.10 or when Python executable not found
|
||||
# Exit on lower versions than Python 3.11 or when Python executable not found
|
||||
$PythonExecutable = Find-PythonExecutable
|
||||
if ($null -eq $PythonExecutable) {
|
||||
Write-Log "No suitable Python executable found. Please ensure that Python 3.10 or higher is installed and available in the system PATH." -Level 'ERROR'
|
||||
Write-Log "No suitable Python executable found. Please ensure that Python 3.11 or higher is installed and available in the system PATH." -Level 'ERROR'
|
||||
Exit 1
|
||||
}
|
||||
|
||||
|
||||
setup.sh (8 changes)
@@ -25,7 +25,7 @@ function check_installed_python() {
 exit 2
 fi

-for v in 13 12 11 10
+for v in 13 12 11
 do
 PYTHON="python3.${v}"
 which $PYTHON
@@ -36,7 +36,7 @@ function check_installed_python() {
 fi
 done

-echo "No usable python found. Please make sure to have python3.10 or newer installed."
+echo "No usable python found. Please make sure to have python3.11 or newer installed."
 exit 1
 }

@@ -257,7 +257,7 @@ function install() {
 install_redhat
 else
 echo "This script does not support your OS."
-echo "If you have Python version 3.10 - 3.13, pip, virtualenv, ta-lib you can continue."
+echo "If you have Python version 3.11 - 3.13, pip, virtualenv, ta-lib you can continue."
 echo "Wait 10 seconds to continue the next install steps or use ctrl+c to interrupt this shell."
 sleep 10
 fi
@@ -284,7 +284,7 @@ function help() {
 echo " -p,--plot Install dependencies for Plotting scripts."
 }

-# Verify if 3.10+ is installed
+# Verify if 3.11+ is installed
 check_installed_python

 case $* in
@@ -4,7 +4,7 @@ import logging
|
||||
import platform
|
||||
import re
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock, Mock, PropertyMock
|
||||
|
||||
@@ -126,7 +126,7 @@ def get_args(args):
|
||||
def generate_trades_history(n_rows, start_date: datetime | None = None, days=5):
|
||||
np.random.seed(42)
|
||||
if not start_date:
|
||||
start_date = datetime(2020, 1, 1, tzinfo=timezone.utc)
|
||||
start_date = datetime(2020, 1, 1, tzinfo=UTC)
|
||||
|
||||
# Generate random data
|
||||
end_date = start_date + timedelta(days=days)
|
||||
@@ -3405,4 +3405,35 @@ def leverage_tiers():
|
||||
"maintAmt": 654500.0,
|
||||
},
|
||||
],
|
||||
"TIA/USDT:USDT": [
|
||||
# Okx tier - these have a gap between maxNotional and the next minNotional
|
||||
{
|
||||
"minNotional": 0.0,
|
||||
"maxNotional": 6500.0,
|
||||
"maintenanceMarginRate": 0.0065,
|
||||
"maxLeverage": 50.0,
|
||||
"maintAmt": None,
|
||||
},
|
||||
{
|
||||
"minNotional": 6501.0,
|
||||
"maxNotional": 12000.0,
|
||||
"maintenanceMarginRate": 0.01,
|
||||
"maxLeverage": 40.0,
|
||||
"maintAmt": None,
|
||||
},
|
||||
{
|
||||
"minNotional": 12001.0,
|
||||
"maxNotional": 25000.0,
|
||||
"maintenanceMarginRate": 0.015,
|
||||
"maxLeverage": 20.0,
|
||||
"maintAmt": None,
|
||||
},
|
||||
{
|
||||
"minNotional": 25001.0,
|
||||
"maxNotional": 50000.0,
|
||||
"maintenanceMarginRate": 0.02,
|
||||
"maxLeverage": 18.18,
|
||||
"maintAmt": None,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
from freqtrade.persistence.models import Order, Trade
|
||||
|
||||
@@ -43,7 +43,7 @@ def mock_trade_1(fee, is_short: bool):
|
||||
fee_open=fee.return_value,
|
||||
fee_close=fee.return_value,
|
||||
is_open=True,
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=17),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=17),
|
||||
open_rate=0.123,
|
||||
exchange="binance",
|
||||
strategy="StrategyTestV3",
|
||||
@@ -106,8 +106,8 @@ def mock_trade_2(fee, is_short: bool):
|
||||
timeframe=5,
|
||||
enter_tag="TEST1",
|
||||
exit_reason="sell_signal",
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=20),
|
||||
close_date=datetime.now(tz=timezone.utc) - timedelta(minutes=2),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=20),
|
||||
close_date=datetime.now(tz=UTC) - timedelta(minutes=2),
|
||||
is_short=is_short,
|
||||
)
|
||||
o = Order.parse_from_ccxt_object(mock_order_2(is_short), "ETC/BTC", entry_side(is_short))
|
||||
@@ -168,8 +168,8 @@ def mock_trade_3(fee, is_short: bool):
|
||||
strategy="StrategyTestV3",
|
||||
timeframe=5,
|
||||
exit_reason="roi",
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=20),
|
||||
close_date=datetime.now(tz=timezone.utc),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=20),
|
||||
close_date=datetime.now(tz=UTC),
|
||||
is_short=is_short,
|
||||
)
|
||||
o = Order.parse_from_ccxt_object(mock_order_3(is_short), "XRP/BTC", entry_side(is_short))
|
||||
@@ -205,7 +205,7 @@ def mock_trade_4(fee, is_short: bool):
|
||||
amount_requested=124.0,
|
||||
fee_open=fee.return_value,
|
||||
fee_close=fee.return_value,
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=14),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=14),
|
||||
is_open=True,
|
||||
open_rate=0.123,
|
||||
exchange="binance",
|
||||
@@ -260,7 +260,7 @@ def mock_trade_5(fee, is_short: bool):
|
||||
amount_requested=124.0,
|
||||
fee_open=fee.return_value,
|
||||
fee_close=fee.return_value,
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=12),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=12),
|
||||
is_open=True,
|
||||
open_rate=0.123,
|
||||
exchange="binance",
|
||||
@@ -316,7 +316,7 @@ def mock_trade_6(fee, is_short: bool):
|
||||
stake_amount=0.001,
|
||||
amount=2.0,
|
||||
amount_requested=2.0,
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=5),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=5),
|
||||
fee_open=fee.return_value,
|
||||
fee_close=fee.return_value,
|
||||
is_open=True,
|
||||
@@ -410,7 +410,7 @@ def short_trade(fee):
|
||||
strategy="DefaultStrategy",
|
||||
timeframe=5,
|
||||
exit_reason="sell_signal",
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=20),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=20),
|
||||
# close_date=datetime.now(tz=timezone.utc) - timedelta(minutes=2),
|
||||
is_short=True,
|
||||
)
|
||||
@@ -500,8 +500,8 @@ def leverage_trade(fee):
|
||||
strategy="DefaultStrategy",
|
||||
timeframe=5,
|
||||
exit_reason="sell_signal",
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=300),
|
||||
close_date=datetime.now(tz=timezone.utc),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=300),
|
||||
close_date=datetime.now(tz=UTC),
|
||||
interest_rate=0.0005,
|
||||
)
|
||||
o = Order.parse_from_ccxt_object(leverage_order(), "DOGE/BTC", "sell")
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
from freqtrade.persistence.models import Order, Trade
|
||||
|
||||
@@ -55,8 +55,8 @@ def mock_trade_usdt_1(fee, is_short: bool):
|
||||
stake_amount=20.0,
|
||||
amount=2.0,
|
||||
amount_requested=2.0,
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(days=2, minutes=20),
|
||||
close_date=datetime.now(tz=timezone.utc) - timedelta(days=2, minutes=5),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(days=2, minutes=20),
|
||||
close_date=datetime.now(tz=UTC) - timedelta(days=2, minutes=5),
|
||||
fee_open=fee.return_value,
|
||||
fee_close=fee.return_value,
|
||||
is_open=False,
|
||||
@@ -127,8 +127,8 @@ def mock_trade_usdt_2(fee, is_short: bool):
|
||||
timeframe=5,
|
||||
enter_tag="TEST1",
|
||||
exit_reason="exit_signal",
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=20),
|
||||
close_date=datetime.now(tz=timezone.utc) - timedelta(minutes=2),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=20),
|
||||
close_date=datetime.now(tz=UTC) - timedelta(minutes=2),
|
||||
is_short=is_short,
|
||||
)
|
||||
o = Order.parse_from_ccxt_object(mock_order_usdt_2(is_short), "NEO/USDT", entry_side(is_short))
|
||||
@@ -190,8 +190,8 @@ def mock_trade_usdt_3(fee, is_short: bool):
|
||||
timeframe=5,
|
||||
enter_tag="TEST3",
|
||||
exit_reason="roi",
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=20),
|
||||
close_date=datetime.now(tz=timezone.utc),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=20),
|
||||
close_date=datetime.now(tz=UTC),
|
||||
is_short=is_short,
|
||||
)
|
||||
o = Order.parse_from_ccxt_object(mock_order_usdt_3(is_short), "XRP/USDT", entry_side(is_short))
|
||||
@@ -228,7 +228,7 @@ def mock_trade_usdt_4(fee, is_short: bool):
|
||||
amount_requested=10.01,
|
||||
fee_open=fee.return_value,
|
||||
fee_close=fee.return_value,
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=14),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=14),
|
||||
is_open=True,
|
||||
open_rate=2.0,
|
||||
exchange="binance",
|
||||
@@ -280,7 +280,7 @@ def mock_trade_usdt_5(fee, is_short: bool):
|
||||
amount_requested=10.01,
|
||||
fee_open=fee.return_value,
|
||||
fee_close=fee.return_value,
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=12),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=12),
|
||||
is_open=True,
|
||||
open_rate=2.0,
|
||||
exchange="binance",
|
||||
@@ -332,7 +332,7 @@ def mock_trade_usdt_6(fee, is_short: bool):
|
||||
stake_amount=20.0,
|
||||
amount=2.0,
|
||||
amount_requested=2.0,
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=5),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=5),
|
||||
fee_open=fee.return_value,
|
||||
fee_close=fee.return_value,
|
||||
is_open=True,
|
||||
@@ -374,7 +374,7 @@ def mock_trade_usdt_7(fee, is_short: bool):
|
||||
fee_open=fee.return_value,
|
||||
fee_close=fee.return_value,
|
||||
is_open=True,
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=17),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=17),
|
||||
open_rate=2.0,
|
||||
exchange="binance",
|
||||
strategy="StrategyTestV2",
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock
|
||||
from zipfile import ZipFile
|
||||
@@ -182,19 +182,19 @@ def test_extract_trades_of_period(testdatadir):
|
||||
"profit_abs": [0.0, 1, -2, -5],
|
||||
"open_date": to_datetime(
|
||||
[
|
||||
datetime(2017, 11, 13, 15, 40, 0, tzinfo=timezone.utc),
|
||||
datetime(2017, 11, 14, 9, 41, 0, tzinfo=timezone.utc),
|
||||
datetime(2017, 11, 14, 14, 20, 0, tzinfo=timezone.utc),
|
||||
datetime(2017, 11, 15, 3, 40, 0, tzinfo=timezone.utc),
|
||||
datetime(2017, 11, 13, 15, 40, 0, tzinfo=UTC),
|
||||
datetime(2017, 11, 14, 9, 41, 0, tzinfo=UTC),
|
||||
datetime(2017, 11, 14, 14, 20, 0, tzinfo=UTC),
|
||||
datetime(2017, 11, 15, 3, 40, 0, tzinfo=UTC),
|
||||
],
|
||||
utc=True,
|
||||
),
|
||||
"close_date": to_datetime(
|
||||
[
|
||||
datetime(2017, 11, 13, 16, 40, 0, tzinfo=timezone.utc),
|
||||
datetime(2017, 11, 14, 10, 41, 0, tzinfo=timezone.utc),
|
||||
datetime(2017, 11, 14, 15, 25, 0, tzinfo=timezone.utc),
|
||||
datetime(2017, 11, 15, 3, 55, 0, tzinfo=timezone.utc),
|
||||
datetime(2017, 11, 13, 16, 40, 0, tzinfo=UTC),
|
||||
datetime(2017, 11, 14, 10, 41, 0, tzinfo=UTC),
|
||||
datetime(2017, 11, 14, 15, 25, 0, tzinfo=UTC),
|
||||
datetime(2017, 11, 15, 3, 55, 0, tzinfo=UTC),
|
||||
],
|
||||
utc=True,
|
||||
),
|
||||
@@ -203,10 +203,10 @@ def test_extract_trades_of_period(testdatadir):
|
||||
trades1 = extract_trades_of_period(data, trades)
|
||||
# First and last trade are dropped as they are out of range
|
||||
assert len(trades1) == 2
|
||||
assert trades1.iloc[0].open_date == datetime(2017, 11, 14, 9, 41, 0, tzinfo=timezone.utc)
|
||||
assert trades1.iloc[0].close_date == datetime(2017, 11, 14, 10, 41, 0, tzinfo=timezone.utc)
|
||||
assert trades1.iloc[-1].open_date == datetime(2017, 11, 14, 14, 20, 0, tzinfo=timezone.utc)
|
||||
assert trades1.iloc[-1].close_date == datetime(2017, 11, 14, 15, 25, 0, tzinfo=timezone.utc)
|
||||
assert trades1.iloc[0].open_date == datetime(2017, 11, 14, 9, 41, 0, tzinfo=UTC)
|
||||
assert trades1.iloc[0].close_date == datetime(2017, 11, 14, 10, 41, 0, tzinfo=UTC)
|
||||
assert trades1.iloc[-1].open_date == datetime(2017, 11, 14, 14, 20, 0, tzinfo=UTC)
|
||||
assert trades1.iloc[-1].close_date == datetime(2017, 11, 14, 15, 25, 0, tzinfo=UTC)
|
||||
|
||||
|
||||
def test_analyze_trade_parallelism(testdatadir):
|
||||
@@ -293,7 +293,7 @@ def test_combined_dataframes_with_rel_mean(testdatadir):
|
||||
pairs = ["ETH/BTC", "ADA/BTC"]
|
||||
data = load_data(datadir=testdatadir, pairs=pairs, timeframe="5m")
|
||||
df = combined_dataframes_with_rel_mean(
|
||||
data, datetime(2018, 1, 12, tzinfo=timezone.utc), datetime(2018, 1, 28, tzinfo=timezone.utc)
|
||||
data, datetime(2018, 1, 12, tzinfo=UTC), datetime(2018, 1, 28, tzinfo=UTC)
|
||||
)
|
||||
assert isinstance(df, DataFrame)
|
||||
assert "ETH/BTC" not in df.columns
|
||||
@@ -596,7 +596,7 @@ def test_calculate_max_drawdown_abs(profits, relative, highd, lowdays, result, r
|
||||
[1000, 500, 1000, 11000, 10000] # absolute results
|
||||
[1000, 50%, 0%, 0%, ~9%] # Relative drawdowns
|
||||
"""
|
||||
init_date = datetime(2020, 1, 1, tzinfo=timezone.utc)
|
||||
init_date = datetime(2020, 1, 1, tzinfo=UTC)
|
||||
dates = [init_date + timedelta(days=i) for i in range(len(profits))]
|
||||
df = DataFrame(zip(profits, dates, strict=False), columns=["profit_abs", "open_date"])
|
||||
# sort by profit and reset index
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# pragma pylint: disable=missing-docstring, protected-access, C0103
|
||||
|
||||
import re
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
@@ -165,19 +165,19 @@ def test_datahandler_ohlcv_data_min_max(testdatadir):
|
||||
# Empty pair
|
||||
min_max = dh.ohlcv_data_min_max("UNITTEST/BTC", "8m", "spot")
|
||||
assert len(min_max) == 3
|
||||
assert min_max[0] == datetime.fromtimestamp(0, tz=timezone.utc)
|
||||
assert min_max[0] == datetime.fromtimestamp(0, tz=UTC)
|
||||
assert min_max[0] == min_max[1]
|
||||
# Empty pair2
|
||||
min_max = dh.ohlcv_data_min_max("NOPAIR/XXX", "41m", "spot")
|
||||
assert len(min_max) == 3
|
||||
assert min_max[0] == datetime.fromtimestamp(0, tz=timezone.utc)
|
||||
assert min_max[0] == datetime.fromtimestamp(0, tz=UTC)
|
||||
assert min_max[0] == min_max[1]
|
||||
|
||||
# Existing pair ...
|
||||
min_max = dh.ohlcv_data_min_max("UNITTEST/BTC", "1m", "spot")
|
||||
assert len(min_max) == 3
|
||||
assert min_max[0] == datetime(2017, 11, 4, 23, 2, tzinfo=timezone.utc)
|
||||
assert min_max[1] == datetime(2017, 11, 14, 22, 59, tzinfo=timezone.utc)
|
||||
assert min_max[0] == datetime(2017, 11, 4, 23, 2, tzinfo=UTC)
|
||||
assert min_max[1] == datetime(2017, 11, 14, 22, 59, tzinfo=UTC)
|
||||
|
||||
|
||||
def test_datahandler__check_empty_df(testdatadir, caplog):
|
||||
@@ -467,14 +467,14 @@ def test_datahandler_trades_data_min_max(testdatadir):
|
||||
# Empty pair
|
||||
min_max = dh.trades_data_min_max("NADA/ETH", TradingMode.SPOT)
|
||||
assert len(min_max) == 3
|
||||
assert min_max[0] == datetime.fromtimestamp(0, tz=timezone.utc)
|
||||
assert min_max[0] == datetime.fromtimestamp(0, tz=UTC)
|
||||
assert min_max[0] == min_max[1]
|
||||
|
||||
# Existing pair ...
|
||||
min_max = dh.trades_data_min_max("XRP/ETH", TradingMode.SPOT)
|
||||
assert len(min_max) == 3
|
||||
assert min_max[0] == datetime(2019, 10, 11, 0, 0, 11, 620000, tzinfo=timezone.utc)
|
||||
assert min_max[1] == datetime(2019, 10, 13, 11, 19, 28, 844000, tzinfo=timezone.utc)
|
||||
assert min_max[0] == datetime(2019, 10, 11, 0, 0, 11, 620000, tzinfo=UTC)
|
||||
assert min_max[1] == datetime(2019, 10, 13, 11, 19, 28, 844000, tzinfo=UTC)
|
||||
|
||||
|
||||
def test_gethandlerclass():
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
@@ -222,8 +222,8 @@ def test_get_producer_df(default_conf):
|
||||
timeframe = default_conf["timeframe"]
|
||||
candle_type = CandleType.SPOT
|
||||
|
||||
empty_la = datetime.fromtimestamp(0, tz=timezone.utc)
|
||||
now = datetime.now(timezone.utc)
|
||||
empty_la = datetime.fromtimestamp(0, tz=UTC)
|
||||
now = datetime.now(UTC)
|
||||
|
||||
# no data has been added, any request should return an empty dataframe
|
||||
dataframe, la = dataprovider.get_producer_df(pair, timeframe, candle_type)
|
||||
@@ -404,7 +404,7 @@ def test_get_analyzed_dataframe(mocker, default_conf, ohlcv_history):
|
||||
dataframe, time = dp.get_analyzed_dataframe("NOTHING/BTC", timeframe)
|
||||
assert dataframe.empty
|
||||
assert isinstance(time, datetime)
|
||||
assert time == datetime(1970, 1, 1, tzinfo=timezone.utc)
|
||||
assert time == datetime(1970, 1, 1, tzinfo=UTC)
|
||||
|
||||
# Test backtest mode
|
||||
default_conf["runmode"] = RunMode.BACKTEST
|
||||
@@ -478,7 +478,7 @@ def test_dp__add_external_df(default_conf_usdt):
|
||||
default_conf_usdt["timeframe"] = timeframe
|
||||
dp = DataProvider(default_conf_usdt, None)
|
||||
df = generate_test_data(timeframe, 24, "2022-01-01 00:00:00+00:00")
|
||||
last_analyzed = datetime.now(timezone.utc)
|
||||
last_analyzed = datetime.now(UTC)
|
||||
|
||||
res = dp._add_external_df("ETH/USDT", df, last_analyzed, timeframe, CandleType.SPOT)
|
||||
assert res[0] is False
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# pragma pylint: disable=missing-docstring, C0103
|
||||
|
||||
from datetime import timezone
|
||||
from datetime import UTC
|
||||
|
||||
import pandas as pd
|
||||
from numpy import nan
|
||||
@@ -16,15 +16,15 @@ def test_get_tick_size_over_time():
|
||||
# Create test dataframe with different levels of precision
|
||||
data = {
|
||||
"date": [
|
||||
Timestamp("2020-01-01 00:00:00", tz=timezone.utc),
|
||||
Timestamp("2020-01-02 00:00:00", tz=timezone.utc),
|
||||
Timestamp("2020-01-03 00:00:00", tz=timezone.utc),
|
||||
Timestamp("2020-01-15 00:00:00", tz=timezone.utc),
|
||||
Timestamp("2020-01-16 00:00:00", tz=timezone.utc),
|
||||
Timestamp("2020-01-31 00:00:00", tz=timezone.utc),
|
||||
Timestamp("2020-02-01 00:00:00", tz=timezone.utc),
|
||||
Timestamp("2020-02-15 00:00:00", tz=timezone.utc),
|
||||
Timestamp("2020-03-15 00:00:00", tz=timezone.utc),
|
||||
Timestamp("2020-01-01 00:00:00", tz=UTC),
|
||||
Timestamp("2020-01-02 00:00:00", tz=UTC),
|
||||
Timestamp("2020-01-03 00:00:00", tz=UTC),
|
||||
Timestamp("2020-01-15 00:00:00", tz=UTC),
|
||||
Timestamp("2020-01-16 00:00:00", tz=UTC),
|
||||
Timestamp("2020-01-31 00:00:00", tz=UTC),
|
||||
Timestamp("2020-02-01 00:00:00", tz=UTC),
|
||||
Timestamp("2020-02-15 00:00:00", tz=UTC),
|
||||
Timestamp("2020-03-15 00:00:00", tz=UTC),
|
||||
],
|
||||
"open": [1.23456, 1.234, 1.23, 1.2, 1.23456, 1.234, 2.3456, 2.34, 2.34],
|
||||
"high": [1.23457, 1.235, 1.24, 1.3, 1.23456, 1.235, 2.3457, 2.34, 2.34],
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
from shutil import copytree
|
||||
from unittest.mock import PropertyMock
|
||||
|
||||
@@ -49,12 +49,8 @@ def test_import_kraken_trades_from_csv(testdatadir, tmp_path, caplog, default_co
|
||||
trades = dh.trades_load("BCH_EUR", TradingMode.SPOT)
|
||||
assert len(trades) == 340
|
||||
|
||||
assert trades["date"].min().to_pydatetime() == datetime(
|
||||
2023, 1, 1, 0, 3, 56, tzinfo=timezone.utc
|
||||
)
|
||||
assert trades["date"].max().to_pydatetime() == datetime(
|
||||
2023, 1, 2, 23, 17, 3, tzinfo=timezone.utc
|
||||
)
|
||||
assert trades["date"].min().to_pydatetime() == datetime(2023, 1, 1, 0, 3, 56, tzinfo=UTC)
|
||||
assert trades["date"].max().to_pydatetime() == datetime(2023, 1, 2, 23, 17, 3, tzinfo=UTC)
|
||||
# ID is not filled
|
||||
assert len(trades.loc[trades["id"] != ""]) == 0
|
||||
|
||||
|
||||
@@ -1015,10 +1015,10 @@ async def test__async_get_trade_history_id_binance(default_conf_usdt, mocker, fe
# Don't expect to get here
raise ValueError("Unexpected call")
# return fetch_trades_result[:-2]
elif kwargs.get("params", {}).get(exchange._trades_pagination_arg) == "0":
elif kwargs.get("params", {}).get(exchange._ft_has["trades_pagination_arg"]) == "0":
# Return first 3
return fetch_trades_result[:-2]
elif kwargs.get("params", {}).get(exchange._trades_pagination_arg) in (
elif kwargs.get("params", {}).get(exchange._ft_has["trades_pagination_arg"]) in (
fetch_trades_result[-3]["id"],
1565798399752,
):
@@ -1076,10 +1076,10 @@ async def test__async_get_trade_history_id_binance_fast(
# # Don't expect to get here
# raise ValueError("Unexpected call")
# # return fetch_trades_result[:-2]
elif kwargs.get("params", {}).get(exchange._trades_pagination_arg) == "0":
elif kwargs.get("params", {}).get(exchange._ft_has["trades_pagination_arg"]) == "0":
# Return first 3
return fetch_trades_result[:-2]
# elif kwargs.get("params", {}).get(exchange._trades_pagination_arg) in (
# elif kwargs.get("params", {}).get(exchange._ft_has['trades_pagination_arg']) in (
# fetch_trades_result[-3]["id"],
# 1565798399752,
# ):

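The two hunks above (and several later ones) replace the `exchange._trades_pagination_arg` attribute with a lookup in the exchange's `_ft_has` capability dict. As a small self-contained sketch of that pattern (the `FakeExchange` class, the `build_params` helper, and the `"fromId"` value are illustrative stand-ins, not freqtrade's actual defaults):

# Hypothetical stand-in for an exchange object: pagination settings live in a
# plain capability dict instead of dedicated attributes.
class FakeExchange:
    _ft_has = {"trades_pagination": "id", "trades_pagination_arg": "fromId"}

def build_params(exchange, last_trade_id):
    # Same pattern as the tests: read the pagination argument name from _ft_has.
    return {exchange._ft_has["trades_pagination_arg"]: last_trade_id}

print(build_params(FakeExchange(), "1565798399752"))  # {'fromId': '1565798399752'}
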
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
@@ -73,7 +73,7 @@ async def test_bybit_fetch_funding_rate(default_conf, mocker):
|
||||
|
||||
|
||||
def test_bybit_get_funding_fees(default_conf, mocker):
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
exchange = get_patched_exchange(mocker, default_conf, exchange="bybit")
|
||||
exchange._fetch_and_calculate_funding_fees = MagicMock()
|
||||
exchange.get_funding_fees("BTC/USDT:USDT", 1, False, now)
|
||||
@@ -117,7 +117,7 @@ def test_bybit_fetch_orders(default_conf, mocker, limit_order):
|
||||
return True
|
||||
|
||||
mocker.patch(f"{EXMS}.exchange_has", side_effect=exchange_has)
|
||||
start_time = datetime.now(timezone.utc) - timedelta(days=20)
|
||||
start_time = datetime.now(UTC) - timedelta(days=20)
|
||||
|
||||
exchange = get_patched_exchange(mocker, default_conf, api_mock, exchange="bybit")
|
||||
# Not available in dry-run
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import copy
|
||||
import logging
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from random import randint
|
||||
from unittest.mock import MagicMock, Mock, PropertyMock, patch
|
||||
|
||||
@@ -35,7 +35,6 @@ from freqtrade.exchange.common import (
|
||||
API_FETCH_ORDER_RETRY_COUNT,
|
||||
API_RETRY_COUNT,
|
||||
calculate_backoff,
|
||||
remove_exchange_credentials,
|
||||
)
|
||||
from freqtrade.resolvers.exchange_resolver import ExchangeResolver
|
||||
from freqtrade.util import dt_now, dt_ts
|
||||
@@ -167,20 +166,6 @@ def test_init(default_conf, mocker, caplog):
|
||||
assert log_has("Instance is running with dry_run enabled", caplog)
|
||||
|
||||
|
||||
def test_remove_exchange_credentials(default_conf) -> None:
|
||||
conf = deepcopy(default_conf)
|
||||
remove_exchange_credentials(conf["exchange"], False)
|
||||
|
||||
assert conf["exchange"]["key"] != ""
|
||||
assert conf["exchange"]["secret"] != ""
|
||||
|
||||
remove_exchange_credentials(conf["exchange"], True)
|
||||
assert conf["exchange"]["key"] == ""
|
||||
assert conf["exchange"]["secret"] == ""
|
||||
assert conf["exchange"]["password"] == ""
|
||||
assert conf["exchange"]["uid"] == ""
|
||||
|
||||
|
||||
def test_init_ccxt_kwargs(default_conf, mocker, caplog):
|
||||
mocker.patch(f"{EXMS}.reload_markets")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
@@ -1743,7 +1728,7 @@ def test_fetch_orders(default_conf, mocker, exchange_name, limit_order):
|
||||
api_mock.fetch_closed_orders = MagicMock(return_value=[limit_order["buy"]])
|
||||
|
||||
mocker.patch(f"{EXMS}.exchange_has", return_value=True)
|
||||
start_time = datetime.now(timezone.utc) - timedelta(days=20)
|
||||
start_time = datetime.now(UTC) - timedelta(days=20)
|
||||
expected = 1
|
||||
if exchange_name == "bybit":
|
||||
expected = 3
|
||||
@@ -2121,7 +2106,7 @@ def test___now_is_time_to_refresh(default_conf, mocker, exchange_name, time_mach
|
||||
exchange = get_patched_exchange(mocker, default_conf, exchange=exchange_name)
|
||||
pair = "BTC/USDT"
|
||||
candle_type = CandleType.SPOT
|
||||
start_dt = datetime(2023, 12, 1, 0, 10, 0, tzinfo=timezone.utc)
|
||||
start_dt = datetime(2023, 12, 1, 0, 10, 0, tzinfo=UTC)
|
||||
time_machine.move_to(start_dt, tick=False)
|
||||
assert (pair, "5m", candle_type) not in exchange._pairs_last_refresh_time
|
||||
|
||||
@@ -2144,7 +2129,7 @@ def test___now_is_time_to_refresh(default_conf, mocker, exchange_name, time_mach
|
||||
assert exchange._now_is_time_to_refresh(pair, "5m", candle_type) is True
|
||||
|
||||
# Test with 1d data
|
||||
start_day_dt = datetime(2023, 12, 1, 0, 0, 0, tzinfo=timezone.utc)
|
||||
start_day_dt = datetime(2023, 12, 1, 0, 0, 0, tzinfo=UTC)
|
||||
last_closed_candle_1d = dt_ts(start_day_dt - timedelta(days=1))
|
||||
exchange._pairs_last_refresh_time[(pair, "1d", candle_type)] = last_closed_candle_1d
|
||||
|
||||
@@ -2212,7 +2197,7 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_
|
||||
async def test__async_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_type):
|
||||
ohlcv = [
|
||||
[
|
||||
int((datetime.now(timezone.utc).timestamp() - 1000) * 1000),
|
||||
int((datetime.now(UTC).timestamp() - 1000) * 1000),
|
||||
1, # open
|
||||
2, # high
|
||||
3, # low
|
||||
@@ -2516,7 +2501,7 @@ def test_refresh_latest_trades(
|
||||
|
||||
@pytest.mark.parametrize("candle_type", [CandleType.FUTURES, CandleType.MARK, CandleType.SPOT])
|
||||
def test_refresh_latest_ohlcv_cache(mocker, default_conf, candle_type, time_machine) -> None:
|
||||
start = datetime(2021, 8, 1, 0, 0, 0, 0, tzinfo=timezone.utc)
|
||||
start = datetime(2021, 8, 1, 0, 0, 0, 0, tzinfo=UTC)
|
||||
ohlcv = generate_test_data_raw("1h", 100, start.strftime("%Y-%m-%d"))
|
||||
time_machine.move_to(start + timedelta(hours=99, minutes=30))
|
||||
|
||||
@@ -2610,7 +2595,7 @@ def test_refresh_latest_ohlcv_cache(mocker, default_conf, candle_type, time_mach
|
||||
|
||||
|
||||
def test_refresh_ohlcv_with_cache(mocker, default_conf, time_machine) -> None:
|
||||
start = datetime(2021, 8, 1, 0, 0, 0, 0, tzinfo=timezone.utc)
|
||||
start = datetime(2021, 8, 1, 0, 0, 0, 0, tzinfo=UTC)
|
||||
ohlcv = generate_test_data_raw("1h", 100, start.strftime("%Y-%m-%d"))
|
||||
time_machine.move_to(start, tick=False)
|
||||
pairs = [
|
||||
@@ -2918,7 +2903,7 @@ def test_get_entry_rate(
|
||||
mocker, default_conf, caplog, side, ask, bid, last, last_ab, expected, time_machine
|
||||
) -> None:
|
||||
caplog.set_level(logging.DEBUG)
|
||||
start_dt = datetime(2023, 12, 1, 0, 10, 0, tzinfo=timezone.utc)
|
||||
start_dt = datetime(2023, 12, 1, 0, 10, 0, tzinfo=UTC)
|
||||
time_machine.move_to(start_dt, tick=False)
|
||||
if last_ab is None:
|
||||
del default_conf["entry_pricing"]["price_last_balance"]
|
||||
@@ -2955,7 +2940,7 @@ def test_get_exit_rate(
|
||||
default_conf, mocker, caplog, side, bid, ask, last, last_ab, expected, time_machine
|
||||
) -> None:
|
||||
caplog.set_level(logging.DEBUG)
|
||||
start_dt = datetime(2023, 12, 1, 0, 10, 0, tzinfo=timezone.utc)
|
||||
start_dt = datetime(2023, 12, 1, 0, 10, 0, tzinfo=UTC)
|
||||
time_machine.move_to(start_dt, tick=False)
|
||||
|
||||
default_conf["exit_pricing"]["price_side"] = side
|
||||
@@ -3284,7 +3269,7 @@ async def test__async_fetch_trades(
|
||||
assert isinstance(res, list)
|
||||
assert isinstance(res[0], list)
|
||||
assert isinstance(res[1], list)
|
||||
if exchange._trades_pagination == "id":
|
||||
if exchange._ft_has["trades_pagination"] == "id":
|
||||
if exchange_name == "kraken":
|
||||
assert pagid == 1565798399872512133
|
||||
else:
|
||||
@@ -3305,7 +3290,7 @@ async def test__async_fetch_trades(
|
||||
assert exchange._api_async.fetch_trades.call_args[1]["limit"] == 1000
|
||||
assert exchange._api_async.fetch_trades.call_args[1]["params"] == {"from": "123"}
|
||||
|
||||
if exchange._trades_pagination == "id":
|
||||
if exchange._ft_has["trades_pagination"] == "id":
|
||||
if exchange_name == "kraken":
|
||||
assert pagid == 1565798399872512133
|
||||
else:
|
||||
@@ -3394,10 +3379,10 @@ async def test__async_get_trade_history_id(
|
||||
):
|
||||
default_conf["exchange"]["only_from_ccxt"] = True
|
||||
exchange = get_patched_exchange(mocker, default_conf, exchange=exchange_name)
|
||||
if exchange._trades_pagination != "id":
|
||||
if exchange._ft_has["trades_pagination"] != "id":
|
||||
exchange.close()
|
||||
pytest.skip("Exchange does not support pagination by trade id")
|
||||
pagination_arg = exchange._trades_pagination_arg
|
||||
pagination_arg = exchange._ft_has["trades_pagination_arg"]
|
||||
|
||||
async def mock_get_trade_hist(pair, *args, **kwargs):
|
||||
if "since" in kwargs:
|
||||
@@ -3471,7 +3456,7 @@ async def test__async_get_trade_history_time(
|
||||
|
||||
caplog.set_level(logging.DEBUG)
|
||||
exchange = get_patched_exchange(mocker, default_conf, exchange=exchange_name)
|
||||
if exchange._trades_pagination != "time":
|
||||
if exchange._ft_has["trades_pagination"] != "time":
|
||||
exchange.close()
|
||||
pytest.skip("Exchange does not support pagination by timestamp")
|
||||
# Monkey-patch async function
|
||||
@@ -4034,7 +4019,7 @@ def test_get_trades_for_order(default_conf, mocker, exchange_name, trading_mode,
|
||||
assert api_mock.fetch_my_trades.call_args[0][1] == 1525478395000
|
||||
assert (
|
||||
api_mock.fetch_my_trades.call_args[0][1]
|
||||
== int(since.replace(tzinfo=timezone.utc).timestamp() - 5) * 1000
|
||||
== int(since.replace(tzinfo=UTC).timestamp() - 5) * 1000
|
||||
)
|
||||
|
||||
ccxt_exceptionhandlers(
|
||||
@@ -4800,7 +4785,7 @@ def test_calculate_backoff(retrycount, max_retries, expected):
|
||||
|
||||
@pytest.mark.parametrize("exchange_name", EXCHANGES)
|
||||
def test_get_funding_fees(default_conf_usdt, mocker, exchange_name, caplog):
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
default_conf_usdt["trading_mode"] = "futures"
|
||||
default_conf_usdt["margin_mode"] = "isolated"
|
||||
exchange = get_patched_exchange(mocker, default_conf_usdt, exchange=exchange_name)
|
||||
@@ -5017,8 +5002,8 @@ def test_calculate_funding_fees(
|
||||
):
|
||||
exchange = get_patched_exchange(mocker, default_conf)
|
||||
kraken = get_patched_exchange(mocker, default_conf, exchange="kraken")
|
||||
prior_date = timeframe_to_prev_date("1h", datetime.now(timezone.utc) - timedelta(hours=1))
|
||||
trade_date = timeframe_to_prev_date("1h", datetime.now(timezone.utc))
|
||||
prior_date = timeframe_to_prev_date("1h", datetime.now(UTC) - timedelta(hours=1))
|
||||
trade_date = timeframe_to_prev_date("1h", datetime.now(UTC))
|
||||
funding_rates = DataFrame(
|
||||
[
|
||||
{"date": prior_date, "open": funding_rate}, # Line not used.
|
||||
@@ -5087,9 +5072,9 @@ def test_combine_funding_and_mark(
|
||||
futures_funding_rate,
|
||||
):
|
||||
exchange = get_patched_exchange(mocker, default_conf)
|
||||
prior2_date = timeframe_to_prev_date("1h", datetime.now(timezone.utc) - timedelta(hours=2))
|
||||
prior_date = timeframe_to_prev_date("1h", datetime.now(timezone.utc) - timedelta(hours=1))
|
||||
trade_date = timeframe_to_prev_date("1h", datetime.now(timezone.utc))
|
||||
prior2_date = timeframe_to_prev_date("1h", datetime.now(UTC) - timedelta(hours=2))
|
||||
prior_date = timeframe_to_prev_date("1h", datetime.now(UTC) - timedelta(hours=1))
|
||||
trade_date = timeframe_to_prev_date("1h", datetime.now(UTC))
|
||||
funding_rates = DataFrame(
|
||||
[
|
||||
{"date": prior2_date, "open": funding_rate},
|
||||
@@ -5946,10 +5931,14 @@ def test_get_max_leverage_futures(default_conf, mocker, leverage_tiers):
|
||||
assert exchange.get_max_leverage("SPONGE/USDT:USDT", 200) == 1.0 # Pair not in leverage_tiers
|
||||
assert exchange.get_max_leverage("BTC/USDT:USDT", 0.0) == 125.0 # No stake amount
|
||||
with pytest.raises(
|
||||
InvalidOrderException, match=r"Amount 1000000000.01 too high for BTC/USDT:USDT"
|
||||
InvalidOrderException, match=r"Stake amount 1000000000.01 too high for BTC/USDT:USDT"
|
||||
):
|
||||
exchange.get_max_leverage("BTC/USDT:USDT", 1000000000.01)
|
||||
|
||||
assert exchange.get_max_leverage("TIA/USDT:USDT", 130) == 50
|
||||
assert exchange.get_max_leverage("TIA/USDT:USDT", 131) == 40
|
||||
assert exchange.get_max_leverage("TIA/USDT:USDT", 130.008) == 40
|
||||
|
||||
|
||||
@pytest.mark.parametrize("exchange_name", ["binance", "kraken", "gate", "okx", "bybit"])
|
||||
def test__get_params(mocker, default_conf, exchange_name):
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# pragma pylint: disable=missing-docstring, protected-access, invalid-name
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from math import isnan, nan
|
||||
|
||||
import pytest
|
||||
@@ -117,7 +117,7 @@ def test_check_exchange(default_conf, caplog) -> None:
|
||||
|
||||
|
||||
def test_date_minus_candles():
|
||||
date = datetime(2019, 8, 12, 13, 25, 0, tzinfo=timezone.utc)
|
||||
date = datetime(2019, 8, 12, 13, 25, 0, tzinfo=UTC)
|
||||
|
||||
assert date_minus_candles("5m", 3, date) == date - timedelta(minutes=15)
|
||||
assert date_minus_candles("5m", 5, date) == date - timedelta(minutes=25)
|
||||
@@ -167,59 +167,59 @@ def test_timeframe_to_resample_freq(timeframe, expected):
|
||||
|
||||
def test_timeframe_to_prev_date():
|
||||
# 2019-08-12 13:22:08
|
||||
date = datetime.fromtimestamp(1565616128, tz=timezone.utc)
|
||||
date = datetime.fromtimestamp(1565616128, tz=UTC)
|
||||
|
||||
tf_list = [
|
||||
# 5m -> 2019-08-12 13:20:00
|
||||
("5m", datetime(2019, 8, 12, 13, 20, 0, tzinfo=timezone.utc)),
|
||||
("5m", datetime(2019, 8, 12, 13, 20, 0, tzinfo=UTC)),
|
||||
# 10m -> 2019-08-12 13:20:00
|
||||
("10m", datetime(2019, 8, 12, 13, 20, 0, tzinfo=timezone.utc)),
|
||||
("10m", datetime(2019, 8, 12, 13, 20, 0, tzinfo=UTC)),
|
||||
# 1h -> 2019-08-12 13:00:00
|
||||
("1h", datetime(2019, 8, 12, 13, 00, 0, tzinfo=timezone.utc)),
|
||||
("1h", datetime(2019, 8, 12, 13, 00, 0, tzinfo=UTC)),
|
||||
# 2h -> 2019-08-12 12:00:00
|
||||
("2h", datetime(2019, 8, 12, 12, 00, 0, tzinfo=timezone.utc)),
|
||||
("2h", datetime(2019, 8, 12, 12, 00, 0, tzinfo=UTC)),
|
||||
# 4h -> 2019-08-12 12:00:00
|
||||
("4h", datetime(2019, 8, 12, 12, 00, 0, tzinfo=timezone.utc)),
|
||||
("4h", datetime(2019, 8, 12, 12, 00, 0, tzinfo=UTC)),
|
||||
# 1d -> 2019-08-12 00:00:00
|
||||
("1d", datetime(2019, 8, 12, 00, 00, 0, tzinfo=timezone.utc)),
|
||||
("1d", datetime(2019, 8, 12, 00, 00, 0, tzinfo=UTC)),
|
||||
]
|
||||
for interval, result in tf_list:
|
||||
assert timeframe_to_prev_date(interval, date) == result
|
||||
|
||||
date = datetime.now(tz=timezone.utc)
|
||||
date = datetime.now(tz=UTC)
|
||||
assert timeframe_to_prev_date("5m") < date
|
||||
# Does not round
|
||||
time = datetime(2019, 8, 12, 13, 20, 0, tzinfo=timezone.utc)
|
||||
time = datetime(2019, 8, 12, 13, 20, 0, tzinfo=UTC)
|
||||
assert timeframe_to_prev_date("5m", time) == time
|
||||
time = datetime(2019, 8, 12, 13, 0, 0, tzinfo=timezone.utc)
|
||||
time = datetime(2019, 8, 12, 13, 0, 0, tzinfo=UTC)
|
||||
assert timeframe_to_prev_date("1h", time) == time
|
||||
|
||||
|
||||
def test_timeframe_to_next_date():
|
||||
# 2019-08-12 13:22:08
|
||||
date = datetime.fromtimestamp(1565616128, tz=timezone.utc)
|
||||
date = datetime.fromtimestamp(1565616128, tz=UTC)
|
||||
tf_list = [
|
||||
# 5m -> 2019-08-12 13:25:00
|
||||
("5m", datetime(2019, 8, 12, 13, 25, 0, tzinfo=timezone.utc)),
|
||||
("5m", datetime(2019, 8, 12, 13, 25, 0, tzinfo=UTC)),
|
||||
# 10m -> 2019-08-12 13:30:00
|
||||
("10m", datetime(2019, 8, 12, 13, 30, 0, tzinfo=timezone.utc)),
|
||||
("10m", datetime(2019, 8, 12, 13, 30, 0, tzinfo=UTC)),
|
||||
# 1h -> 2019-08-12 14:00:00
|
||||
("1h", datetime(2019, 8, 12, 14, 00, 0, tzinfo=timezone.utc)),
|
||||
("1h", datetime(2019, 8, 12, 14, 00, 0, tzinfo=UTC)),
|
||||
# 2h -> 2019-08-12 14:00:00
|
||||
("2h", datetime(2019, 8, 12, 14, 00, 0, tzinfo=timezone.utc)),
|
||||
("2h", datetime(2019, 8, 12, 14, 00, 0, tzinfo=UTC)),
|
||||
# 4h -> 2019-08-12 14:00:00
|
||||
("4h", datetime(2019, 8, 12, 16, 00, 0, tzinfo=timezone.utc)),
|
||||
("4h", datetime(2019, 8, 12, 16, 00, 0, tzinfo=UTC)),
|
||||
# 1d -> 2019-08-13 00:00:00
|
||||
("1d", datetime(2019, 8, 13, 0, 0, 0, tzinfo=timezone.utc)),
|
||||
("1d", datetime(2019, 8, 13, 0, 0, 0, tzinfo=UTC)),
|
||||
]
|
||||
|
||||
for interval, result in tf_list:
|
||||
assert timeframe_to_next_date(interval, date) == result
|
||||
|
||||
date = datetime.now(tz=timezone.utc)
|
||||
date = datetime.now(tz=UTC)
|
||||
assert timeframe_to_next_date("5m") > date
|
||||
|
||||
date = datetime(2019, 8, 12, 13, 30, 0, tzinfo=timezone.utc)
|
||||
date = datetime(2019, 8, 12, 13, 30, 0, tzinfo=UTC)
|
||||
assert timeframe_to_next_date("5m", date) == date + timedelta(minutes=5)
|
||||
|
||||
|
||||
|
||||
@@ -50,18 +50,18 @@ def test_exchangews_cleanup_error(mocker, caplog):


def patch_eventloop_threading(exchange):
is_init = False
init_event = threading.Event()

def thread_fuck():
nonlocal is_init
def thread_func():
exchange._loop = asyncio.new_event_loop()
is_init = True
init_event.set()
exchange._loop.run_forever()

x = threading.Thread(target=thread_fuck, daemon=True)
x = threading.Thread(target=thread_func, daemon=True)
x.start()
while not is_init:
pass
# Wait for thread to be properly initialized with timeout
if not init_event.wait(timeout=5.0):
raise RuntimeError("Failed to initialize event loop thread")


async def test_exchangews_ohlcv(mocker, time_machine, caplog):
@@ -69,14 +69,24 @@ async def test_exchangews_ohlcv(mocker, time_machine, caplog):
ccxt_object = MagicMock()
caplog.set_level(logging.DEBUG)

async def sleeper(*args, **kwargs):
# pass
await asyncio.sleep(0.12)
async def controlled_sleeper(*args, **kwargs):
# Sleep to pass control back to the event loop
await asyncio.sleep(0.1)
return MagicMock()

ccxt_object.un_watch_ohlcv_for_symbols = AsyncMock(side_effect=NotSupported)
async def wait_for_condition(condition_func, timeout_=5.0, check_interval=0.01):
"""Wait for a condition to be true with timeout."""
try:
async with asyncio.timeout(timeout_):
while True:
if condition_func():
return True
await asyncio.sleep(check_interval)
except TimeoutError:
return False

ccxt_object.watch_ohlcv = AsyncMock(side_effect=sleeper)
ccxt_object.un_watch_ohlcv_for_symbols = AsyncMock(side_effect=NotSupported)
ccxt_object.watch_ohlcv = AsyncMock(side_effect=controlled_sleeper)
ccxt_object.close = AsyncMock()
time_machine.move_to("2024-11-01 01:00:02 +00:00")

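The `wait_for_condition` helper introduced in the hunk above polls a predicate under a deadline instead of relying on fixed sleeps. A short standalone usage sketch of that polling pattern, assuming Python 3.11+ for `asyncio.timeout` (the mock and timings here are illustrative, not taken from the test):

# Standalone sketch: poll a predicate with a deadline instead of sleeping a
# fixed amount and hoping the mock was already called.
import asyncio
from unittest.mock import AsyncMock

async def wait_for_condition(condition_func, timeout_=5.0, check_interval=0.01):
    try:
        async with asyncio.timeout(timeout_):
            while True:
                if condition_func():
                    return True
                await asyncio.sleep(check_interval)
    except TimeoutError:
        return False

async def main():
    watcher = AsyncMock()
    # Schedule the awaited call ~50ms in the future, then wait for it to land.
    asyncio.get_running_loop().call_later(
        0.05, lambda: asyncio.ensure_future(watcher())
    )
    assert await wait_for_condition(lambda: watcher.await_count >= 1, timeout_=1.0)

asyncio.run(main())
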
@@ -90,7 +100,14 @@ async def test_exchangews_ohlcv(mocker, time_machine, caplog):
|
||||
|
||||
exchange_ws.schedule_ohlcv("ETH/BTC", "1m", CandleType.SPOT)
|
||||
exchange_ws.schedule_ohlcv("XRP/BTC", "1m", CandleType.SPOT)
|
||||
await asyncio.sleep(0.2)
|
||||
|
||||
# Wait for both pairs to be properly scheduled and watching
|
||||
await wait_for_condition(
|
||||
lambda: (
|
||||
len(exchange_ws._klines_watching) == 2 and len(exchange_ws._klines_scheduled) == 2
|
||||
),
|
||||
timeout_=2.0,
|
||||
)
|
||||
|
||||
assert exchange_ws._klines_watching == {
|
||||
("ETH/BTC", "1m", CandleType.SPOT),
|
||||
@@ -100,14 +117,21 @@ async def test_exchangews_ohlcv(mocker, time_machine, caplog):
|
||||
("ETH/BTC", "1m", CandleType.SPOT),
|
||||
("XRP/BTC", "1m", CandleType.SPOT),
|
||||
}
|
||||
await asyncio.sleep(0.1)
|
||||
assert ccxt_object.watch_ohlcv.call_count == 6
|
||||
|
||||
# Wait for the expected number of watch calls
|
||||
await wait_for_condition(lambda: ccxt_object.watch_ohlcv.call_count >= 6, timeout_=3.0)
|
||||
assert ccxt_object.watch_ohlcv.call_count >= 6
|
||||
ccxt_object.watch_ohlcv.reset_mock()
|
||||
|
||||
time_machine.shift(timedelta(minutes=5))
|
||||
exchange_ws.schedule_ohlcv("ETH/BTC", "1m", CandleType.SPOT)
|
||||
await asyncio.sleep(1)
|
||||
|
||||
# Wait for log message
|
||||
await wait_for_condition(
|
||||
lambda: log_has_re("un_watch_ohlcv_for_symbols not supported: ", caplog), timeout_=2.0
|
||||
)
|
||||
assert log_has_re("un_watch_ohlcv_for_symbols not supported: ", caplog)
|
||||
|
||||
# XRP/BTC should be cleaned up.
|
||||
assert exchange_ws._klines_watching == {
|
||||
("ETH/BTC", "1m", CandleType.SPOT),
|
||||
@@ -116,6 +140,8 @@ async def test_exchangews_ohlcv(mocker, time_machine, caplog):
|
||||
# Cleanup happened.
|
||||
ccxt_object.un_watch_ohlcv_for_symbols = AsyncMock(side_effect=ValueError)
|
||||
exchange_ws.schedule_ohlcv("ETH/BTC", "1m", CandleType.SPOT)
|
||||
|
||||
# Verify final state
|
||||
assert exchange_ws._klines_watching == {
|
||||
("ETH/BTC", "1m", CandleType.SPOT),
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
from datetime import datetime, timezone
from datetime import UTC, datetime
from unittest.mock import MagicMock

import pytest
@@ -113,7 +113,7 @@ def test_fetch_my_trades_gate(mocker, default_conf, takerormaker, rate, cost):
)
exchange = get_patched_exchange(mocker, default_conf, api_mock=api_mock, exchange="gate")
exchange._trading_fees = tick
trades = exchange.get_trades_for_order("22255", "ETH/USDT:USDT", datetime.now(timezone.utc))
trades = exchange.get_trades_for_order("22255", "ETH/USDT:USDT", datetime.now(UTC))
trade = trades[0]
assert trade["fee"]
assert trade["fee"]["rate"] == rate

@@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
from unittest.mock import MagicMock, PropertyMock
|
||||
|
||||
import pytest
|
||||
@@ -306,7 +306,7 @@ def test_hyperliquid_dry_run_liquidation_price(default_conf, mocker):
|
||||
|
||||
|
||||
def test_hyperliquid_get_funding_fees(default_conf, mocker):
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
exchange = get_patched_exchange(mocker, default_conf, exchange="hyperliquid")
|
||||
exchange._fetch_and_calculate_funding_fees = MagicMock()
|
||||
exchange.get_funding_fees("BTC/USDC:USDC", 1, False, now)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from unittest.mock import AsyncMock, MagicMock, PropertyMock
|
||||
|
||||
import ccxt
|
||||
@@ -15,7 +15,7 @@ from tests.exchange.test_exchange import ccxt_exceptionhandlers
|
||||
def test_okx_ohlcv_candle_limit(default_conf, mocker):
|
||||
exchange = get_patched_exchange(mocker, default_conf, exchange="okx")
|
||||
timeframes = ("1m", "5m", "1h")
|
||||
start_time = int(datetime(2021, 1, 1, tzinfo=timezone.utc).timestamp() * 1000)
|
||||
start_time = int(datetime(2021, 1, 1, tzinfo=UTC).timestamp() * 1000)
|
||||
|
||||
for timeframe in timeframes:
|
||||
assert exchange.ohlcv_candle_limit(timeframe, CandleType.SPOT) == 300
|
||||
@@ -29,8 +29,7 @@ def test_okx_ohlcv_candle_limit(default_conf, mocker):
|
||||
assert exchange.ohlcv_candle_limit(timeframe, CandleType.FUNDING_RATE, start_time) == 100
|
||||
one_call = int(
|
||||
(
|
||||
datetime.now(timezone.utc)
|
||||
- timedelta(minutes=290 * timeframe_to_minutes(timeframe))
|
||||
datetime.now(UTC) - timedelta(minutes=290 * timeframe_to_minutes(timeframe))
|
||||
).timestamp()
|
||||
* 1000
|
||||
)
|
||||
@@ -40,8 +39,7 @@ def test_okx_ohlcv_candle_limit(default_conf, mocker):
|
||||
|
||||
one_call = int(
|
||||
(
|
||||
datetime.now(timezone.utc)
|
||||
- timedelta(minutes=320 * timeframe_to_minutes(timeframe))
|
||||
datetime.now(UTC) - timedelta(minutes=320 * timeframe_to_minutes(timeframe))
|
||||
).timestamp()
|
||||
* 1000
|
||||
)
|
||||
@@ -693,7 +691,7 @@ def test_fetch_orders_okx(default_conf, mocker, limit_order):
|
||||
api_mock.fetch_closed_orders = MagicMock(return_value=[limit_order["buy"]])
|
||||
|
||||
mocker.patch(f"{EXMS}.exchange_has", return_value=True)
|
||||
start_time = datetime.now(timezone.utc) - timedelta(days=20)
|
||||
start_time = datetime.now(UTC) - timedelta(days=20)
|
||||
|
||||
exchange = get_patched_exchange(mocker, default_conf, api_mock, exchange="okx")
|
||||
# Not available in dry-run
|
||||
@@ -727,7 +725,7 @@ def test_fetch_orders_okx(default_conf, mocker, limit_order):
|
||||
api_mock.fetch_closed_orders.reset_mock()
|
||||
|
||||
# regular closed_orders endpoint only has history for 7 days.
|
||||
exchange.fetch_orders("mocked", datetime.now(timezone.utc) - timedelta(days=6))
|
||||
exchange.fetch_orders("mocked", datetime.now(UTC) - timedelta(days=6))
|
||||
assert api_mock.fetch_orders.call_count == 0
|
||||
assert api_mock.fetch_open_orders.call_count == 1
|
||||
assert api_mock.fetch_closed_orders.call_count == 1
|
||||
|
||||
@@ -5,7 +5,7 @@ However, these tests should give a good idea to determine if a new exchange is
|
||||
suitable to run with freqtrade.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -248,7 +248,7 @@ class TestCCXTExchange:
|
||||
len(exch.klines(pair_tf)) > exch.ohlcv_candle_limit(timeframe, CandleType.SPOT) * 0.90
|
||||
)
|
||||
# Check if last-timeframe is within the last 2 intervals
|
||||
now = datetime.now(timezone.utc) - timedelta(minutes=(timeframe_to_minutes(timeframe) * 2))
|
||||
now = datetime.now(UTC) - timedelta(minutes=(timeframe_to_minutes(timeframe) * 2))
|
||||
assert exch.klines(pair_tf).iloc[-1]["date"] >= timeframe_to_prev_date(timeframe, now)
|
||||
|
||||
def test_ccxt_fetch_ohlcv_startdate(self, exchange: EXCHANGE_FIXTURE_TYPE):
|
||||
@@ -266,7 +266,7 @@ class TestCCXTExchange:
|
||||
assert isinstance(ohlcv, dict)
|
||||
assert len(ohlcv[pair_tf]) == len(exch.klines(pair_tf))
|
||||
# Check if last-timeframe is within the last 2 intervals
|
||||
now = datetime.now(timezone.utc) - timedelta(minutes=(timeframe_to_minutes(timeframe) * 2))
|
||||
now = datetime.now(UTC) - timedelta(minutes=(timeframe_to_minutes(timeframe) * 2))
|
||||
assert exch.klines(pair_tf).iloc[-1]["date"] >= timeframe_to_prev_date(timeframe, now)
|
||||
assert exch.klines(pair_tf)["date"].astype(int).iloc[0] // 1e6 == since_ms
|
||||
|
||||
@@ -274,7 +274,7 @@ class TestCCXTExchange:
|
||||
self, exchange, exchangename, pair, timeframe, candle_type, factor=0.9
|
||||
):
|
||||
timeframe_ms = timeframe_to_msecs(timeframe)
|
||||
now = timeframe_to_prev_date(timeframe, datetime.now(timezone.utc))
|
||||
now = timeframe_to_prev_date(timeframe, datetime.now(UTC))
|
||||
for offset in (360, 120, 30, 10, 5, 2):
|
||||
since = now - timedelta(days=offset)
|
||||
since_ms = int(since.timestamp() * 1000)
|
||||
@@ -336,7 +336,7 @@ class TestCCXTExchange:
|
||||
exchange, exchangename = exchange_futures
|
||||
|
||||
pair = EXCHANGES[exchangename].get("futures_pair", EXCHANGES[exchangename]["pair"])
|
||||
since = int((datetime.now(timezone.utc) - timedelta(days=5)).timestamp() * 1000)
|
||||
since = int((datetime.now(UTC) - timedelta(days=5)).timestamp() * 1000)
|
||||
timeframe_ff = exchange._ft_has.get(
|
||||
"funding_fee_timeframe", exchange._ft_has["mark_ohlcv_timeframe"]
|
||||
)
|
||||
@@ -371,7 +371,7 @@ class TestCCXTExchange:
|
||||
def test_ccxt_fetch_mark_price_history(self, exchange_futures: EXCHANGE_FIXTURE_TYPE):
|
||||
exchange, exchangename = exchange_futures
|
||||
pair = EXCHANGES[exchangename].get("futures_pair", EXCHANGES[exchangename]["pair"])
|
||||
since = int((datetime.now(timezone.utc) - timedelta(days=5)).timestamp() * 1000)
|
||||
since = int((datetime.now(UTC) - timedelta(days=5)).timestamp() * 1000)
|
||||
pair_tf = (pair, "1h", CandleType.MARK)
|
||||
|
||||
mark_ohlcv = exchange.refresh_latest_ohlcv([pair_tf], since_ms=since, drop_incomplete=False)
|
||||
@@ -389,7 +389,7 @@ class TestCCXTExchange:
|
||||
def test_ccxt__calculate_funding_fees(self, exchange_futures: EXCHANGE_FIXTURE_TYPE):
|
||||
exchange, exchangename = exchange_futures
|
||||
pair = EXCHANGES[exchangename].get("futures_pair", EXCHANGES[exchangename]["pair"])
|
||||
since = datetime.now(timezone.utc) - timedelta(days=5)
|
||||
since = datetime.now(UTC) - timedelta(days=5)
|
||||
|
||||
funding_fee = exchange._fetch_and_calculate_funding_fees(
|
||||
pair, 20, is_short=False, open_date=since
|
||||
@@ -403,7 +403,7 @@ class TestCCXTExchange:
|
||||
if not (lookback := EXCHANGES[exchangename].get("trades_lookback_hours")):
|
||||
pytest.skip("test_fetch_trades not enabled for this exchange")
|
||||
pair = EXCHANGES[exchangename]["pair"]
|
||||
since = int((datetime.now(timezone.utc) - timedelta(hours=lookback)).timestamp() * 1000)
|
||||
since = int((datetime.now(UTC) - timedelta(hours=lookback)).timestamp() * 1000)
|
||||
res = exch.loop.run_until_complete(exch._async_get_trade_history(pair, since, None, None))
|
||||
assert len(res) == 2
|
||||
res_pair, res_trades = res
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
from unittest.mock import PropertyMock
|
||||
|
||||
@@ -28,7 +28,7 @@ from tests.freqai.conftest import get_patched_freqai_strategy
|
||||
def test_freqai_backtest_start_backtest_list(freqai_conf, mocker, testdatadir, caplog):
|
||||
patch_exchange(mocker)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
mocker.patch(
|
||||
"freqtrade.plugins.pairlistmanager.PairListManager.whitelist",
|
||||
PropertyMock(return_value=["HULUMULU/USDT", "XRP/USDT"]),
|
||||
@@ -73,7 +73,7 @@ def test_freqai_backtest_load_data(
|
||||
):
|
||||
patch_exchange(mocker)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
mocker.patch(
|
||||
"freqtrade.plugins.pairlistmanager.PairListManager.whitelist",
|
||||
PropertyMock(return_value=["HULUMULU/USDT", "XRP/USDT"]),
|
||||
@@ -98,7 +98,7 @@ def test_freqai_backtest_load_data(
|
||||
def test_freqai_backtest_live_models_model_not_found(freqai_conf, mocker, testdatadir, caplog):
|
||||
patch_exchange(mocker)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
now = datetime.now(UTC)
|
||||
mocker.patch(
|
||||
"freqtrade.plugins.pairlistmanager.PairListManager.whitelist",
|
||||
PropertyMock(return_value=["HULUMULU/USDT", "XRP/USDT"]),
|
||||
@@ -163,6 +163,6 @@ def test_freqai_backtest_consistent_timerange(mocker, freqai_conf):
|
||||
backtesting = Backtesting(deepcopy(freqai_conf))
|
||||
backtesting.start()
|
||||
|
||||
assert gbs.call_args[1]["min_date"] == datetime(2021, 11, 20, 0, 0, tzinfo=timezone.utc)
|
||||
assert gbs.call_args[1]["max_date"] == datetime(2021, 11, 21, 0, 0, tzinfo=timezone.utc)
|
||||
assert gbs.call_args[1]["min_date"] == datetime(2021, 11, 20, 0, 0, tzinfo=UTC)
|
||||
assert gbs.call_args[1]["max_date"] == datetime(2021, 11, 21, 0, 0, tzinfo=UTC)
|
||||
Backtesting.cleanup()
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import shutil
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
@@ -67,9 +67,9 @@ def test_split_timerange(
|
||||
|
||||
def test_check_if_model_expired(mocker, freqai_conf):
|
||||
dk = get_patched_data_kitchen(mocker, freqai_conf)
|
||||
now = datetime.now(tz=timezone.utc).timestamp()
|
||||
now = datetime.now(tz=UTC).timestamp()
|
||||
assert dk.check_if_model_expired(now) is False
|
||||
now = (datetime.now(tz=timezone.utc) - timedelta(hours=2)).timestamp()
|
||||
now = (datetime.now(tz=UTC) - timedelta(hours=2)).timestamp()
|
||||
assert dk.check_if_model_expired(now) is True
|
||||
shutil.rmtree(Path(dk.full_path))
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import random
|
||||
from collections import defaultdict
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from pathlib import Path
|
||||
from unittest.mock import ANY, MagicMock, PropertyMock
|
||||
|
||||
@@ -687,7 +687,7 @@ def test_backtest__check_trade_exit(default_conf, mocker) -> None:
|
||||
backtesting._set_strategy(backtesting.strategylist[0])
|
||||
pair = "UNITTEST/BTC"
|
||||
row = [
|
||||
pd.Timestamp(year=2020, month=1, day=1, hour=4, minute=55, tzinfo=timezone.utc),
|
||||
pd.Timestamp(year=2020, month=1, day=1, hour=4, minute=55, tzinfo=UTC),
|
||||
200, # Open
|
||||
201.5, # High
|
||||
195, # Low
|
||||
@@ -705,7 +705,7 @@ def test_backtest__check_trade_exit(default_conf, mocker) -> None:
|
||||
assert isinstance(trade, LocalTrade)
|
||||
|
||||
row_sell = [
|
||||
pd.Timestamp(year=2020, month=1, day=1, hour=5, minute=0, tzinfo=timezone.utc),
|
||||
pd.Timestamp(year=2020, month=1, day=1, hour=5, minute=0, tzinfo=UTC),
|
||||
200, # Open
|
||||
210.5, # High
|
||||
195, # Low
|
||||
@@ -723,7 +723,7 @@ def test_backtest__check_trade_exit(default_conf, mocker) -> None:
|
||||
res = backtesting._check_trade_exit(trade, row_sell, row_sell[0].to_pydatetime())
|
||||
assert res is not None
|
||||
assert res.exit_reason == ExitType.ROI.value
|
||||
assert res.close_date_utc == datetime(2020, 1, 1, 5, 0, tzinfo=timezone.utc)
|
||||
assert res.close_date_utc == datetime(2020, 1, 1, 5, 0, tzinfo=UTC)
|
||||
|
||||
# Enter new trade
|
||||
trade = backtesting._enter_trade(pair, row=row, direction="long")
|
||||
@@ -928,7 +928,7 @@ def test_backtest_one_detail(default_conf_usdt, mocker, testdatadir, use_detail)
|
||||
assert len(t["orders"]) == 2
|
||||
|
||||
entryo = t["orders"][0]
|
||||
entry_ts = datetime.fromtimestamp(entryo["order_filled_timestamp"] // 1000, tz=timezone.utc)
|
||||
entry_ts = datetime.fromtimestamp(entryo["order_filled_timestamp"] // 1000, tz=UTC)
|
||||
if entry_ts > t["open_date"]:
|
||||
late_entry += 1
|
||||
|
||||
@@ -1039,7 +1039,7 @@ def test_backtest_one_detail_futures(
|
||||
assert len(t["orders"]) == 2
|
||||
|
||||
entryo = t["orders"][0]
|
||||
entry_ts = datetime.fromtimestamp(entryo["order_filled_timestamp"] // 1000, tz=timezone.utc)
|
||||
entry_ts = datetime.fromtimestamp(entryo["order_filled_timestamp"] // 1000, tz=UTC)
|
||||
if entry_ts > t["open_date"]:
|
||||
late_entry += 1
|
||||
|
||||
@@ -1121,7 +1121,7 @@ def test_backtest_one_detail_futures_funding_fees(
|
||||
return df
|
||||
|
||||
def adjust_trade_position(trade, current_time, **kwargs):
|
||||
if current_time > datetime(2021, 11, 18, 2, 0, 0, tzinfo=timezone.utc):
|
||||
if current_time > datetime(2021, 11, 18, 2, 0, 0, tzinfo=UTC):
|
||||
return None
|
||||
return default_conf_usdt["stake_amount"]
|
||||
|
||||
@@ -2564,7 +2564,7 @@ def test_backtest_start_multi_strat_caching(
|
||||
mocker.patch("freqtrade.optimize.backtesting.Backtesting.backtest", backtestmock)
|
||||
mocker.patch("freqtrade.optimize.backtesting.show_backtest_results", MagicMock())
|
||||
|
||||
now = min_backtest_date = datetime.now(tz=timezone.utc)
|
||||
now = min_backtest_date = datetime.now(tz=UTC)
|
||||
start_time = now - timedelta(**start_delta) + timedelta(hours=1)
|
||||
if cache == "none":
|
||||
min_backtest_date = now + timedelta(days=1)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -8,19 +8,19 @@ from tests.conftest import create_mock_trades_usdt
|
||||
|
||||
@pytest.mark.usefixtures("init_persistence")
|
||||
def test_key_value_store(time_machine):
|
||||
start = datetime(2023, 1, 1, 4, tzinfo=timezone.utc)
|
||||
start = datetime(2023, 1, 1, 4, tzinfo=UTC)
|
||||
time_machine.move_to(start, tick=False)
|
||||
|
||||
KeyValueStore.store_value("test", "testStringValue")
|
||||
KeyValueStore.store_value("test_dt", datetime.now(timezone.utc))
|
||||
KeyValueStore.store_value("test_dt", datetime.now(UTC))
|
||||
KeyValueStore.store_value("test_float", 22.51)
|
||||
KeyValueStore.store_value("test_int", 15)
|
||||
|
||||
assert KeyValueStore.get_value("test") == "testStringValue"
|
||||
assert KeyValueStore.get_value("test") == "testStringValue"
|
||||
assert KeyValueStore.get_string_value("test") == "testStringValue"
|
||||
assert KeyValueStore.get_value("test_dt") == datetime.now(timezone.utc)
|
||||
assert KeyValueStore.get_datetime_value("test_dt") == datetime.now(timezone.utc)
|
||||
assert KeyValueStore.get_value("test_dt") == datetime.now(UTC)
|
||||
assert KeyValueStore.get_datetime_value("test_dt") == datetime.now(UTC)
|
||||
assert KeyValueStore.get_string_value("test_dt") is None
|
||||
assert KeyValueStore.get_float_value("test_dt") is None
|
||||
assert KeyValueStore.get_int_value("test_dt") is None
|
||||
@@ -31,11 +31,11 @@ def test_key_value_store(time_machine):
|
||||
assert KeyValueStore.get_datetime_value("test_int") is None
|
||||
|
||||
time_machine.move_to(start + timedelta(days=20, hours=5), tick=False)
|
||||
assert KeyValueStore.get_value("test_dt") != datetime.now(timezone.utc)
|
||||
assert KeyValueStore.get_value("test_dt") != datetime.now(UTC)
|
||||
assert KeyValueStore.get_value("test_dt") == start
|
||||
# Test update works
|
||||
KeyValueStore.store_value("test_dt", datetime.now(timezone.utc))
|
||||
assert KeyValueStore.get_value("test_dt") == datetime.now(timezone.utc)
|
||||
KeyValueStore.store_value("test_dt", datetime.now(UTC))
|
||||
assert KeyValueStore.get_value("test_dt") == datetime.now(UTC)
|
||||
|
||||
KeyValueStore.store_value("test_float", 23.51)
|
||||
assert KeyValueStore.get_value("test_float") == 23.51
|
||||
@@ -52,7 +52,7 @@ def test_key_value_store(time_machine):
|
||||
@pytest.mark.usefixtures("init_persistence")
|
||||
def test_set_startup_time(fee, time_machine):
|
||||
create_mock_trades_usdt(fee)
|
||||
start = datetime.now(timezone.utc)
|
||||
start = datetime.now(UTC)
|
||||
time_machine.move_to(start, tick=False)
|
||||
set_startup_time()
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# pragma pylint: disable=missing-docstring, C0103
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from types import FunctionType
|
||||
|
||||
import pytest
|
||||
@@ -265,7 +265,7 @@ def test_interest(fee, exchange, is_short, lev, minutes, rate, interest, trading
|
||||
stake_amount=20.0,
|
||||
amount=30.0,
|
||||
open_rate=2.0,
|
||||
open_date=datetime.now(timezone.utc) - timedelta(minutes=minutes),
|
||||
open_date=datetime.now(UTC) - timedelta(minutes=minutes),
|
||||
fee_open=fee.return_value,
|
||||
fee_close=fee.return_value,
|
||||
exchange=exchange,
|
||||
@@ -605,7 +605,7 @@ def test_calc_open_close_trade_price(
|
||||
stake_amount=60.0,
|
||||
open_rate=2.0,
|
||||
amount=30.0,
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=10),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=10),
|
||||
interest_rate=0.0005,
|
||||
fee_open=fee.return_value,
|
||||
fee_close=fee.return_value,
|
||||
@@ -812,7 +812,7 @@ def test_calc_open_trade_value(
|
||||
stake_amount=60.0,
|
||||
amount=30.0,
|
||||
open_rate=2.0,
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=10),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=10),
|
||||
fee_open=fee_rate,
|
||||
fee_close=fee_rate,
|
||||
exchange=exchange,
|
||||
@@ -863,7 +863,7 @@ def test_calc_close_trade_price(
|
||||
stake_amount=60.0,
|
||||
amount=30.0,
|
||||
open_rate=open_rate,
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=10),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=10),
|
||||
fee_open=fee_rate,
|
||||
fee_close=fee_rate,
|
||||
exchange=exchange,
|
||||
@@ -1164,7 +1164,7 @@ def test_calc_profit(
|
||||
stake_amount=60.0,
|
||||
amount=30.0 * lev,
|
||||
open_rate=2.0,
|
||||
open_date=datetime.now(tz=timezone.utc) - timedelta(minutes=10),
|
||||
open_date=datetime.now(tz=UTC) - timedelta(minutes=10),
|
||||
interest_rate=0.0005,
|
||||
exchange=exchange,
|
||||
is_short=is_short,
|
||||
@@ -1882,7 +1882,7 @@ def test_get_trades_proxy(fee, use_db, is_short):
|
||||
assert len(trades) == 2
|
||||
assert not trades[0].is_open
|
||||
|
||||
opendate = datetime.now(tz=timezone.utc) - timedelta(minutes=15)
|
||||
opendate = datetime.now(tz=UTC) - timedelta(minutes=15)
|
||||
|
||||
assert len(Trade.get_trades_proxy(open_date=opendate)) == 3
|
||||
|
||||
@@ -1989,7 +1989,7 @@ def test_fully_canceled_entry_order_count(fee, is_short):
|
||||
|
||||
@pytest.mark.usefixtures("init_persistence")
|
||||
def test_update_order_from_ccxt(caplog, time_machine):
|
||||
start = datetime(2023, 1, 1, 4, tzinfo=timezone.utc)
|
||||
start = datetime(2023, 1, 1, 4, tzinfo=UTC)
|
||||
time_machine.move_to(start, tick=False)
|
||||
|
||||
# Most basic order return (only has orderid)
|
||||
@@ -2172,7 +2172,7 @@ def test_trade_truncates_string_fields():
|
||||
stake_amount=20.0,
|
||||
amount=30.0,
|
||||
open_rate=2.0,
|
||||
open_date=datetime.now(timezone.utc) - timedelta(minutes=20),
|
||||
open_date=datetime.now(UTC) - timedelta(minutes=20),
|
||||
fee_open=0.001,
|
||||
fee_close=0.001,
|
||||
exchange="binance",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from datetime import UTC, datetime
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -182,7 +182,7 @@ def test_trade_fromjson():
|
||||
|
||||
assert trade.id == 25
|
||||
assert trade.pair == "ETH/USDT"
|
||||
assert trade.open_date_utc == datetime(2022, 10, 18, 9, 12, 42, tzinfo=timezone.utc)
|
||||
assert trade.open_date_utc == datetime(2022, 10, 18, 9, 12, 42, tzinfo=UTC)
|
||||
assert isinstance(trade.open_date, datetime)
|
||||
assert trade.exit_reason == "no longer good"
|
||||
assert trade.realized_profit == 2.76315361
|
||||
@@ -192,7 +192,7 @@ def test_trade_fromjson():
|
||||
|
||||
assert len(trade.orders) == 5
|
||||
last_o = trade.orders[-1]
|
||||
assert last_o.order_filled_utc == datetime(2022, 10, 18, 9, 45, 22, tzinfo=timezone.utc)
|
||||
assert last_o.order_filled_utc == datetime(2022, 10, 18, 9, 45, 22, tzinfo=UTC)
|
||||
assert isinstance(last_o.order_date, datetime)
|
||||
assert last_o.funding_fee == -0.055
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.