Mirror of https://github.com/freqtrade/freqtrade.git (synced 2026-01-20 14:00:38 +00:00)
Merge branch 'develop' into dependabot/pip/develop/scikit-learn-1.3.2
.github/workflows/ci.yml (vendored, 14 lines changed)
@@ -126,7 +126,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ macos-latest ]
os: [ "macos-latest", "macos-13" ]
python-version: ["3.9", "3.10", "3.11"]
steps:

@@ -143,14 +143,13 @@ jobs:
id: cache
with:
path: ~/dependencies/
key: ${{ runner.os }}-dependencies
key: ${{ matrix.os }}-dependencies
- name: pip cache (macOS)
uses: actions/cache@v3
if: runner.os == 'macOS'
with:
path: ~/Library/Caches/pip
key: test-${{ matrix.os }}-${{ matrix.python-version }}-pip
key: ${{ matrix.os }}-${{ matrix.python-version }}-pip
- name: TA binary *nix
if: steps.cache.outputs.cache-hit != 'true'

@@ -158,7 +157,6 @@ jobs:
cd build_helpers && ./install_ta-lib.sh ${HOME}/dependencies/; cd ..
- name: Installation - macOS
if: runner.os == 'macOS'
run: |
# brew update
# TODO: Should be the brew upgrade

@@ -175,7 +173,7 @@ jobs:
rm /usr/local/bin/python3-config || true
rm /usr/local/bin/python3.11-config || true
brew install hdf5 c-blosc
brew install hdf5 c-blosc libomp
python -m pip install --upgrade pip wheel
export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
export TA_LIBRARY_PATH=${HOME}/dependencies/lib

@@ -461,7 +459,7 @@ jobs:
python setup.py sdist bdist_wheel
- name: Publish to PyPI (Test)
uses: pypa/gh-action-pypi-publish@v1.8.10
uses: pypa/gh-action-pypi-publish@v1.8.11
if: (github.event_name == 'release')
with:
user: __token__

@@ -469,7 +467,7 @@ jobs:
repository_url: https://test.pypi.org/legacy/
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@v1.8.10
uses: pypa/gh-action-pypi-publish@v1.8.11
if: (github.event_name == 'release')
with:
user: __token__
@@ -1,4 +1,4 @@
FROM python:3.11.6-slim-bullseye as base
FROM python:3.11.6-slim-bookworm as base
# Setup env
ENV LANG C.UTF-8
@@ -28,6 +28,7 @@ hesitate to read the source code and understand the mechanism of this bot.
Please read the [exchange specific notes](docs/exchanges.md) to learn about eventual, special configurations needed for each exchange.
- [X] [Binance](https://www.binance.com/)
- [X] [Bitmart](https://bitmart.com/)
- [X] [Gate.io](https://www.gate.io/ref/6266643)
- [X] [Huobi](http://huobi.com/)
- [X] [Kraken](https://kraken.com/)
@@ -1,4 +1,4 @@
FROM python:3.9.16-slim-bullseye as base
FROM python:3.11.6-slim-bookworm as base
# Setup env
ENV LANG C.UTF-8

@@ -11,7 +11,7 @@ ENV FT_APP_ENV="docker"
# Prepare environment
RUN mkdir /freqtrade \
&& apt-get update \
&& apt-get -y install sudo libatlas3-base libopenblas-base curl sqlite3 libhdf5-dev libutf8proc-dev libsnappy-dev \
&& apt-get -y install sudo libatlas3-base libopenblas-dev curl sqlite3 libhdf5-dev libutf8proc-dev libsnappy-dev \
&& apt-get clean \
&& useradd -u 1000 -G sudo -U -m ftuser \
&& chown ftuser:ftuser /freqtrade \

@@ -24,7 +24,7 @@ WORKDIR /freqtrade
# Install dependencies
FROM base as python-deps
RUN apt-get update \
&& apt-get -y install build-essential libssl-dev libffi-dev libopenblas-dev libgfortran5 pkg-config cmake gcc \
&& apt-get -y install build-essential libssl-dev libffi-dev libgfortran5 pkg-config cmake gcc \
&& apt-get clean \
&& echo "[global]\nextra-index-url=https://www.piwheels.org/simple" > /etc/pip.conf
@@ -618,13 +618,13 @@ To compare multiple strategies, a list of Strategies can be provided to backtest
This is limited to 1 timeframe value per run. However, data is only loaded once from disk so if you have multiple
strategies you'd like to compare, this will give a nice runtime boost.
All listed Strategies need to be in the same directory.
All listed Strategies need to be in the same directory, unless also `--recursive-strategy-search` is specified, where sub-directories within the strategy directory are also considered.

``` bash
freqtrade backtesting --timerange 20180401-20180410 --timeframe 5m --strategy-list Strategy001 Strategy002 --export trades
```

This will save the results to `user_data/backtest_results/backtest-result-<strategy>.json`, injecting the strategy-name into the target filename.
This will save the results to `user_data/backtest_results/backtest-result-<datetime>.json`, including results for both `Strategy001` and `Strategy002`.
There will be an additional table comparing win/losses of the different strategies (identical to the "Total" row in the first table).
Detailed output for all strategies one after the other will be available, so make sure to scroll up to see the details per strategy.
@@ -419,6 +419,9 @@ This part of the documentation is aimed at maintainers, and shows how to create
### Create release branch

!!! Note
Make sure that the `stable` branch is up-to-date!

First, pick a commit that's about one week old (to not include latest additions to releases).

``` bash

@@ -431,14 +434,11 @@ Determine if crucial bugfixes have been made between this commit and the current
* Merge the release branch (stable) into this branch.
* Edit `freqtrade/__init__.py` and add the version matching the current date (for example `2019.7` for July 2019). Minor versions can be `2019.7.1` should we need to do a second release that month. Version numbers must follow allowed versions from PEP0440 to avoid failures pushing to pypi.
* Commit this part.
* push that branch to the remote and create a PR against the stable branch.
* Push that branch to the remote and create a PR against the **stable branch**.
* Update develop version to next version following the pattern `2019.8-dev`.

### Create changelog from git commits

!!! Note
Make sure that the `stable` branch is up-to-date!

``` bash
# Needs to be done before merging / pulling that branch.
git log --oneline --no-decorate --no-merges stable..new_release
```
@@ -302,6 +302,24 @@ We do strongly recommend to limit all API keys to the IP you're going to use it
Bybit (futures only) supports `stoploss_on_exchange` and uses `stop-loss-limit` orders. It provides great advantages, so we recommend to benefit from it by enabling stoploss on exchange.
On futures, Bybit supports both `stop-limit` as well as `stop-market` orders. You can use either `"limit"` or `"market"` in the `order_types.stoploss` configuration setting to decide which type to use.
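To make that choice concrete, a strategy can pin the stoploss order type explicitly. This is a minimal sketch assuming the standard `order_types` strategy attribute; the other entries are illustrative placeholders, so adapt them to your own setup:

```python
# Minimal sketch of a strategy-level order_types override for Bybit futures.
# The surrounding values are illustrative defaults, not a recommendation.
order_types = {
    "entry": "limit",
    "exit": "limit",
    "stoploss": "market",          # or "limit" - Bybit futures accepts both
    "stoploss_on_exchange": True,  # let Bybit manage the stop once the trade is open
}
```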
## Bitmart

Bitmart requires the API key Memo (the name you give the API key) to go along with the exchange key and secret.
It's therefore required to pass the UID as well.

```json
"exchange": {
"name": "bitmart",
"uid": "your_bitmart_api_key_memo",
"secret": "your_exchange_secret",
"password": "your_exchange_api_key_password",
// ...
}
```

!!! Warning "Necessary Verification"
Bitmart requires Verification Lvl2 to successfully trade on the spot market through the API - even though trading via UI works just fine with just Lvl1 verification.

## All exchanges

Should you experience constant errors with Nonce (like `InvalidNonce`), it is best to regenerate the API keys. Resetting Nonce is difficult and it's usually easier to regenerate the API keys.
@@ -128,12 +128,6 @@ This warning can point to one of the below problems:
* Barely traded pair -> Check the pair on the exchange webpage, look at the timeframe your strategy uses. If the pair does not have any volume in some candles (usually visualized with a "volume 0" bar, and a "_" as candle), this pair did not have any trades in this timeframe. These pairs should ideally be avoided, as they can cause problems with order-filling.
* API problem -> API returns wrong data (this only here for completeness, and should not happen with supported exchanges).

### I'm getting the "RESTRICTED_MARKET" message in the log

Currently known to happen for US Bittrex users.

Read [the Bittrex section about restricted markets](exchanges.md#restricted-markets) for more information.

### I'm getting the "Exchange XXX does not support market orders." message and cannot run my strategy

As the message says, your exchange does not support market orders and you have one of the [order types](configuration.md/#understand-order_types) set to "market". Your strategy was probably written with other exchanges in mind and sets "market" orders for "stoploss" orders, which is correct and preferable for most of the exchanges supporting market orders (but not for Bittrex and Gate.io).
@@ -40,6 +40,7 @@ Freqtrade is a free and open source crypto trading bot written in Python. It is
Please read the [exchange specific notes](exchanges.md) to learn about eventual, special configurations needed for each exchange.
- [X] [Binance](https://www.binance.com/)
- [X] [Bitmart](https://bitmart.com/)
- [X] [Gate.io](https://www.gate.io/ref/6266643)
- [X] [Huobi](http://huobi.com/)
- [X] [Kraken](https://kraken.com/)
@@ -1,6 +1,6 @@
markdown==3.5.1
mkdocs==1.5.3
mkdocs-material==9.4.10
mkdocs-material==9.4.14
mdx_truly_sane_lists==1.3
pymdown-extensions==10.4
pymdown-extensions==10.5
jinja2==3.1.2
@@ -134,9 +134,9 @@ python3 scripts/rest_client.py --config rest_config.json <command> [optional param
| `reload_config` | Reloads the configuration file.
| `trades` | List last trades. Limited to 500 trades per call.
| `trade/<tradeid>` | Get specific trade.
| `trade/<tradeid>` | DELETE - Remove trade from the database. Tries to close open orders. Requires manual handling of this trade on the exchange.
| `trade/<tradeid>/open-order` | DELETE - Cancel open order for this trade.
| `trade/<tradeid>/reload` | GET - Reload a trade from the Exchange. Only works in live, and can potentially help recover a trade that was manually sold on the exchange.
| `trades/<tradeid>` | DELETE - Remove trade from the database. Tries to close open orders. Requires manual handling of this trade on the exchange.
| `trades/<tradeid>/open-order` | DELETE - Cancel open order for this trade.
| `trades/<tradeid>/reload` | GET - Reload a trade from the Exchange. Only works in live, and can potentially help recover a trade that was manually sold on the exchange.
| `show_config` | Shows part of the current configuration with relevant settings to operation.
| `logs` | Shows last log messages.
| `status` | Lists all open trades.
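For reference, the renamed `trades/<tradeid>` routes can also be exercised over plain HTTP. The sketch below is illustrative only: the host, port, `/api/v1` prefix and basic-auth credentials are assumptions based on a typical `api_server` configuration, so substitute your own values.

```python
# Hypothetical connection details - replace with your own api_server settings.
import requests

BASE = "http://127.0.0.1:8080/api/v1"
AUTH = ("freqtrader", "SuperSecretPassword")  # api_server.username / api_server.password

# List recent trades, then remove trade 2 after cancelling its open order.
print(requests.get(f"{BASE}/trades", auth=AUTH, timeout=10).json())
requests.delete(f"{BASE}/trades/2/open-order", auth=AUTH, timeout=10)
requests.delete(f"{BASE}/trades/2", auth=AUTH, timeout=10)
```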
@@ -175,6 +175,7 @@ official commands. You can ask at any moment for help with `/help`.
| `/status` | Lists all open trades
| `/status <trade_id>` | Lists one or more specific trade. Separate multiple <trade_id> with a blank space.
| `/status table` | List all open trades in a table format. Pending buy orders are marked with an asterisk (*) Pending sell orders are marked with a double asterisk (**)
| `/order <trade_id>` | Lists orders of one or more specific trade. Separate multiple <trade_id> with a blank space.
| `/trades [limit]` | List all recently closed trades in a table format.
| `/count` | Displays number of trades used and available
| `/locks` | Show currently locked pairs.
@@ -1,5 +1,5 @@
""" Freqtrade bot """
__version__ = '2023.11-dev'
__version__ = '2023.12-dev'
if 'dev' in __version__:
from pathlib import Path
@@ -4,6 +4,7 @@ from freqtrade.exchange.common import remove_exchange_credentials, MAP_EXCHANGE_
from freqtrade.exchange.exchange import Exchange
# isort: on
from freqtrade.exchange.binance import Binance
from freqtrade.exchange.bitmart import Bitmart
from freqtrade.exchange.bitpanda import Bitpanda
from freqtrade.exchange.bittrex import Bittrex
from freqtrade.exchange.bitvavo import Bitvavo
freqtrade/exchange/bitmart.py (new file, 20 lines)
@@ -0,0 +1,20 @@
""" Bitmart exchange subclass """
import logging
from typing import Dict

from freqtrade.exchange import Exchange

logger = logging.getLogger(__name__)

class Bitmart(Exchange):
"""
Bitmart exchange class. Contains adjustments needed for Freqtrade to work
with this exchange.
"""
_ft_has: Dict = {
"stoploss_on_exchange": False, # Bitmart API does not support stoploss orders
"ohlcv_candle_limit": 200,
}
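As a quick sanity check that the new subclass is wired up, it is importable from `freqtrade.exchange` (see the `__init__.py` hunk above) and carries the two `_ft_has` adjustments shown. A rough sketch, assuming a checkout of this development branch is installed:

```python
# Rough sanity check against the new subclass; requires this development branch installed.
from freqtrade.exchange import Bitmart, Exchange

assert issubclass(Bitmart, Exchange)
assert Bitmart._ft_has["ohlcv_candle_limit"] == 200       # Bitmart returns at most 200 candles
assert Bitmart._ft_has["stoploss_on_exchange"] is False   # no exchange-side stoploss support
```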
@@ -29,6 +29,7 @@ class Bybit(Exchange):
_ft_has: Dict = {
"ohlcv_candle_limit": 1000,
"ohlcv_has_history": True,
"order_time_in_force": ["GTC", "FOK", "IOC", "PO"],
}
_ft_has_futures: Dict = {
"ohlcv_has_history": True,
@@ -52,6 +52,7 @@ MAP_EXCHANGE_CHILDCLASS = {

SUPPORTED_EXCHANGES = [
'binance',
'bitmart',
'gate',
'huobi',
'kraken',
@@ -486,11 +486,14 @@ class Exchange:
except ccxt.BaseError:
logger.exception('Unable to initialize markets.')
def reload_markets(self) -> None:
def reload_markets(self, force: bool = False) -> None:
"""Reload markets both sync and async if refresh interval has passed """
# Check whether markets have to be reloaded
if (self._last_markets_refresh > 0) and (
self._last_markets_refresh + self.markets_refresh_interval > dt_ts()):
if (
not force
and self._last_markets_refresh > 0
and (self._last_markets_refresh + self.markets_refresh_interval > dt_ts())
):
return None
logger.debug("Performing scheduled market reload..")
try:
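The new `force` flag simply bypasses the refresh-interval guard. The standalone sketch below mirrors only that timing logic; `dt_ts()` is emulated as a millisecond timestamp helper, so treat the names as stand-ins rather than freqtrade internals:

```python
# Standalone illustration of the reworked guard in reload_markets(); not the real
# Exchange class, just the skip condition.
import time


def dt_ts() -> int:
    """Current time as a millisecond timestamp (stand-in for freqtrade's helper)."""
    return int(time.time() * 1000)


def should_skip_reload(last_refresh: int, refresh_interval_ms: int, force: bool) -> bool:
    # Skip only when not forced, a refresh already happened, and the interval has not elapsed.
    return (
        not force
        and last_refresh > 0
        and last_refresh + refresh_interval_ms > dt_ts()
    )


# A forced call (e.g. after ccxt.BadSymbol, see the get_tickers() hunk below) never skips.
assert should_skip_reload(dt_ts(), 60_000, force=True) is False
assert should_skip_reload(dt_ts(), 60_000, force=False) is True
```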
@@ -1228,16 +1231,16 @@ class Exchange:
return order
except ccxt.InsufficientFunds as e:
raise InsufficientFundsError(
f'Insufficient funds to create {ordertype} sell order on market {pair}. '
f'Tried to sell amount {amount} at rate {limit_rate}. '
f'Message: {e}') from e
except ccxt.InvalidOrder as e:
f'Insufficient funds to create {ordertype} {side} order on market {pair}. '
f'Tried to {side} amount {amount} at rate {limit_rate} with '
f'stop-price {stop_price_norm}. Message: {e}') from e
except (ccxt.InvalidOrder, ccxt.BadRequest) as e:
# Errors:
# `Order would trigger immediately.`
raise InvalidOrderException(
f'Could not create {ordertype} sell order on market {pair}. '
f'Tried to sell amount {amount} at rate {limit_rate}. '
f'Message: {e}') from e
f'Could not create {ordertype} {side} order on market {pair}. '
f'Tried to {side} amount {amount} at rate {limit_rate} with '
f'stop-price {stop_price_norm}. Message: {e}') from e
except ccxt.DDoSProtection as e:
raise DDosProtection(e) from e
except (ccxt.NetworkError, ccxt.ExchangeError) as e:

@@ -1496,8 +1499,9 @@ class Exchange:
@retrier
def fetch_bids_asks(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Dict:
"""
:param symbols: List of symbols to fetch
:param cached: Allow cached result
:return: fetch_tickers result
:return: fetch_bids_asks result
"""
if not self.exchange_has('fetchBidsAsks'):
return {}

@@ -1546,6 +1550,12 @@ class Exchange:
raise OperationalException(
f'Exchange {self._api.name} does not support fetching tickers in batch. '
f'Message: {e}') from e
except ccxt.BadSymbol as e:
logger.warning(f"Could not load tickers due to {e.__class__.__name__}. Message: {e} ."
"Reloading markets.")
self.reload_markets(True)
# Re-raise exception to repeat the call.
raise TemporaryError from e
except ccxt.DDoSProtection as e:
raise DDosProtection(e) from e
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
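The `BadSymbol` branch leans on the surrounding `@retrier` decorator: markets are force-reloaded, then a `TemporaryError` is raised so the decorated call runs again against fresh market data. A loose sketch of that interplay with a generic retry loop (not freqtrade's actual decorator):

```python
# Loose illustration of the reload-then-retry pattern; the retry() helper here is
# a generic stand-in for freqtrade's @retrier decorator.
class TemporaryError(Exception):
    pass


def retry(func, attempts: int = 3):
    for attempt in range(attempts):
        try:
            return func()
        except TemporaryError:
            if attempt == attempts - 1:
                raise


calls = {"n": 0}


def fetch_tickers():
    calls["n"] += 1
    if calls["n"] == 1:
        # first call hits the stale-market case; real code reloads markets here
        raise TemporaryError()
    return {}  # second call succeeds with refreshed markets


assert retry(fetch_tickers) == {}
```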
@@ -12,7 +12,6 @@ import numpy as np
import pandas as pd
import psutil
import rapidjson
from joblib import dump, load
from joblib.externals import cloudpickle
from numpy.typing import NDArray
from pandas import DataFrame

@@ -285,6 +284,10 @@ class FreqaiDataDrawer:
new_pred["date_pred"] = dataframe["date"]
hist_preds = self.historic_predictions[pair].copy()
# ensure both dataframes have the same date format so they can be merged
new_pred["date_pred"] = pd.to_datetime(new_pred["date_pred"])
hist_preds["date_pred"] = pd.to_datetime(hist_preds["date_pred"])
# find the closest common date between new_pred and historic predictions
# and cut off the new_pred dataframe at that date
common_dates = pd.merge(new_pred, hist_preds, on="date_pred", how="inner")

@@ -295,7 +298,9 @@ class FreqaiDataDrawer:
"predictions. You likely left your FreqAI instance offline "
f"for more than {len(dataframe.index)} candles.")
df_concat = pd.concat([hist_preds, new_pred], ignore_index=True, keys=hist_preds.keys())
# reindex new_pred columns to match the historic predictions dataframe
new_pred_reindexed = new_pred.reindex(columns=hist_preds.columns)
df_concat = pd.concat([hist_preds, new_pred_reindexed], ignore_index=True)
# any missing values will get zeroed out so users can see the exact
# downtime in FreqUI
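Replacing the `keys=hist_preds.keys()` concat with an explicit `reindex` guarantees the new predictions carry exactly the historic column set before the frames are stacked; columns the new frame lacks come back as NaN and are zeroed afterwards. A small self-contained pandas example of that alignment (column names invented):

```python
# Self-contained illustration of the reindex-then-concat alignment; column names are invented.
import pandas as pd

hist_preds = pd.DataFrame({"&-target": [0.1, 0.2], "DI_values": [1.0, 1.1]})
new_pred = pd.DataFrame({"&-target": [0.3]})  # missing the DI_values column

aligned = new_pred.reindex(columns=hist_preds.columns)         # DI_values becomes NaN
combined = pd.concat([hist_preds, aligned], ignore_index=True)

print(combined.fillna(0))  # missing values zeroed, mirroring the FreqUI downtime marker
```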
@@ -318,9 +323,9 @@ class FreqaiDataDrawer:
index = self.historic_predictions[pair].index[-1:]
columns = self.historic_predictions[pair].columns
nan_df = pd.DataFrame(np.nan, index=index, columns=columns)
zeros_df = pd.DataFrame(np.zeros, index=index, columns=columns)
self.historic_predictions[pair] = pd.concat(
[self.historic_predictions[pair], nan_df], ignore_index=True, axis=0)
[self.historic_predictions[pair], zeros_df], ignore_index=True, axis=0)
df = self.historic_predictions[pair]
# model outputs and associated statistics

@@ -471,7 +476,8 @@ class FreqaiDataDrawer:
# Save the trained model
if self.model_type == 'joblib':
dump(model, save_path / f"{dk.model_filename}_model.joblib")
with (save_path / f"{dk.model_filename}_model.joblib").open("wb") as fp:
cloudpickle.dump(model, fp)
elif self.model_type == 'keras':
model.save(save_path / f"{dk.model_filename}_model.h5")
elif self.model_type in ["stable_baselines3", "sb3_contrib", "pytorch"]:

@@ -558,7 +564,8 @@ class FreqaiDataDrawer:
if dk.live and coin in self.model_dictionary:
model = self.model_dictionary[coin]
elif self.model_type == 'joblib':
model = load(dk.data_path / f"{dk.model_filename}_model.joblib")
with (dk.data_path / f"{dk.model_filename}_model.joblib").open("rb") as fp:
model = cloudpickle.load(fp)
elif 'stable_baselines' in self.model_type or 'sb3_contrib' == self.model_type:
mod = importlib.import_module(
self.model_type, self.freqai_info['rl_config']['model_type'])
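With the `'joblib'` model type now serialized through `cloudpickle` instead of `joblib.dump`/`load`, any picklable estimator follows the same open-write/open-read pattern. A minimal round-trip sketch (the model object and path are stand-ins):

```python
# Minimal cloudpickle round-trip mirroring the save/load pattern above; the "model"
# is a stand-in object and the path is hypothetical.
from pathlib import Path

from joblib.externals import cloudpickle

model = {"params": [1, 2, 3]}  # any picklable estimator works the same way
save_path = Path("/tmp/example_model.joblib")

with save_path.open("wb") as fp:
    cloudpickle.dump(model, fp)

with save_path.open("rb") as fp:
    restored = cloudpickle.load(fp)

assert restored == model
```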
@@ -45,7 +45,7 @@ class XGBoostRFRegressor(BaseRegressionModel):
model = XGBRFRegressor(**self.model_training_parameters)
model.set_params(callbacks=[TBCallback(dk.data_path)], activate=self.activate_tensorboard)
model.set_params(callbacks=[TBCallback(dk.data_path)])
model.fit(X=X, y=y, sample_weight=sample_weight, eval_set=eval_set,
sample_weight_eval_set=eval_weights, xgb_model=xgb_model)
# set the callbacks to empty so that we can serialize to disk later

@@ -45,7 +45,7 @@ class XGBoostRegressor(BaseRegressionModel):
model = XGBRegressor(**self.model_training_parameters)
model.set_params(callbacks=[TBCallback(dk.data_path)], activate=self.activate_tensorboard)
model.set_params(callbacks=[TBCallback(dk.data_path)])
model.fit(X=X, y=y, sample_weight=sample_weight, eval_set=eval_set,
sample_weight_eval_set=eval_weights, xgb_model=xgb_model)
# set the callbacks to empty so that we can serialize to disk later
@@ -3,7 +3,6 @@ from typing import Any, Dict, Type, Union
from stable_baselines3.common.callbacks import BaseCallback
from stable_baselines3.common.logger import HParam
from stable_baselines3.common.vec_env import VecEnv
from freqtrade.freqai.RL.BaseEnvironment import BaseActions

@@ -13,13 +12,9 @@ class TensorboardCallback(BaseCallback):
Custom callback for plotting additional values in tensorboard and
episodic summary reports.
"""
# Override training_env type to fix type errors
training_env: Union[VecEnv, None] = None
def __init__(self, verbose=1, actions: Type[Enum] = BaseActions):
super().__init__(verbose)
self.model: Any = None
self.logger: Any = None
self.actions: Type[Enum] = actions
def _on_training_start(self) -> None:

@@ -47,8 +42,6 @@ class TensorboardCallback(BaseCallback):
def _on_step(self) -> bool:
local_info = self.locals["infos"][0]
if self.training_env is None:
return True
if hasattr(self.training_env, 'envs'):
tensorboard_metrics = self.training_env.envs[0].unwrapped.tensorboard_metrics
@@ -147,7 +147,9 @@ class Backtesting:
if self.config.get('freqai', {}).get('enabled', False):
# For FreqAI, increase the required_startup to includes the training data
self.required_startup = self.dataprovider.get_required_startup(self.timeframe)
self.freqai_startup_candles = self.dataprovider.get_required_startup(
self.timeframe
)
# Add maximum startup candle count to configuration for informative pairs support
self.config['startup_candle_count'] = self.required_startup

@@ -234,12 +236,17 @@ class Backtesting:
"""
self.progress.init_step(BacktestState.DATALOAD, 1)
if self.config.get('freqai', {}).get('enabled', False):
startup_candle_count = self.freqai_startup_candles
else:
startup_candle_count = self.config['startup_candle_count']
data = history.load_data(
datadir=self.config['datadir'],
pairs=self.pairlists.whitelist,
timeframe=self.timeframe,
timerange=self.timerange,
startup_candles=self.config['startup_candle_count'],
startup_candles=startup_candle_count,
fail_without_data=True,
data_format=self.config['dataformat_ohlcv'],
candle_type=self.config.get('candle_type_def', CandleType.SPOT)
@@ -1066,7 +1066,10 @@ class LocalTrade:
exit_amount = o.safe_amount_after_fee
prof = self.calculate_profit(exit_rate, exit_amount, float(avg_price))
close_profit_abs += prof.profit_abs
close_profit = prof.profit_ratio
if total_stake > 0:
# This needs to be calculated based on the last occuring exit to be aligned
# with realized_profit.
close_profit = (close_profit_abs / total_stake) * self.leverage
else:
total_stake = total_stake + self._calc_open_trade_value(tmp_amount, price)
max_stake_amount += (tmp_amount * price)
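The updated expectations in the persistence tests later in this commit follow directly from this change: the relative profit is now cumulative realized profit over total stake, scaled by leverage, instead of the ratio of the last exit alone. A worked example using the figures of the first parametrized case (entries of 100 @ 10 and 100 @ 15, so a total stake of 2500):

```python
# Worked example of close_profit = (close_profit_abs / total_stake) * leverage,
# using the numbers from the first test_order_to_ccxt parametrization below.
total_stake = 1000.0 + 1500.0   # two entries: 100 @ 10 and 100 @ 15
leverage = 1.0

for close_profit_abs in (-25.0, 725.0, 350.0):
    close_profit = (close_profit_abs / total_stake) * leverage
    print(round(close_profit, 4))  # -0.01, 0.29, 0.14 - the new rel_profit values
```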
@@ -21,6 +21,7 @@ from freqtrade.misc import pair_to_filename
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
from freqtrade.strategy import IStrategy
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper

logger = logging.getLogger(__name__)

@@ -636,7 +637,7 @@ def load_and_plot_trades(config: Config):
exchange = ExchangeResolver.load_exchange(config)
IStrategy.dp = DataProvider(config, exchange)
strategy.ft_bot_start()
strategy.bot_loop_start(datetime.now(timezone.utc))
strategy_safe_wrapper(strategy.bot_loop_start)(current_time=datetime.now(timezone.utc))
plot_elements = init_plotscript(config, list(exchange.markets), strategy.startup_candle_count)
timerange = plot_elements['timerange']
trades = plot_elements['trades']
@@ -15,6 +15,7 @@ class Discord(Webhook):
self.rpc = rpc
self.strategy = config.get('strategy', '')
self.timeframe = config.get('timeframe', '')
self.bot_name = config.get('bot_name', '')
self._url = config['discord']['webhook_url']
self._format = 'json'

@@ -36,6 +37,7 @@ class Discord(Webhook):
msg['strategy'] = self.strategy
msg['timeframe'] = self.timeframe
msg['bot_name'] = self.bot_name
color = 0x0000FF
if msg['type'] in (RPCMessageType.EXIT, RPCMessageType.EXIT_FILL):
profit_ratio = msg.get('profit_ratio')
@@ -223,7 +223,8 @@ class Telegram(RPCHandler):
CommandHandler('health', self._health),
CommandHandler('help', self._help),
CommandHandler('version', self._version),
CommandHandler('marketdir', self._changemarketdir)
CommandHandler('marketdir', self._changemarketdir),
CommandHandler('order', self._order),
]
callbacks = [
CallbackQueryHandler(self._status_table, pattern='update_status_table'),

@@ -555,6 +556,47 @@ class Telegram(RPCHandler):
return lines_detail
@authorized_only
async def _order(self, update: Update, context: CallbackContext) -> None:
"""
Handler for /order.
Returns the orders of the trade
:param bot: telegram bot
:param update: message update
:return: None
"""
trade_ids = []
if context.args and len(context.args) > 0:
trade_ids = [int(i) for i in context.args if i.isnumeric()]
results = self._rpc._rpc_trade_status(trade_ids=trade_ids)
for r in results:
lines = [
"*Order List for Trade #*`{trade_id}`"
]
lines_detail = self._prepare_order_details(
r['orders'], r['quote_currency'], r['is_open'])
lines.extend(lines_detail if lines_detail else "")
await self.__send_order_msg(lines, r)
async def __send_order_msg(self, lines: List[str], r: Dict[str, Any]) -> None:
"""
Send status message.
"""
msg = ''
for line in lines:
if line:
if (len(msg) + len(line) + 1) < MAX_MESSAGE_LENGTH:
msg += line + '\n'
else:
await self._send_msg(msg.format(**r))
msg = "*Order List for Trade #*`{trade_id}` - continued\n" + line + '\n'
await self._send_msg(msg.format(**r))
@authorized_only
async def _status(self, update: Update, context: CallbackContext) -> None:
"""

@@ -652,9 +694,6 @@ class Telegram(RPCHandler):
"*Open Order:* `{open_orders}`"
+ ("- `{exit_order_status}`" if r['exit_order_status'] else ""))
lines_detail = self._prepare_order_details(
r['orders'], r['quote_currency'], r['is_open'])
lines.extend(lines_detail if lines_detail else "")
await self.__send_status_msg(lines, r)
async def __send_status_msg(self, lines: List[str], r: Dict[str, Any]) -> None:
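`__send_order_msg` splits the reply whenever appending the next line would exceed Telegram's `MAX_MESSAGE_LENGTH`, starting each follow-up message with a `- continued` header. The same chunking logic in isolation, with the limit shrunk so the split is easy to see:

```python
# Isolated sketch of the line-based chunking used by __send_order_msg;
# MAX_MESSAGE_LENGTH is shrunk here for demonstration purposes.
MAX_MESSAGE_LENGTH = 60


def chunk_lines(lines, header="*Order List for Trade #*`{trade_id}` - continued\n"):
    messages, msg = [], ""
    for line in lines:
        if not line:
            continue
        if (len(msg) + len(line) + 1) < MAX_MESSAGE_LENGTH:
            msg += line + "\n"
        else:
            messages.append(msg)
            msg = header + line + "\n"
    messages.append(msg)
    return messages


lines = ["*Order List for Trade #*`{trade_id}`", "order 1: buy 0.5 BTC @ 40000", "order 2: sell 0.5 BTC @ 41000"]
for part in chunk_lines(lines):
    print(part.format(trade_id=2))
```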
@@ -36,7 +36,7 @@ def merge_informative_pair(dataframe: pd.DataFrame, informative: pd.DataFrame,
:return: Merged dataframe
:raise: ValueError if the secondary timeframe is shorter than the dataframe timeframe
"""
informative = informative.copy()
minutes_inf = timeframe_to_minutes(timeframe_inf)
minutes = timeframe_to_minutes(timeframe)
if minutes == minutes_inf:

@@ -46,10 +46,16 @@ def merge_informative_pair(dataframe: pd.DataFrame, informative: pd.DataFrame,
# Subtract "small" timeframe so merging is not delayed by 1 small candle
# Detailed explanation in https://github.com/freqtrade/freqtrade/issues/4073
if not informative.empty:
informative['date_merge'] = (
informative[date_column] + pd.to_timedelta(minutes_inf, 'm') -
pd.to_timedelta(minutes, 'm')
)
if timeframe_inf == '1M':
informative['date_merge'] = (
(informative[date_column] + pd.offsets.MonthBegin(1))
- pd.to_timedelta(minutes, 'm')
)
else:
informative['date_merge'] = (
informative[date_column] + pd.to_timedelta(minutes_inf, 'm') -
pd.to_timedelta(minutes, 'm')
)
else:
informative['date_merge'] = informative[date_column]
else:

@@ -80,9 +86,6 @@ def merge_informative_pair(dataframe: pd.DataFrame, informative: pd.DataFrame,
right_on=date_merge, how='left')
dataframe = dataframe.drop(date_merge, axis=1)
# if ffill:
# dataframe = dataframe.ffill()
return dataframe
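Monthly candles have no fixed minute length, so the merge date is now rolled forward to the first day of the following month and then pulled back by one candle of the smaller timeframe. A short pandas check of that offset arithmetic, with dates chosen to match the monthly test added later in this commit:

```python
# Offset arithmetic behind the '1M' branch: a November monthly candle becomes
# mergeable from the last 1h candle of November onwards.
import pandas as pd

candle_start = pd.Timestamp("2022-11-01 00:00:00", tz="UTC")  # 1M informative candle
minutes = 60                                                  # base timeframe: 1h

date_merge = (candle_start + pd.offsets.MonthBegin(1)) - pd.to_timedelta(minutes, "m")
print(date_merge)  # 2022-11-30 23:00:00+00:00, matching the first merged candle in the test
```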
@@ -290,9 +290,6 @@ class FreqaiExampleStrategy(IStrategy):
return df
def get_ticker_indicator(self):
return int(self.config["timeframe"][:-1])
def confirm_trade_entry(
self,
pair: str,
@@ -8,7 +8,7 @@
coveralls==3.3.1
ruff==0.1.6
mypy==1.7.0
mypy==1.7.1
pre-commit==3.5.0
pytest==7.4.3
pytest-asyncio==0.21.1

@@ -20,7 +20,7 @@ isort==5.12.0
time-machine==2.13.0
# Convert jupyter notebooks to markdown documents
nbconvert==7.11.0
nbconvert==7.12.0
# mypy types
types-cachetools==5.3.0.7
@@ -5,7 +5,7 @@
torch==2.0.1
#until these branches will be released we can use this
gymnasium==0.29.1
stable_baselines3==2.1.0
stable_baselines3==2.2.1
sb3_contrib>=2.0.0a9
# Progress bar for stable-baselines3 and sb3-contrib
tqdm==4.66.1
@@ -2,11 +2,11 @@ numpy==1.26.2
pandas==2.1.3
pandas-ta==0.3.14b
ccxt==4.1.57
cryptography==41.0.5
aiohttp==3.9.0
ccxt==4.1.76
cryptography==41.0.7
aiohttp==3.9.1
SQLAlchemy==2.0.23
python-telegram-bot==20.6
python-telegram-bot==20.7
# can't be hard-pinned due to telegram-bot pinning httpx with ~
httpx>=0.24.1
arrow==1.3.0

@@ -37,7 +37,7 @@ sdnotify==0.3.2
# API Server
fastapi==0.104.1
pydantic==2.5.1
pydantic==2.5.2
uvicorn==0.24.0.post1
pyjwt==2.8.0
aiofiles==23.2.1
@@ -87,11 +87,15 @@ def get_args(args):
def generate_test_data(timeframe: str, size: int, start: str = '2020-07-05'):
np.random.seed(42)
tf_mins = timeframe_to_minutes(timeframe)
base = np.random.normal(20, 2, size=size)
date = pd.date_range(start, periods=size, freq=f'{tf_mins}min', tz='UTC')
if timeframe == '1M':
date = pd.date_range(start, periods=size, freq='1MS', tz='UTC')
elif timeframe == '1w':
date = pd.date_range(start, periods=size, freq='1W-MON', tz='UTC')
else:
tf_mins = timeframe_to_minutes(timeframe)
date = pd.date_range(start, periods=size, freq=f'{tf_mins}min', tz='UTC')
df = pd.DataFrame({
'date': date,
'open': base,
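The helper now maps the non-minute timeframes to pandas frequency aliases: `'1MS'` (month start) for `1M` and `'1W-MON'` (weeks anchored on Monday) for `1w`. A quick look at the date ranges those aliases produce:

```python
# Quick check of the pandas frequency aliases used for the new test timeframes.
import pandas as pd

print(pd.date_range('2022-11-01', periods=3, freq='1MS', tz='UTC'))
# month starts: 2022-11-01, 2022-12-01, 2023-01-01

print(pd.date_range('2022-11-01', periods=3, freq='1W-MON', tz='UTC'))
# weekly candles anchored on Mondays: 2022-11-07, 2022-11-14, 2022-11-21
```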
@@ -24,7 +24,7 @@ from tests.conftest import (EXMS, generate_test_data_raw, get_mock_coro, get_pat
# Make sure to always keep one exchange here which is NOT subclassed!!
EXCHANGES = ['bittrex', 'binance', 'kraken', 'gate', 'kucoin', 'bybit', 'okx']
EXCHANGES = ['binance', 'kraken', 'gate', 'kucoin', 'bybit', 'okx']
get_entry_rate_data = [
('other', 20, 19, 10, 0.0, 20), # Full ask side

@@ -1851,7 +1851,7 @@ def test_fetch_bids_asks(default_conf, mocker):
@pytest.mark.parametrize("exchange_name", EXCHANGES)
def test_get_tickers(default_conf, mocker, exchange_name):
def test_get_tickers(default_conf, mocker, exchange_name, caplog):
api_mock = MagicMock()
tick = {'ETH/BTC': {
'symbol': 'ETH/BTC',

@@ -1900,6 +1900,14 @@ def test_get_tickers(default_conf, mocker, exchange_name):
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
exchange.get_tickers()
caplog.clear()
api_mock.fetch_tickers = MagicMock(side_effect=[ccxt.BadSymbol("SomeSymbol"), []])
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
x = exchange.get_tickers()
assert x == []
assert log_has_re(r'Could not load tickers due to BadSymbol\..*SomeSymbol', caplog)
caplog.clear()
api_mock.fetch_tickers = MagicMock(return_value={})
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
exchange.get_tickers()

@@ -1969,6 +1977,34 @@ def test_fetch_ticker(default_conf, mocker, exchange_name):
exchange.fetch_ticker(pair='XRP/ETH')
@pytest.mark.parametrize("exchange_name", EXCHANGES)
def test___now_is_time_to_refresh(default_conf, mocker, exchange_name, time_machine):
exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)
pair = 'BTC/USDT'
candle_type = CandleType.SPOT
start_dt = datetime(2023, 12, 1, 0, 10, 0, tzinfo=timezone.utc)
time_machine.move_to(start_dt, tick=False)
assert (pair, '5m', candle_type) not in exchange._pairs_last_refresh_time
# not refreshed yet
assert exchange._now_is_time_to_refresh(pair, '5m', candle_type) is True
last_closed_candle = (start_dt - timedelta(minutes=5)).timestamp()
exchange._pairs_last_refresh_time[(pair, '5m', candle_type)] = last_closed_candle
# next candle not closed yet
time_machine.move_to(start_dt + timedelta(minutes=4, seconds=59), tick=False)
assert exchange._now_is_time_to_refresh(pair, '5m', candle_type) is False
# next candle closed
time_machine.move_to(start_dt + timedelta(minutes=5, seconds=0), tick=False)
assert exchange._now_is_time_to_refresh(pair, '5m', candle_type) is True
# 1 second later (last_refresh_time didn't change)
time_machine.move_to(start_dt + timedelta(minutes=5, seconds=1), tick=False)
assert exchange._now_is_time_to_refresh(pair, '5m', candle_type) is True
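The new test pins down the refresh rule: a pair is due for a refresh once the candle following the last stored one has fully closed. A simplified approximation of that condition (the real method works on millisecond timestamps per pair, timeframe and candle type, so this is for intuition only):

```python
# Simplified approximation of the refresh rule exercised by the test above.
from datetime import datetime, timedelta, timezone
from typing import Optional


def now_is_time_to_refresh(last_candle_open: Optional[datetime],
                           timeframe_minutes: int, now: datetime) -> bool:
    if last_candle_open is None:  # pair never refreshed -> refresh immediately
        return True
    next_candle_close = last_candle_open + timedelta(minutes=2 * timeframe_minutes)
    return now >= next_candle_close


start = datetime(2023, 12, 1, 0, 10, tzinfo=timezone.utc)
last_open = start - timedelta(minutes=5)  # the candle stored by the test
assert now_is_time_to_refresh(last_open, 5, start + timedelta(minutes=4, seconds=59)) is False
assert now_is_time_to_refresh(last_open, 5, start + timedelta(minutes=5)) is True
```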
@pytest.mark.parametrize("exchange_name", EXCHANGES)
@pytest.mark.parametrize('candle_type', ['mark', ''])
def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_type):

@@ -3865,11 +3901,11 @@ def test_set_margin_mode(mocker, default_conf, margin_mode):
("kraken", TradingMode.SPOT, None, False),
("kraken", TradingMode.MARGIN, MarginMode.ISOLATED, True),
("kraken", TradingMode.FUTURES, MarginMode.ISOLATED, True),
("bittrex", TradingMode.SPOT, None, False),
("bittrex", TradingMode.MARGIN, MarginMode.CROSS, True),
("bittrex", TradingMode.MARGIN, MarginMode.ISOLATED, True),
("bittrex", TradingMode.FUTURES, MarginMode.CROSS, True),
("bittrex", TradingMode.FUTURES, MarginMode.ISOLATED, True),
("bitmart", TradingMode.SPOT, None, False),
("bitmart", TradingMode.MARGIN, MarginMode.CROSS, True),
("bitmart", TradingMode.MARGIN, MarginMode.ISOLATED, True),
("bitmart", TradingMode.FUTURES, MarginMode.CROSS, True),
("bitmart", TradingMode.FUTURES, MarginMode.ISOLATED, True),
("gate", TradingMode.MARGIN, MarginMode.ISOLATED, True),
("okx", TradingMode.SPOT, None, False),
("okx", TradingMode.MARGIN, MarginMode.CROSS, True),
@@ -41,14 +41,14 @@ def test_check_exchange(default_conf, caplog) -> None:
caplog.clear()
# Test an officially supported by Freqtrade team exchange - with remapping
default_conf.get('exchange').update({'name': 'okex'})
default_conf.get('exchange').update({'name': 'okx'})
assert check_exchange(default_conf)
assert log_has_re(
r"Exchange \"okex\" is officially supported by the Freqtrade development team\.",
r"Exchange \"okx\" is officially supported by the Freqtrade development team\.",
caplog)
caplog.clear()
# Test an available exchange, supported by ccxt
default_conf.get('exchange').update({'name': 'huobipro'})
default_conf.get('exchange').update({'name': 'huobijp'})
assert check_exchange(default_conf)
assert log_has_re(r"Exchange .* is known to the the ccxt library, available for the bot, "
r"but not officially supported "
@@ -14,14 +14,6 @@ EXCHANGE_FIXTURE_TYPE = Tuple[Exchange, str]
# Exchanges that should be tested online
EXCHANGES = {
'bittrex': {
'pair': 'BTC/USDT',
'stake_currency': 'USDT',
'hasQuoteVolume': False,
'timeframe': '1h',
'leverage_tiers_public': False,
'leverage_in_spot_market': False,
},
'binance': {
'pair': 'BTC/USDT',
'stake_currency': 'USDT',

@@ -227,6 +219,7 @@ EXCHANGES = {
'timeframe': '1h',
'futures_pair': 'BTC/USDT:USDT',
'futures': True,
'orderbook_max_entries': 50,
'leverage_tiers_public': True,
'leverage_in_spot_market': True,
'sample_order': [

@@ -247,6 +240,13 @@ EXCHANGES = {
}
]
},
'bitmart': {
'pair': 'BTC/USDT',
'stake_currency': 'USDT',
'hasQuoteVolume': True,
'timeframe': '1h',
'orderbook_max_entries': 50,
},
'huobi': {
'pair': 'ETH/BTC',
'stake_currency': 'BTC',
@@ -133,6 +133,7 @@ class TestCCXTExchange:
exch, exchangename = exchange
pair = EXCHANGES[exchangename]['pair']
l2 = exch.fetch_l2_order_book(pair)
orderbook_max_entries = EXCHANGES[exchangename].get('orderbook_max_entries')
assert 'asks' in l2
assert 'bids' in l2
assert len(l2['asks']) >= 1

@@ -143,7 +144,7 @@ class TestCCXTExchange:
# TODO: Gate is unstable here at the moment, ignoring the limit partially.
return
for val in [1, 2, 5, 25, 50, 100]:
if val > 50 and exchangename == 'bybit':
if orderbook_max_entries and val > orderbook_max_entries:
continue
l2 = exch.fetch_l2_order_book(pair, val)
if not l2_limit_range or val in l2_limit_range:
@@ -10,9 +10,8 @@ from freqtrade.data.dataprovider import DataProvider
from freqtrade.exceptions import OperationalException
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from tests.conftest import get_patched_exchange
from tests.freqai.conftest import (get_patched_data_kitchen, get_patched_freqai_strategy,
from tests.freqai.conftest import (get_patched_data_kitchen, get_patched_freqai_strategy, is_mac,
make_unfiltered_dataframe)
from tests.freqai.test_freqai_interface import is_mac

@pytest.mark.parametrize(
@@ -2676,9 +2676,9 @@ def test_order_to_ccxt(limit_buy_order_open, limit_sell_order_usdt_open):
'orders': [
(('buy', 100, 10), (100.0, 10.0, 1000.0, 0.0, None, None)),
(('buy', 100, 15), (200.0, 12.5, 2500.0, 0.0, None, None)),
(('sell', 50, 12), (150.0, 12.5, 1875.0, -25.0, -25.0, -0.04)),
(('sell', 100, 20), (50.0, 12.5, 625.0, 725.0, 750.0, 0.60)),
(('sell', 50, 5), (50.0, 12.5, 625.0, 350.0, -375.0, -0.60)),
(('sell', 50, 12), (150.0, 12.5, 1875.0, -25.0, -25.0, -0.01)),
(('sell', 100, 20), (50.0, 12.5, 625.0, 725.0, 750.0, 0.29)),
(('sell', 50, 5), (50.0, 12.5, 625.0, 350.0, -375.0, 0.14)),
],
'end_profit': 350.0,
'end_profit_ratio': 0.14,

@@ -2688,9 +2688,9 @@ def test_order_to_ccxt(limit_buy_order_open, limit_sell_order_usdt_open):
'orders': [
(('buy', 100, 10), (100.0, 10.0, 1000.0, 0.0, None, None)),
(('buy', 100, 15), (200.0, 12.5, 2500.0, 0.0, None, None)),
(('sell', 50, 12), (150.0, 12.5, 1875.0, -28.0625, -28.0625, -0.044788)),
(('sell', 100, 20), (50.0, 12.5, 625.0, 713.8125, 741.875, 0.59201995)),
(('sell', 50, 5), (50.0, 12.5, 625.0, 336.625, -377.1875, -0.60199501)),
(('sell', 50, 12), (150.0, 12.5, 1875.0, -28.0625, -28.0625, -0.011197)),
(('sell', 100, 20), (50.0, 12.5, 625.0, 713.8125, 741.875, 0.2848129)),
(('sell', 50, 5), (50.0, 12.5, 625.0, 336.625, -377.1875, 0.1343142)),
],
'end_profit': 336.625,
'end_profit_ratio': 0.1343142,

@@ -2700,10 +2700,10 @@ def test_order_to_ccxt(limit_buy_order_open, limit_sell_order_usdt_open):
'orders': [
(('buy', 100, 3), (100.0, 3.0, 300.0, 0.0, None, None)),
(('buy', 100, 7), (200.0, 5.0, 1000.0, 0.0, None, None)),
(('sell', 100, 11), (100.0, 5.0, 500.0, 596.0, 596.0, 1.189027)),
(('buy', 150, 15), (250.0, 11.0, 2750.0, 596.0, 596.0, 1.189027)),
(('sell', 100, 19), (150.0, 11.0, 1650.0, 1388.5, 792.5, 0.7186579)),
(('sell', 150, 23), (150.0, 11.0, 1650.0, 3175.75, 1787.25, 1.08048062)),
(('sell', 100, 11), (100.0, 5.0, 500.0, 596.0, 596.0, 0.5945137)),
(('buy', 150, 15), (250.0, 11.0, 2750.0, 596.0, 596.0, 0.5945137)),
(('sell', 100, 19), (150.0, 11.0, 1650.0, 1388.5, 792.5, 0.4261653)),
(('sell', 150, 23), (150.0, 11.0, 1650.0, 3175.75, 1787.25, 0.9747170)),
],
'end_profit': 3175.75,
'end_profit_ratio': 0.9747170,

@@ -2714,10 +2714,10 @@ def test_order_to_ccxt(limit_buy_order_open, limit_sell_order_usdt_open):
'orders': [
(('buy', 100, 3), (100.0, 3.0, 300.0, 0.0, None, None)),
(('buy', 100, 7), (200.0, 5.0, 1000.0, 0.0, None, None)),
(('sell', 100, 11), (100.0, 5.0, 500.0, 600.0, 600.0, 1.2)),
(('buy', 150, 15), (250.0, 11.0, 2750.0, 600.0, 600.0, 1.2)),
(('sell', 100, 19), (150.0, 11.0, 1650.0, 1400.0, 800.0, 0.72727273)),
(('sell', 150, 23), (150.0, 11.0, 1650.0, 3200.0, 1800.0, 1.09090909)),
(('sell', 100, 11), (100.0, 5.0, 500.0, 600.0, 600.0, 0.6)),
(('buy', 150, 15), (250.0, 11.0, 2750.0, 600.0, 600.0, 0.6)),
(('sell', 100, 19), (150.0, 11.0, 1650.0, 1400.0, 800.0, 0.43076923)),
(('sell', 150, 23), (150.0, 11.0, 1650.0, 3200.0, 1800.0, 0.98461538)),
],
'end_profit': 3200.0,
'end_profit_ratio': 0.98461538,

@@ -2727,10 +2727,10 @@ def test_order_to_ccxt(limit_buy_order_open, limit_sell_order_usdt_open):
'orders': [
(('buy', 100, 8), (100.0, 8.0, 800.0, 0.0, None, None)),
(('buy', 100, 9), (200.0, 8.5, 1700.0, 0.0, None, None)),
(('sell', 100, 10), (100.0, 8.5, 850.0, 150.0, 150.0, 0.17647059)),
(('buy', 150, 11), (250.0, 10, 2500.0, 150.0, 150.0, 0.17647059)),
(('sell', 100, 12), (150.0, 10.0, 1500.0, 350.0, 200.0, 0.2)),
(('sell', 150, 14), (150.0, 10.0, 1500.0, 950.0, 600.0, 0.40)),
(('sell', 100, 10), (100.0, 8.5, 850.0, 150.0, 150.0, 0.08823529)),
(('buy', 150, 11), (250.0, 10, 2500.0, 150.0, 150.0, 0.08823529)),
(('sell', 100, 12), (150.0, 10.0, 1500.0, 350.0, 200.0, 0.1044776)),
(('sell', 150, 14), (150.0, 10.0, 1500.0, 950.0, 600.0, 0.283582)),
],
'end_profit': 950.0,
'end_profit_ratio': 0.283582,
@@ -109,7 +109,6 @@ def get_telegram_testobject(mocker, default_conf, mock=True, ftbot=None):
_start_thread=MagicMock(),
)
if not ftbot:
mocker.patch('freqtrade.exchange.exchange.Exchange._init_async_loop')
ftbot = get_patched_freqtradebot(mocker, default_conf)
rpc = RPC(ftbot)
telegram = Telegram(rpc, default_conf)

@@ -150,8 +149,8 @@ def test_telegram_init(default_conf, mocker, caplog) -> None:
"['reload_conf', 'reload_config'], ['show_conf', 'show_config'], "
"['stopbuy', 'stopentry'], ['whitelist'], ['blacklist'], "
"['bl_delete', 'blacklist_delete'], "
"['logs'], ['edge'], ['health'], ['help'], ['version'], ['marketdir']"
"]")
"['logs'], ['edge'], ['health'], ['help'], ['version'], ['marketdir'], "
"['order']]")
assert log_has(message_str, caplog)

@@ -347,8 +346,6 @@ async def test_telegram_status_multi_entry(default_conf, update, mocker, fee) ->
msg = msg_mock.call_args_list[3][0][0]
assert re.search(r'Number of Entries.*2', msg)
assert re.search(r'Number of Exits.*1', msg)
assert re.search(r'from 1st entry rate', msg)
assert re.search(r'Order Filled', msg)
assert re.search(r'Close Date:', msg) is None
assert re.search(r'Close Profit:', msg) is None
@@ -375,6 +372,105 @@ async def test_telegram_status_closed_trade(default_conf, update, mocker, fee) -
assert re.search(r'Close Profit:', msg)

async def test_order_handle(default_conf, update, ticker, fee, mocker) -> None:
default_conf['max_open_trades'] = 3
mocker.patch.multiple(
EXMS,
fetch_ticker=ticker,
get_fee=fee,
_dry_is_price_crossed=MagicMock(return_value=True),
)
status_table = MagicMock()
mocker.patch.multiple(
'freqtrade.rpc.telegram.Telegram',
_status_table=status_table,
)
telegram, freqtradebot, msg_mock = get_telegram_testobject(mocker, default_conf)
patch_get_signal(freqtradebot)
freqtradebot.state = State.RUNNING
msg_mock.reset_mock()
# Create some test data
freqtradebot.enter_positions()
mocker.patch('freqtrade.rpc.telegram.MAX_MESSAGE_LENGTH', 500)
msg_mock.reset_mock()
context = MagicMock()
context.args = ["2"]
await telegram._order(update=update, context=context)
assert msg_mock.call_count == 1
msg1 = msg_mock.call_args_list[0][0][0]
assert 'Order List for Trade #*`2`' in msg1
msg_mock.reset_mock()
mocker.patch('freqtrade.rpc.telegram.MAX_MESSAGE_LENGTH', 50)
context = MagicMock()
context.args = ["2"]
await telegram._order(update=update, context=context)
assert msg_mock.call_count == 2
msg1 = msg_mock.call_args_list[0][0][0]
msg2 = msg_mock.call_args_list[1][0][0]
assert 'Order List for Trade #*`2`' in msg1
assert '*Order List for Trade #*`2` - continued' in msg2

@pytest.mark.usefixtures("init_persistence")
async def test_telegram_order_multi_entry(default_conf, update, mocker, fee) -> None:
default_conf['telegram']['enabled'] = False
default_conf['position_adjustment_enable'] = True
mocker.patch.multiple(
EXMS,
fetch_order=MagicMock(return_value=None),
get_rate=MagicMock(return_value=0.22),
)
telegram, _, msg_mock = get_telegram_testobject(mocker, default_conf)
create_mock_trades(fee)
trades = Trade.get_open_trades()
trade = trades[3]
# Average may be empty on some exchanges
trade.orders[0].average = 0
trade.orders.append(Order(
order_id='5412vbb',
ft_order_side='buy',
ft_pair=trade.pair,
ft_is_open=False,
ft_amount=trade.amount,
ft_price=trade.open_rate,
status="closed",
symbol=trade.pair,
order_type="market",
side="buy",
price=trade.open_rate * 0.95,
average=0,
filled=trade.amount,
remaining=0,
cost=trade.amount,
order_date=trade.open_date,
order_filled_date=trade.open_date,
)
)
trade.recalc_trade_from_orders()
Trade.commit()
await telegram._order(update=update, context=MagicMock())
assert msg_mock.call_count == 4
msg = msg_mock.call_args_list[3][0][0]
assert re.search(r'from 1st entry rate', msg)
assert re.search(r'Order Filled', msg)
async def test_status_handle(default_conf, update, ticker, fee, mocker) -> None:
default_conf['max_open_trades'] = 3
mocker.patch.multiple(

@@ -443,14 +539,12 @@ async def test_status_handle(default_conf, update, ticker, fee, mocker) -> None:
context.args = ["2"]
await telegram._status(update=update, context=context)
assert msg_mock.call_count == 2
assert msg_mock.call_count == 1
msg1 = msg_mock.call_args_list[0][0][0]
msg2 = msg_mock.call_args_list[1][0][0]
assert 'Close Rate' not in msg1
assert 'Trade ID:* `2`' in msg1
assert 'Trade ID:* `2` - continued' in msg2

async def test_status_table_handle(default_conf, update, ticker, fee, mocker) -> None:
@@ -12,9 +12,11 @@ from tests.conftest import generate_test_data, get_patched_exchange
def test_merge_informative_pair():
data = generate_test_data('15m', 40)
informative = generate_test_data('1h', 40)
cols_inf = list(informative.columns)
result = merge_informative_pair(data, informative, '15m', '1h', ffill=True)
assert isinstance(result, pd.DataFrame)
assert list(informative.columns) == cols_inf
assert len(result) == len(data)
assert 'date' in result.columns
assert result['date'].equals(data['date'])

@@ -61,6 +63,60 @@ def test_merge_informative_pair():
assert result.iloc[8]['date_1h'] is pd.NaT

def test_merge_informative_pair_weekly():
# Covers roughly 2 months - until 2023-01-10
data = generate_test_data('1h', 1040, '2022-11-28')
informative = generate_test_data('1w', 40, '2022-11-01')
informative['day'] = informative['date'].dt.day_name()
result = merge_informative_pair(data, informative, '1h', '1w', ffill=True)
assert isinstance(result, pd.DataFrame)
# 2022-12-24 is a Saturday
candle1 = result.loc[(result['date'] == '2022-12-24T22:00:00.000Z')]
assert candle1.iloc[0]['date'] == pd.Timestamp('2022-12-24T22:00:00.000Z')
assert candle1.iloc[0]['date_1w'] == pd.Timestamp('2022-12-12T00:00:00.000Z')
candle2 = result.loc[(result['date'] == '2022-12-24T23:00:00.000Z')]
assert candle2.iloc[0]['date'] == pd.Timestamp('2022-12-24T23:00:00.000Z')
assert candle2.iloc[0]['date_1w'] == pd.Timestamp('2022-12-12T00:00:00.000Z')
# 2022-12-25 is a Sunday
candle3 = result.loc[(result['date'] == '2022-12-25T22:00:00.000Z')]
assert candle3.iloc[0]['date'] == pd.Timestamp('2022-12-25T22:00:00.000Z')
# Still old candle
assert candle3.iloc[0]['date_1w'] == pd.Timestamp('2022-12-12T00:00:00.000Z')
candle4 = result.loc[(result['date'] == '2022-12-25T23:00:00.000Z')]
assert candle4.iloc[0]['date'] == pd.Timestamp('2022-12-25T23:00:00.000Z')
assert candle4.iloc[0]['date_1w'] == pd.Timestamp('2022-12-19T00:00:00.000Z')

def test_merge_informative_pair_monthly():
# Covers roughly 2 months - until 2023-01-10
data = generate_test_data('1h', 1040, '2022-11-28')
informative = generate_test_data('1M', 40, '2022-01-01')
result = merge_informative_pair(data, informative, '1h', '1M', ffill=True)
assert isinstance(result, pd.DataFrame)
candle1 = result.loc[(result['date'] == '2022-12-31T22:00:00.000Z')]
assert candle1.iloc[0]['date'] == pd.Timestamp('2022-12-31T22:00:00.000Z')
assert candle1.iloc[0]['date_1M'] == pd.Timestamp('2022-11-01T00:00:00.000Z')
candle2 = result.loc[(result['date'] == '2022-12-31T23:00:00.000Z')]
assert candle2.iloc[0]['date'] == pd.Timestamp('2022-12-31T23:00:00.000Z')
assert candle2.iloc[0]['date_1M'] == pd.Timestamp('2022-12-01T00:00:00.000Z')
# Candle is empty, as the start-date did fail.
candle3 = result.loc[(result['date'] == '2022-11-30T22:00:00.000Z')]
assert candle3.iloc[0]['date'] == pd.Timestamp('2022-11-30T22:00:00.000Z')
assert candle3.iloc[0]['date_1M'] is pd.NaT
# First candle with 1M data merged.
candle4 = result.loc[(result['date'] == '2022-11-30T23:00:00.000Z')]
assert candle4.iloc[0]['date'] == pd.Timestamp('2022-11-30T23:00:00.000Z')
assert candle4.iloc[0]['date_1M'] == pd.Timestamp('2022-11-01T00:00:00.000Z')

def test_merge_informative_pair_same():
data = generate_test_data('15m', 40)
informative = generate_test_data('15m', 40)
@@ -6569,16 +6569,16 @@ def test_position_adjust2(mocker, default_conf_usdt, fee) -> None:
# tuple 2 - amount, open_rate, stake_amount, cumulative_profit, realized_profit, rel_profit
(('buy', 100, 10), (100.0, 10.0, 1000.0, 0.0, None, None)),
(('buy', 100, 15), (200.0, 12.5, 2500.0, 0.0, None, None)),
(('sell', 50, 12), (150.0, 12.5, 1875.0, -28.0625, -28.0625, -0.044788)),
(('sell', 100, 20), (50.0, 12.5, 625.0, 713.8125, 741.875, 0.59201995)),
(('sell', 50, 12), (150.0, 12.5, 1875.0, -28.0625, -28.0625, -0.011197)),
(('sell', 100, 20), (50.0, 12.5, 625.0, 713.8125, 741.875, 0.2848129)),
(('sell', 50, 5), (50.0, 12.5, 625.0, 336.625, 336.625, 0.1343142)), # final profit (sum)
),
(
(('buy', 100, 3), (100.0, 3.0, 300.0, 0.0, None, None)),
(('buy', 100, 7), (200.0, 5.0, 1000.0, 0.0, None, None)),
(('sell', 100, 11), (100.0, 5.0, 500.0, 596.0, 596.0, 1.189027)),
(('buy', 150, 15), (250.0, 11.0, 2750.0, 596.0, 596.0, 1.189027)),
(('sell', 100, 19), (150.0, 11.0, 1650.0, 1388.5, 792.5, 0.7186579)),
(('sell', 100, 11), (100.0, 5.0, 500.0, 596.0, 596.0, 0.5945137)),
(('buy', 150, 15), (250.0, 11.0, 2750.0, 596.0, 596.0, 0.5945137)),
(('sell', 100, 19), (150.0, 11.0, 1650.0, 1388.5, 792.5, 0.4261653)),
(('sell', 150, 23), (150.0, 11.0, 1650.0, 3175.75, 3175.75, 0.9747170)), # final profit
)
])
@@ -1,3 +1,4 @@
import time
from unittest.mock import MagicMock

import pytest

@@ -440,6 +441,7 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
assert trade.open_rate == 1.99
assert trade.orders[-1].price == 1.96
assert trade.orders[-1].cost == 120 * leverage
time.sleep(0.1)
# Replace new order with diff. order at a lower price
freqtrade.strategy.adjust_entry_price = MagicMock(return_value=1.95)