Merge pull request #11505 from freqtrade/feat/log_from_config
allow loading logging from config
@@ -542,6 +542,10 @@
       "description": "Edge configuration.",
       "$ref": "#/definitions/edge"
     },
+    "log_config": {
+      "description": "Logging configuration.",
+      "$ref": "#/definitions/logging"
+    },
     "freqai": {
       "description": "FreqAI configuration.",
       "$ref": "#/definitions/freqai"
@@ -1281,6 +1285,30 @@
        "allowed_risk"
      ]
    },
+    "logging": {
+      "type": "object",
+      "properties": {
+        "version": {
+          "type": "number",
+          "const": 1
+        },
+        "formatters": {
+          "type": "object"
+        },
+        "handlers": {
+          "type": "object"
+        },
+        "root": {
+          "type": "object"
+        }
+      },
+      "required": [
+        "version",
+        "formatters",
+        "handlers",
+        "root"
+      ]
+    },
    "external_message_consumer": {
      "description": "Configuration for external message consumer.",
      "type": "object",
@@ -188,30 +188,111 @@ as the watchdog.
 
 ## Advanced Logging
 
+Freqtrade uses the default logging module provided by Python.
+Python allows for extensive [logging configuration](https://docs.python.org/3/library/logging.config.html#logging.config.dictConfig) in this regard - far more than can be covered here.
+
+Default logging (colored terminal output) is set up by default if no `log_config` is provided.
+Using `--logfile logfile.log` will enable the RotatingFileHandler.
+If you're not content with the log format - or with the default settings provided for the RotatingFileHandler - you can customize logging to your liking.
+
+The default configuration looks roughly like the below - with the file handler being provided, but not enabled.
+
+``` json hl_lines="5-7 13-16 27"
+{
+  "log_config": {
+    "version": 1,
+    "formatters": {
+      "basic": {
+        "format": "%(message)s"
+      },
+      "standard": {
+        "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+      }
+    },
+    "handlers": {
+      "console": {
+        "class": "freqtrade.loggers.ft_rich_handler.FtRichHandler",
+        "formatter": "basic"
+      },
+      "file": {
+        "class": "logging.handlers.RotatingFileHandler",
+        "formatter": "standard",
+        // "filename": "someRandomLogFile.log",
+        "maxBytes": 10485760,
+        "backupCount": 10
+      }
+    },
+    "root": {
+      "handlers": [
+        "console",
+        // "file"
+      ],
+      "level": "INFO",
+    }
+  }
+}
+```
+
+!!! Note "highlighted lines"
+    Highlighted lines in the above code-block define the Rich handler and belong together.
+    The formatter "standard" and the handler "file" belong to the FileHandler.
+
+Each handler must use one of the defined formatters (by name) - and its class must be available and a valid logging class.
+To actually use a handler, it must be listed in the "handlers" section inside the "root" segment.
+If this section is left out, freqtrade will provide no output (through the non-configured handler, anyway).
+
+!!! Tip "Explicit log configuration"
+    We recommend extracting the logging configuration from your main configuration and providing it to your bot via the [multiple configuration files](configuration.md#multiple-configuration-files) functionality. This avoids unnecessary code duplication.
+
+---
+
 On many Linux systems the bot can be configured to send its log messages to `syslog` or `journald` system services. Logging to a remote `syslog` server is also available on Windows. The special values for the `--logfile` command line option can be used for this.
 
 ### Logging to syslog
 
-To send Freqtrade log messages to a local or remote `syslog` service use the `--logfile` command line option with the value in the following format:
-
-* `--logfile syslog:<syslog_address>` -- send log messages to `syslog` service using the `<syslog_address>` as the syslog address.
-
-The syslog address can be either a Unix domain socket (socket filename) or a UDP socket specification, consisting of IP address and UDP port, separated by the `:` character.
-
-So, the following are the examples of possible usages:
-
-* `--logfile syslog:/dev/log` -- log to syslog (rsyslog) using the `/dev/log` socket, suitable for most systems.
-* `--logfile syslog` -- same as above, the shortcut for `/dev/log`.
-* `--logfile syslog:/var/run/syslog` -- log to syslog (rsyslog) using the `/var/run/syslog` socket. Use this on MacOS.
-* `--logfile syslog:localhost:514` -- log to local syslog using UDP socket, if it listens on port 514.
-* `--logfile syslog:<ip>:514` -- log to remote syslog at IP address and port 514. This may be used on Windows for remote logging to an external syslog server.
+To send Freqtrade log messages to a local or remote `syslog` service, use the `"log_config"` setup option to configure logging.
+
+``` json
+{
+  // ...
+  "log_config": {
+    "version": 1,
+    "formatters": {
+      "syslog_fmt": {
+        "format": "%(name)s - %(levelname)s - %(message)s"
+      }
+    },
+    "handlers": {
+      // Other handlers?
+      "syslog": {
+        "class": "logging.handlers.SysLogHandler",
+        "formatter": "syslog_fmt",
+        // Use one of the other options above as address instead?
+        "address": "/dev/log"
+      }
+    },
+    "root": {
+      "handlers": [
+        // other handlers
+        "syslog",
+      ]
+    }
+  }
+}
+```
+
+[Additional log-handlers](#advanced-logging) may need to be configured to, for example, also have log output in the console.
+
+#### Syslog usage
 
 Log messages are sent to `syslog` with the `user` facility. So you can see them with the following commands:
 
 * `tail -f /var/log/user`, or
 * install a comprehensive graphical viewer (for instance, 'Log File Viewer' for Ubuntu).
 
-On many systems `syslog` (`rsyslog`) fetches data from `journald` (and vice versa), so both `--logfile syslog` or `--logfile journald` can be used and the messages be viewed with both `journalctl` and a syslog viewer utility. You can combine this in any way which suites you better.
+On many systems `syslog` (`rsyslog`) fetches data from `journald` (and vice versa), so either syslog or journald can be used and the messages can be viewed with both `journalctl` and a syslog viewer utility. You can combine this in any way which suits you better.
 
 For `rsyslog` the messages from the bot can be redirected into a separate dedicated log file. To achieve this, add
 
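The `log_config` block documented above is plain `logging.config.dictConfig` input. As a minimal, standard-library-only illustration (not taken from this diff; a plain `StreamHandler` stands in for freqtrade's `FtRichHandler`), this is how such a dict is applied:

``` python
import logging
import logging.config

# Same shape as the documented "log_config" block, reduced to stdlib handlers.
LOG_CONFIG = {
    "version": 1,
    "formatters": {
        "standard": {"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"},
    },
    "handlers": {
        "console": {"class": "logging.StreamHandler", "formatter": "standard"},
    },
    "root": {"handlers": ["console"], "level": "INFO"},
}

logging.config.dictConfig(LOG_CONFIG)
logging.getLogger("demo").info("dict-based logging configured")
```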
@@ -228,13 +309,69 @@ For `syslog` (`rsyslog`), the reduction mode can be switched on. This will reduc
 $RepeatedMsgReduction on
 ```
 
+#### Syslog addressing
+
+The syslog address can be either a Unix domain socket (socket filename) or a UDP socket specification, consisting of IP address and UDP port, separated by the `:` character.
+
+So, the following are examples of possible addresses:
+
+* `"address": "/dev/log"` -- log to syslog (rsyslog) using the `/dev/log` socket, suitable for most systems.
+* `"address": "/var/run/syslog"` -- log to syslog (rsyslog) using the `/var/run/syslog` socket. Use this on MacOS.
+* `"address": "localhost:514"` -- log to local syslog using a UDP socket, if it listens on port 514.
+* `"address": "<ip>:514"` -- log to remote syslog at the given IP address and port 514. This may be used on Windows for remote logging to an external syslog server.
+
+??? Info "Deprecated - configure syslog via command line"
+    `--logfile syslog:<syslog_address>` -- send log messages to the `syslog` service, using `<syslog_address>` as the syslog address.
+
+    The syslog address can be either a Unix domain socket (socket filename) or a UDP socket specification, consisting of IP address and UDP port, separated by the `:` character.
+
+    So, the following are examples of possible usages:
+
+    * `--logfile syslog:/dev/log` -- log to syslog (rsyslog) using the `/dev/log` socket, suitable for most systems.
+    * `--logfile syslog` -- same as above, the shortcut for `/dev/log`.
+    * `--logfile syslog:/var/run/syslog` -- log to syslog (rsyslog) using the `/var/run/syslog` socket. Use this on MacOS.
+    * `--logfile syslog:localhost:514` -- log to local syslog using a UDP socket, if it listens on port 514.
+    * `--logfile syslog:<ip>:514` -- log to remote syslog at the given IP address and port 514. This may be used on Windows for remote logging to an external syslog server.
+
 ### Logging to journald
 
 This needs the `cysystemd` python package installed as dependency (`pip install cysystemd`), which is not available on Windows. Hence, the whole journald logging functionality is not available for a bot running on Windows.
 
-To send Freqtrade log messages to `journald` system service use the `--logfile` command line option with the value in the following format:
-
-* `--logfile journald` -- send log messages to `journald`.
+To send Freqtrade log messages to the `journald` system service, add the following configuration snippet to your configuration.
+
+``` json
+{
+  // ...
+  "log_config": {
+    "version": 1,
+    "formatters": {
+      "journald_fmt": {
+        "format": "%(name)s - %(levelname)s - %(message)s"
+      }
+    },
+    "handlers": {
+      // Other handlers?
+      "journald": {
+        "class": "cysystemd.journal.JournaldLogHandler",
+        "formatter": "journald_fmt",
+      }
+    },
+    "root": {
+      "handlers": [
+        // ..
+        "journald",
+      ]
+    }
+  }
+}
+```
+
+[Additional log-handlers](#advanced-logging) may need to be configured to, for example, also have log output in the console.
+
 Log messages are sent to `journald` with the `user` facility. So you can see them with the following commands:
 
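The `"address"` values listed above map onto the two address forms the standard library's `SysLogHandler` accepts - a Unix socket path or a `(host, port)` tuple. A small illustrative sketch (not part of this diff; the helper function is hypothetical):

``` python
import logging
from logging.handlers import SysLogHandler


def build_syslog_handler(address: str) -> SysLogHandler:
    # "host:port" becomes a (host, port) UDP tuple; anything else is treated
    # as a Unix domain socket path such as "/dev/log" or "/var/run/syslog".
    if ":" in address:
        host, port = address.rsplit(":", 1)
        handler = SysLogHandler(address=(host, int(port)))
    else:
        handler = SysLogHandler(address=address)
    handler.setFormatter(logging.Formatter("%(name)s - %(levelname)s - %(message)s"))
    return handler
```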
@@ -244,3 +381,51 @@ Log messages are send to `journald` with the `user` facility. So you can see the
 There are many other options in the `journalctl` utility to filter the messages, see manual pages for this utility.
 
 On many systems `syslog` (`rsyslog`) fetches data from `journald` (and vice versa), so both `--logfile syslog` or `--logfile journald` can be used and the messages can be viewed with both `journalctl` and a syslog viewer utility. You can combine this in any way which suits you better.
+
+??? Info "Deprecated - configure journald via command line"
+    To send Freqtrade log messages to the `journald` system service, use the `--logfile` command line option with the value in the following format:
+
+    `--logfile journald` -- send log messages to `journald`.
+
+### Log format as JSON
+
+You can also configure the default output stream to use JSON format instead.
+The "fmt_dict" attribute defines the keys for the JSON output - as well as the [python logging LogRecord attributes](https://docs.python.org/3/library/logging.html#logrecord-attributes) they are taken from.
+
+The below configuration will change the default output to JSON. The same formatter could however also be used in combination with the `RotatingFileHandler`.
+We recommend keeping one format in human-readable form.
+
+``` json
+{
+  // ...
+  "log_config": {
+    "version": 1,
+    "formatters": {
+      "json": {
+        "()": "freqtrade.loggers.json_formatter.JsonFormatter",
+        "fmt_dict": {
+          "timestamp": "asctime",
+          "level": "levelname",
+          "logger": "name",
+          "message": "message"
+        }
+      }
+    },
+    "handlers": {
+      // Other handlers?
+      "jsonStream": {
+        "class": "logging.StreamHandler",
+        "formatter": "json"
+      }
+    },
+    "root": {
+      "handlers": [
+        // ..
+        "jsonStream",
+      ]
+    }
+  }
+}
+```
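To make the `fmt_dict` idea concrete, here is a deliberately simplified, standard-library-only formatter sketch (not taken from this diff - freqtrade's actual `JsonFormatter`, shown later in this PR, additionally handles timestamps, `exc_info` and `stack_info`):

``` python
import json
import logging


class TinyJsonFormatter(logging.Formatter):
    """Map LogRecord attributes to JSON keys, in the spirit of "fmt_dict"."""

    def __init__(self, fmt_dict: dict[str, str] | None = None) -> None:
        super().__init__()
        self.fmt_dict = fmt_dict or {"level": "levelname", "logger": "name", "message": "message"}

    def format(self, record: logging.LogRecord) -> str:
        record.message = record.getMessage()
        return json.dumps({key: getattr(record, attr) for key, attr in self.fmt_dict.items()})


handler = logging.StreamHandler()
handler.setFormatter(TinyJsonFormatter())
logging.getLogger("demo").addHandler(handler)
logging.getLogger("demo").warning("hello")
# prints: {"level": "WARNING", "logger": "demo", "message": "hello"}
```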
@@ -282,6 +282,7 @@ Mandatory parameters are marked as **Required**, which means that they are requi
 | `dataformat_ohlcv` | Data format to use to store historical candle (OHLCV) data. <br> *Defaults to `feather`*. <br> **Datatype:** String
 | `dataformat_trades` | Data format to use to store historical trades data. <br> *Defaults to `feather`*. <br> **Datatype:** String
 | `reduce_df_footprint` | Recast all numeric columns to float32/int32, with the objective of reducing ram/disk usage (and decreasing train/inference timing in FreqAI). (Currently only affects FreqAI use-cases) <br> **Datatype:** Boolean. <br> Default: `False`.
+| `log_config` | Dictionary containing the log config for python logging. [more info](advanced-setup.md#advanced-logging) <br> **Datatype:** dict. <br> Default: `FtRichHandler`
 
 ### Parameters in the strategy
 
@@ -88,3 +88,8 @@ Setting protections from the configuration via `"protections": [],` has been rem
 Using hdf5 as data storage has been deprecated in 2024.12 and was removed in 2025.1. We recommend switching to the feather data format.
 
 Please use the [`convert-data` subcommand](data-download.md#sub-command-convert-data) to convert your existing data to one of the supported formats before updating.
+
+## Configuring advanced logging via config
+
+Configuring syslog and journald via `--logfile syslog` and `--logfile journald` respectively has been deprecated in 2025.3.
+Please use the configuration based [log setup](advanced-setup.md#advanced-logging) instead.
@@ -425,6 +425,10 @@ CONF_SCHEMA = {
             "description": "Edge configuration.",
             "$ref": "#/definitions/edge",
         },
+        "log_config": {
+            "description": "Logging configuration.",
+            "$ref": "#/definitions/logging",
+        },
         "freqai": {
             "description": "FreqAI configuration.",
             "$ref": "#/definitions/freqai",
@@ -883,6 +887,28 @@ CONF_SCHEMA = {
             },
             "required": ["process_throttle_secs", "allowed_risk"],
         },
+        "logging": {
+            "type": "object",
+            "properties": {
+                "version": {"type": "number", "const": 1},
+                "formatters": {
+                    "type": "object",
+                    # In theory the below, but can be more flexible
+                    # based on logging.config documentation
+                    # "additionalProperties": {
+                    #     "type": "object",
+                    #     "properties": {
+                    #         "format": {"type": "string"},
+                    #         "datefmt": {"type": "string"},
+                    #     },
+                    #     "required": ["format"],
+                    # },
+                },
+                "handlers": {"type": "object"},
+                "root": {"type": "object"},
+            },
+            "required": ["version", "formatters", "handlers", "root"],
+        },
         "external_message_consumer": {
             "description": "Configuration for external message consumer.",
             "type": "object",
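For reference, a short sketch of what the new `logging` definition accepts and rejects (not part of this diff; it assumes the `jsonschema` package and inlines the sub-schema rather than resolving the `$ref`):

``` python
from jsonschema import ValidationError, validate

LOGGING_SCHEMA = {
    "type": "object",
    "properties": {
        "version": {"type": "number", "const": 1},
        "formatters": {"type": "object"},
        "handlers": {"type": "object"},
        "root": {"type": "object"},
    },
    "required": ["version", "formatters", "handlers", "root"],
}

# A structurally complete (if empty) log_config passes validation.
validate(instance={"version": 1, "formatters": {}, "handlers": {}, "root": {}}, schema=LOGGING_SCHEMA)

try:
    validate(instance={"version": 1}, schema=LOGGING_SCHEMA)
except ValidationError as err:
    print(f"rejected: {err.message}")  # 'formatters' is a required property
```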
@@ -1,14 +1,16 @@
 import logging
+import logging.config
+import os
+from copy import deepcopy
 from logging import Formatter
-from logging.handlers import RotatingFileHandler, SysLogHandler
 from pathlib import Path
+from typing import Any
 
 from freqtrade.constants import Config
 from freqtrade.exceptions import OperationalException
 from freqtrade.loggers.buffering_handler import FTBufferingHandler
 from freqtrade.loggers.ft_rich_handler import FtRichHandler
 from freqtrade.loggers.rich_console import get_rich_console
-from freqtrade.loggers.set_log_levels import set_loggers
 
 
 # from freqtrade.loggers.std_err_stream_handler import FTStdErrStreamHandler
@@ -53,63 +55,140 @@ def setup_logging_pre() -> None:
     )
 
 
-def setup_logging(config: Config) -> None:
-    """
-    Process -v/--verbose, --logfile options
-    """
-    # Log level
-    verbosity = config["verbosity"]
-    logging.root.addHandler(bufferHandler)
-    if config.get("print_colorized", True):
-        logger.info("Enabling colorized output.")
-        error_console._color_system = error_console._detect_color_system()
-
-    logfile = config.get("logfile")
-
-    if logfile:
+FT_LOGGING_CONFIG = {
+    "version": 1,
+    # "incremental": True,
+    # "disable_existing_loggers": False,
+    "formatters": {
+        "basic": {"format": "%(message)s"},
+        "standard": {
+            "format": LOGFORMAT,
+        },
+    },
+    "handlers": {
+        "console": {
+            "class": "freqtrade.loggers.ft_rich_handler.FtRichHandler",
+            "formatter": "basic",
+        },
+    },
+    "root": {
+        "handlers": [
+            "console",
+            # "file",
+        ],
+        "level": "INFO",
+    },
+}
+
+
+def _set_log_levels(
+    log_config: dict[str, Any], verbosity: int = 0, api_verbosity: str = "info"
+) -> None:
+    """
+    Set the logging level for the different loggers
+    """
+    if "loggers" not in log_config:
+        log_config["loggers"] = {}
+
+    # Set default levels for third party libraries
+    third_party_loggers = {
+        "freqtrade": logging.INFO if verbosity <= 1 else logging.DEBUG,
+        "requests": logging.INFO if verbosity <= 1 else logging.DEBUG,
+        "urllib3": logging.INFO if verbosity <= 1 else logging.DEBUG,
+        "httpcore": logging.INFO if verbosity <= 1 else logging.DEBUG,
+        "ccxt.base.exchange": logging.INFO if verbosity <= 2 else logging.DEBUG,
+        "telegram": logging.INFO,
+        "httpx": logging.WARNING,
+        "werkzeug": logging.ERROR if api_verbosity == "error" else logging.INFO,
+    }
+
+    # Add third party loggers to the configuration
+    for logger_name, level in third_party_loggers.items():
+        if logger_name not in log_config["loggers"]:
+            log_config["loggers"][logger_name] = {
+                "level": logging.getLevelName(level),
+                "propagate": True,
+            }
+
+
+def _add_root_handler(log_config: dict[str, Any], handler_name: str):
+    if handler_name not in log_config["root"]["handlers"]:
+        log_config["root"]["handlers"].append(handler_name)
+
+
+def _add_formatter(log_config: dict[str, Any], format_name: str, format_: str):
+    if format_name not in log_config["formatters"]:
+        log_config["formatters"][format_name] = {"format": format_}
+
+
+def _create_log_config(config: Config) -> dict[str, Any]:
+    # Get log_config from user config or use default
+    log_config = config.get("log_config", deepcopy(FT_LOGGING_CONFIG))
+
+    if logfile := config.get("logfile"):
         s = logfile.split(":")
         if s[0] == "syslog":
-            # Address can be either a string (socket filename) for Unix domain socket or
-            # a tuple (hostname, port) for UDP socket.
-            # Address can be omitted (i.e. simple 'syslog' used as the value of
-            # config['logfilename']), which defaults to '/dev/log', applicable for most
-            # of the systems.
-            address = (s[1], int(s[2])) if len(s) > 2 else s[1] if len(s) > 1 else "/dev/log"
-            if handler_sl := get_existing_handlers(SysLogHandler):
-                logging.root.removeHandler(handler_sl)
-            handler_sl = SysLogHandler(address=address)
-            # No datetime field for logging into syslog, to allow syslog
-            # to perform reduction of repeating messages if this is set in the
-            # syslog config. The messages should be equal for this.
-            handler_sl.setFormatter(Formatter("%(name)s - %(levelname)s - %(message)s"))
-            logging.root.addHandler(handler_sl)
+            logger.warning(
+                "DEPRECATED: Configuring syslog logging via command line is deprecated."
+                "Please use the log_config option in the configuration file instead."
+            )
+            # Add syslog handler to the config
+            log_config["handlers"]["syslog"] = {
+                "class": "logging.handlers.SysLogHandler",
+                "formatter": "syslog_format",
+                "address": (s[1], int(s[2])) if len(s) > 2 else s[1] if len(s) > 1 else "/dev/log",
+            }
+
+            _add_formatter(log_config, "syslog_format", "%(name)s - %(levelname)s - %(message)s")
+            _add_root_handler(log_config, "syslog")
         elif s[0] == "journald":  # pragma: no cover
+            # Check if we have the module available
+            logger.warning(
+                "DEPRECATED: Configuring Journald logging via command line is deprecated."
+                "Please use the log_config option in the configuration file instead."
+            )
             try:
-                from cysystemd.journal import JournaldLogHandler
+                from cysystemd.journal import JournaldLogHandler  # noqa: F401
             except ImportError:
                 raise OperationalException(
                     "You need the cysystemd python package be installed in "
                     "order to use logging to journald."
                 )
-            if handler_jd := get_existing_handlers(JournaldLogHandler):
-                logging.root.removeHandler(handler_jd)
-            handler_jd = JournaldLogHandler()
-            # No datetime field for logging into journald, to allow syslog
-            # to perform reduction of repeating messages if this is set in the
-            # syslog config. The messages should be equal for this.
-            handler_jd.setFormatter(Formatter("%(name)s - %(levelname)s - %(message)s"))
-            logging.root.addHandler(handler_jd)
+
+            # Add journald handler to the config
+            log_config["handlers"]["journald"] = {
+                "class": "cysystemd.journal.JournaldLogHandler",
+                "formatter": "journald_format",
+            }
+
+            _add_formatter(log_config, "journald_format", "%(name)s - %(levelname)s - %(message)s")
+            _add_root_handler(log_config, "journald")
+
         else:
-            if handler_rf := get_existing_handlers(RotatingFileHandler):
-                logging.root.removeHandler(handler_rf)
+            # Regular file logging
+            # Update existing file handler configuration
+            if "file" in log_config["handlers"]:
+                log_config["handlers"]["file"]["filename"] = logfile
+            else:
+                log_config["handlers"]["file"] = {
+                    "class": "logging.handlers.RotatingFileHandler",
+                    "formatter": "standard",
+                    "filename": logfile,
+                    "maxBytes": 1024 * 1024 * 10,  # 10Mb
+                    "backupCount": 10,
+                }
+            _add_root_handler(log_config, "file")
+
+    # Dynamically update some handlers
+    for handler_config in log_config.get("handlers", {}).values():
+        if handler_config.get("class") == "freqtrade.loggers.ft_rich_handler.FtRichHandler":
+            handler_config["console"] = error_console
+        elif handler_config.get("class") == "logging.handlers.RotatingFileHandler":
+            logfile_path = Path(handler_config["filename"])
             try:
-                logfile_path = Path(logfile)
+                # Create parent for filehandler
                 logfile_path.parent.mkdir(parents=True, exist_ok=True)
-                handler_rf = RotatingFileHandler(
-                    logfile_path,
-                    maxBytes=1024 * 1024 * 10,  # 10Mb
-                    backupCount=10,
-                )
             except PermissionError:
                 raise OperationalException(
                     f'Failed to create or access log file "{logfile_path.absolute()}". '
@@ -119,10 +198,34 @@ def setup_logging(config: Config) -> None:
                     "non-root user, delete and recreate the directories you need, and then try "
                     "again."
                 )
-            handler_rf.setFormatter(Formatter(LOGFORMAT))
-            logging.root.addHandler(handler_rf)
+    return log_config
+
+
+def setup_logging(config: Config) -> None:
+    """
+    Process -v/--verbose, --logfile options
+    """
+    verbosity = config["verbosity"]
+    if os.environ.get("PYTEST_VERSION") is None or config.get("ft_tests_force_logging"):
+        log_config = _create_log_config(config)
+        _set_log_levels(
+            log_config, verbosity, config.get("api_server", {}).get("verbosity", "info")
+        )
+
+        logging.config.dictConfig(log_config)
+
+    # Add buffer handler to root logger
+    if bufferHandler not in logging.root.handlers:
+        logging.root.addHandler(bufferHandler)
+
+    # Set color system for console output
+    if config.get("print_colorized", True):
+        logger.info("Enabling colorized output.")
+        error_console._color_system = error_console._detect_color_system()
+
+    logging.info("Logfile configured")
+
+    # Set verbosity levels
     logging.root.setLevel(logging.INFO if verbosity < 1 else logging.DEBUG)
-    set_loggers(verbosity, config.get("api_server", {}).get("verbosity", "info"))
 
     logger.info("Verbosity set to %s", verbosity)
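A minimal usage sketch (not part of this diff) mirroring how the tests in this PR exercise the new code path - `setup_logging()` builds a dictConfig from `log_config`, `logfile`, or the built-in `FT_LOGGING_CONFIG` default:

``` python
from freqtrade.loggers import setup_logging, setup_logging_pre

config = {
    "verbosity": 1,
    # "logfile": "user_data/logs/freqtrade.log",  # hypothetical path; enables the rotating file handler
    # "log_config": {...},                        # or a full dictConfig-style setup, see the docs above
}

setup_logging_pre()    # pre-config colored console logging
setup_logging(config)  # applies the generated logging.config.dictConfig
```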
freqtrade/loggers/json_formatter.py (new file, 74 lines)
@@ -0,0 +1,74 @@
+import json
+import logging
+
+
+class JsonFormatter(logging.Formatter):
+    """
+    Formatter that outputs JSON strings after parsing the LogRecord.
+
+    @param dict fmt_dict: Key: logging format attribute pairs. Defaults to {"message": "message"}.
+    @param str time_format: time.strftime() format string. Default: "%Y-%m-%dT%H:%M:%S"
+    @param str msec_format: Microsecond formatting. Appended at the end. Default: "%s.%03dZ"
+    """
+
+    def __init__(
+        self,
+        fmt_dict: dict | None = None,
+        time_format: str = "%Y-%m-%dT%H:%M:%S",
+        msec_format: str = "%s.%03dZ",
+    ):
+        self.fmt_dict = (
+            fmt_dict
+            if fmt_dict is not None
+            else {
+                "timestamp": "asctime",
+                "level": "levelname",
+                "logger": "name",
+                "message": "message",
+            }
+        )
+        self.default_time_format = time_format
+        self.default_msec_format = msec_format
+        self.datefmt = None
+
+    def usesTime(self) -> bool:
+        """
+        Look for the attribute in the format dict values instead of the fmt string.
+        """
+        return "asctime" in self.fmt_dict.values()
+
+    def formatMessage(self, record) -> str:
+        raise NotImplementedError()
+
+    def formatMessageDict(self, record) -> dict:
+        """
+        Return a dictionary of the relevant LogRecord attributes instead of a string.
+        KeyError is raised if an unknown attribute is provided in the fmt_dict.
+        """
+        return {fmt_key: record.__dict__[fmt_val] for fmt_key, fmt_val in self.fmt_dict.items()}
+
+    def format(self, record) -> str:
+        """
+        Mostly the same as the parent's class method, the difference being that a dict is
+        manipulated and dumped as JSON instead of a string.
+        """
+        record.message = record.getMessage()
+
+        if self.usesTime():
+            record.asctime = self.formatTime(record, self.datefmt)
+
+        message_dict = self.formatMessageDict(record)
+
+        if record.exc_info:
+            # Cache the traceback text to avoid converting it multiple times
+            # (it's constant anyway)
+            if not record.exc_text:
+                record.exc_text = self.formatException(record.exc_info)
+
+        if record.exc_text:
+            message_dict["exc_info"] = record.exc_text
+
+        if record.stack_info:
+            message_dict["stack_info"] = self.formatStack(record.stack_info)
+
+        return json.dumps(message_dict, default=str)
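A short usage sketch for the `JsonFormatter` defined above (not part of this diff) - attach it to any handler to emit one JSON document per log record:

``` python
import logging

from freqtrade.loggers.json_formatter import JsonFormatter

handler = logging.StreamHandler()
handler.setFormatter(JsonFormatter(fmt_dict={"ts": "asctime", "level": "levelname", "message": "message"}))

log = logging.getLogger("json_demo")
log.addHandler(handler)
log.warning("hello")
# prints something like: {"ts": "2025-03-01T12:00:00.000Z", "level": "WARNING", "message": "hello"}
```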
@@ -4,25 +4,6 @@ import logging
 logger = logging.getLogger(__name__)
 
 
-def set_loggers(verbosity: int = 0, api_verbosity: str = "info") -> None:
-    """
-    Set the logging level for third party libraries
-    :param verbosity: Verbosity level. amount of `-v` passed to the command line
-    :return: None
-    """
-    for logger_name in ("requests", "urllib3", "httpcore"):
-        logging.getLogger(logger_name).setLevel(logging.INFO if verbosity <= 1 else logging.DEBUG)
-    logging.getLogger("ccxt.base.exchange").setLevel(
-        logging.INFO if verbosity <= 2 else logging.DEBUG
-    )
-    logging.getLogger("telegram").setLevel(logging.INFO)
-    logging.getLogger("httpx").setLevel(logging.WARNING)
-
-    logging.getLogger("werkzeug").setLevel(
-        logging.ERROR if api_verbosity == "error" else logging.INFO
-    )
-
-
 __BIAS_TESTER_LOGGERS = [
     "freqtrade.resolvers",
     "freqtrade.strategy.hyper",
@@ -549,6 +549,14 @@ def user_dir(mocker, tmp_path) -> Path:
     return user_dir
 
 
+@pytest.fixture()
+def keep_log_config_loggers(mocker):
+    # Mock the _handle_existing_loggers function to prevent it from disabling all loggers.
+    # This is necessary to keep all loggers active, and avoid random failures if
+    # this file is run before the test_rest_client file.
+    mocker.patch("logging.config._handle_existing_loggers")
+
+
 @pytest.fixture(autouse=True)
 def patch_coingecko(mocker) -> None:
     """
@@ -12,7 +12,6 @@ import pytest
 
 from freqtrade.enums import CandleType
 from freqtrade.exchange.exchange_utils import timeframe_to_prev_date
-from freqtrade.loggers.set_log_levels import set_loggers
 from freqtrade.util.datetime_helpers import dt_now
 from tests.conftest import log_has_re
 from tests.exchange_online.conftest import EXCHANGE_WS_FIXTURE_TYPE
@@ -50,7 +49,6 @@ class TestCCXTExchangeWs:
         assert res[pair_tf] is not None
         df1 = res[pair_tf]
         caplog.set_level(logging.DEBUG)
-        set_loggers(1)
         assert df1.iloc[-1]["date"] == curr_candle
 
         # Wait until the next candle (might be up to 1 minute).
@@ -603,7 +603,7 @@ def test_cli_verbose_with_params(default_conf, mocker, caplog) -> None:
     patched_configuration_load_config_file(mocker, default_conf)
 
     # Prevent setting loggers
-    mocker.patch("freqtrade.loggers.set_loggers", MagicMock)
+    mocker.patch("freqtrade.loggers.logging.config.dictConfig", MagicMock)
     arglist = ["trade", "-vvv"]
     args = Arguments(arglist).get_parsed_arg()
 
@@ -614,7 +614,9 @@ def test_cli_verbose_with_params(default_conf, mocker, caplog) -> None:
     assert log_has("Verbosity set to 3", caplog)
 
 
+@pytest.mark.usefixtures("keep_log_config_loggers")
 def test_set_logfile(default_conf, mocker, tmp_path):
+    default_conf["ft_tests_force_logging"] = True
     patched_configuration_load_config_file(mocker, default_conf)
     f = tmp_path / "test_file.log"
     assert not f.is_file()
@@ -1,4 +1,5 @@
 import logging
+import re
 import sys
 
 import pytest
@@ -7,7 +8,6 @@ from freqtrade.exceptions import OperationalException
 from freqtrade.loggers import (
     FTBufferingHandler,
     FtRichHandler,
-    set_loggers,
     setup_logging,
     setup_logging_pre,
 )
@@ -17,6 +17,7 @@ from freqtrade.loggers.set_log_levels import (
 )
 
 
+@pytest.mark.usefixtures("keep_log_config_loggers")
 def test_set_loggers() -> None:
     # Reset Logging to Debug, otherwise this fails randomly as it's set globally
     logging.getLogger("requests").setLevel(logging.DEBUG)
@@ -27,8 +28,11 @@ def test_set_loggers() -> None:
     previous_value1 = logging.getLogger("requests").level
     previous_value2 = logging.getLogger("ccxt.base.exchange").level
     previous_value3 = logging.getLogger("telegram").level
-    set_loggers()
+    config = {
+        "verbosity": 1,
+        "ft_tests_force_logging": True,
+    }
+    setup_logging(config)
 
     value1 = logging.getLogger("requests").level
     assert previous_value1 is not value1
@@ -41,15 +45,17 @@ def test_set_loggers() -> None:
     value3 = logging.getLogger("telegram").level
     assert previous_value3 is not value3
     assert value3 is logging.INFO
-    set_loggers(verbosity=2)
+    config["verbosity"] = 2
+    setup_logging(config)
 
     assert logging.getLogger("requests").level is logging.DEBUG
     assert logging.getLogger("ccxt.base.exchange").level is logging.INFO
     assert logging.getLogger("telegram").level is logging.INFO
     assert logging.getLogger("werkzeug").level is logging.INFO
 
-    set_loggers(verbosity=3, api_verbosity="error")
+    config["verbosity"] = 3
+    config["api_server"] = {"verbosity": "error"}
+    setup_logging(config)
 
     assert logging.getLogger("requests").level is logging.DEBUG
     assert logging.getLogger("ccxt.base.exchange").level is logging.DEBUG
@@ -58,12 +64,14 @@ def test_set_loggers() -> None:
 
 
 @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+@pytest.mark.usefixtures("keep_log_config_loggers")
 def test_set_loggers_syslog():
     logger = logging.getLogger()
     orig_handlers = logger.handlers
     logger.handlers = []
 
     config = {
+        "ft_tests_force_logging": True,
         "verbosity": 2,
         "logfile": "syslog:/dev/log",
     }
@@ -82,12 +90,14 @@ def test_set_loggers_syslog():
 
 
 @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+@pytest.mark.usefixtures("keep_log_config_loggers")
 def test_set_loggers_Filehandler(tmp_path):
     logger = logging.getLogger()
     orig_handlers = logger.handlers
     logger.handlers = []
     logfile = tmp_path / "logs/ft_logfile.log"
     config = {
+        "ft_tests_force_logging": True,
         "verbosity": 2,
         "logfile": str(logfile),
     }
@@ -108,6 +118,7 @@ def test_set_loggers_Filehandler(tmp_path):
 
 
 @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+@pytest.mark.usefixtures("keep_log_config_loggers")
 def test_set_loggers_Filehandler_without_permission(tmp_path):
     logger = logging.getLogger()
     orig_handlers = logger.handlers
@@ -117,6 +128,7 @@ def test_set_loggers_Filehandler_without_permission(tmp_path):
     tmp_path.chmod(0o400)
     logfile = tmp_path / "logs/ft_logfile.log"
     config = {
+        "ft_tests_force_logging": True,
         "verbosity": 2,
         "logfile": str(logfile),
     }
@@ -131,12 +143,14 @@ def test_set_loggers_Filehandler_without_permission(tmp_path):
 
 
 @pytest.mark.skip(reason="systemd is not installed on every system, so we're not testing this.")
-def test_set_loggers_journald(mocker):
+@pytest.mark.usefixtures("keep_log_config_loggers")
+def test_set_loggers_journald():
     logger = logging.getLogger()
     orig_handlers = logger.handlers
     logger.handlers = []
 
     config = {
+        "ft_tests_force_logging": True,
         "verbosity": 2,
         "logfile": "journald",
     }
@@ -150,12 +164,14 @@ def test_set_loggers_journald(mocker):
     logger.handlers = orig_handlers
 
 
+@pytest.mark.usefixtures("keep_log_config_loggers")
 def test_set_loggers_journald_importerror(import_fails):
     logger = logging.getLogger()
     orig_handlers = logger.handlers
     logger.handlers = []
 
     config = {
+        "ft_tests_force_logging": True,
         "verbosity": 2,
         "logfile": "journald",
     }
@@ -164,6 +180,56 @@ def test_set_loggers_journald_importerror(import_fails):
     logger.handlers = orig_handlers
 
 
+@pytest.mark.usefixtures("keep_log_config_loggers")
+def test_set_loggers_json_format(capsys):
+    logger = logging.getLogger()
+    orig_handlers = logger.handlers
+    logger.handlers = []
+
+    config = {
+        "ft_tests_force_logging": True,
+        "verbosity": 2,
+        "log_config": {
+            "version": 1,
+            "formatters": {
+                "json": {
+                    "()": "freqtrade.loggers.json_formatter.JsonFormatter",
+                    "fmt_dict": {
+                        "timestamp": "asctime",
+                        "level": "levelname",
+                        "logger": "name",
+                        "message": "message",
+                    },
+                }
+            },
+            "handlers": {
+                "json": {
+                    "class": "logging.StreamHandler",
+                    "formatter": "json",
+                }
+            },
+            "root": {
+                "handlers": ["json"],
+                "level": "DEBUG",
+            },
+        },
+    }
+
+    setup_logging_pre()
+    setup_logging(config)
+    assert len(logger.handlers) == 2
+    assert [x for x in logger.handlers if type(x).__name__ == "StreamHandler"]
+    assert [x for x in logger.handlers if isinstance(x, FTBufferingHandler)]
+
+    logger.info("Test message")
+
+    captured = capsys.readouterr()
+    assert re.search(r'{"timestamp": ".*"Test message".*', captured.err)
+
+    # reset handlers to not break pytest
+    logger.handlers = orig_handlers
+
+
 def test_reduce_verbosity():
     setup_logging_pre()
     reduce_verbosity_for_bias_tester()