refactor: Update utc to UTC

Matthias
2025-07-04 08:56:42 +02:00
parent 5f59e1435e
commit 37cc949d94
15 changed files with 49 additions and 50 deletions
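The rename is mechanical: Python 3.11 added datetime.UTC as an alias of datetime.timezone.utc (and typing.Self, previously imported from typing_extensions), so the shorter spelling can be used everywhere. A minimal sketch of the equivalence, assuming Python 3.11 or newer; it is an illustration, not code from this commit:

# Illustration only (assumes Python 3.11+): datetime.UTC is an alias of
# datetime.timezone.utc, so both spellings produce identical results.
from datetime import UTC, datetime, timezone

assert UTC is timezone.utc

old_style = datetime.now(timezone.utc)  # spelling before this commit
new_style = datetime.now(UTC)           # spelling after this commit
print(old_style.tzinfo is new_style.tzinfo)  # True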

View File

@@ -4,9 +4,8 @@ This module contains the argument manager class
import logging
import re
-from datetime import datetime, timezone
-from typing_extensions import Self
+from datetime import UTC, datetime
+from typing import Self
from freqtrade.constants import DATETIME_PRINT_FORMAT
from freqtrade.exceptions import ConfigurationError
@@ -152,7 +151,7 @@ class TimeRange:
if stype[0] == "date" and len(starts) == 8:
start = int(
datetime.strptime(starts, "%Y%m%d")
-.replace(tzinfo=timezone.utc)
+.replace(tzinfo=UTC)
.timestamp()
)
elif len(starts) == 13:
@@ -165,7 +164,7 @@ class TimeRange:
if stype[1] == "date" and len(stops) == 8:
stop = int(
datetime.strptime(stops, "%Y%m%d")
-.replace(tzinfo=timezone.utc)
+.replace(tzinfo=UTC)
.timestamp()
)
elif len(stops) == 13:

View File

@@ -5,7 +5,7 @@ Helpers when analyzing backtest data
import logging
import zipfile
from copy import copy
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from io import BytesIO, StringIO
from pathlib import Path
from typing import Any, Literal
@@ -324,7 +324,7 @@ def find_existing_backtest_stats(
if min_backtest_date is not None:
backtest_date = strategy_metadata["backtest_start_time"]
-backtest_date = datetime.fromtimestamp(backtest_date, tz=timezone.utc)
+backtest_date = datetime.fromtimestamp(backtest_date, tz=UTC)
if backtest_date < min_backtest_date:
# Do not use a cached result for this strategy as first result is too old.
del run_ids[strategy_name]

View File

@@ -7,7 +7,7 @@ Common Interface for bot and strategy to access data.
import logging
from collections import deque
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from typing import Any
from pandas import DataFrame, Timedelta, Timestamp, to_timedelta
@@ -98,7 +98,7 @@ class DataProvider:
:param candle_type: Any of the enum CandleType (must match trading mode!)
"""
pair_key = (pair, timeframe, candle_type)
-self.__cached_pairs[pair_key] = (dataframe, datetime.now(timezone.utc))
+self.__cached_pairs[pair_key] = (dataframe, datetime.now(UTC))
# For multiple producers we will want to merge the pairlists instead of overwriting
def _set_producer_pairs(self, pairlist: list[str], producer_name: str = "default"):
@@ -131,7 +131,7 @@ class DataProvider:
"data": {
"key": pair_key,
"df": dataframe.tail(1),
"la": datetime.now(timezone.utc),
"la": datetime.now(UTC),
},
}
self.__rpc.send_msg(msg)
@@ -164,7 +164,7 @@ class DataProvider:
if producer_name not in self.__producer_pairs_df:
self.__producer_pairs_df[producer_name] = {}
-_last_analyzed = datetime.now(timezone.utc) if not last_analyzed else last_analyzed
+_last_analyzed = datetime.now(UTC) if not last_analyzed else last_analyzed
self.__producer_pairs_df[producer_name][pair_key] = (dataframe, _last_analyzed)
logger.debug(f"External DataFrame for {pair_key} from {producer_name} added.")
@@ -275,12 +275,12 @@ class DataProvider:
# If we have no data from this Producer yet
if producer_name not in self.__producer_pairs_df:
# We don't have this data yet, return empty DataFrame and datetime (01-01-1970)
-return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
+return (DataFrame(), datetime.fromtimestamp(0, tz=UTC))
# If we do have data from that Producer, but no data on this pair_key
if pair_key not in self.__producer_pairs_df[producer_name]:
# We don't have this data yet, return empty DataFrame and datetime (01-01-1970)
-return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
+return (DataFrame(), datetime.fromtimestamp(0, tz=UTC))
# We have it, return this data
df, la = self.__producer_pairs_df[producer_name][pair_key]
@@ -396,10 +396,10 @@ class DataProvider:
if (max_index := self.__slice_index.get(pair)) is not None:
df = df.iloc[max(0, max_index - MAX_DATAFRAME_CANDLES) : max_index]
else:
-return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
+return (DataFrame(), datetime.fromtimestamp(0, tz=UTC))
return df, date
else:
-return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
+return (DataFrame(), datetime.fromtimestamp(0, tz=UTC))
@property
def runmode(self) -> RunMode:

View File

@@ -8,7 +8,7 @@ import logging
import re
from abc import ABC, abstractmethod
from copy import deepcopy
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from pathlib import Path
from pandas import DataFrame, to_datetime
@@ -118,8 +118,8 @@ class IDataHandler(ABC):
df = self._ohlcv_load(pair, timeframe, None, candle_type)
if df.empty:
return (
-datetime.fromtimestamp(0, tz=timezone.utc),
-datetime.fromtimestamp(0, tz=timezone.utc),
+datetime.fromtimestamp(0, tz=UTC),
+datetime.fromtimestamp(0, tz=UTC),
0,
)
return df.iloc[0]["date"].to_pydatetime(), df.iloc[-1]["date"].to_pydatetime(), len(df)
@@ -201,8 +201,8 @@ class IDataHandler(ABC):
df = self._trades_load(pair, trading_mode)
if df.empty:
return (
-datetime.fromtimestamp(0, tz=timezone.utc),
-datetime.fromtimestamp(0, tz=timezone.utc),
+datetime.fromtimestamp(0, tz=UTC),
+datetime.fromtimestamp(0, tz=UTC),
0,
)
return (

View File

@@ -1,7 +1,7 @@
"""Binance exchange subclass"""
import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from pathlib import Path
import ccxt
@@ -160,7 +160,7 @@ class Binance(Exchange):
since_ms = x[3][0][0]
logger.info(
f"Candle-data for {pair} available starting with "
f"{datetime.fromtimestamp(since_ms // 1000, tz=timezone.utc).isoformat()}."
f"{datetime.fromtimestamp(since_ms // 1000, tz=UTC).isoformat()}."
)
if until_ms and since_ms >= until_ms:
logger.warning(

View File

@@ -1,7 +1,7 @@
"""Bitpanda exchange subclass"""
import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from freqtrade.exchange import Exchange
@@ -34,5 +34,5 @@ class Bitpanda(Exchange):
:param pair: Pair the order is for
:param since: datetime object of the order creation time. Assumes object is in UTC.
"""
params = {"to": int(datetime.now(timezone.utc).timestamp() * 1000)}
params = {"to": int(datetime.now(UTC).timestamp() * 1000)}
return super().get_trades_for_order(order_id, pair, since, params)

View File

@@ -9,7 +9,7 @@ import logging
import signal
from collections.abc import Coroutine, Generator
from copy import deepcopy
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
from math import floor, isnan
from threading import Lock
from typing import Any, Literal, TypeGuard, TypeVar
@@ -655,7 +655,7 @@ class Exchange:
if isinstance(markets, Exception):
raise markets
return None
-except asyncio.TimeoutError as e:
+except TimeoutError as e:
logger.warning("Could not load markets. Reason: %s", e)
raise TemporaryError from e
@@ -2220,7 +2220,7 @@ class Exchange:
_params = params if params else {}
my_trades = self._api.fetch_my_trades(
pair,
-int((since.replace(tzinfo=timezone.utc).timestamp() - 5) * 1000),
+int((since.replace(tzinfo=UTC).timestamp() - 5) * 1000),
params=_params,
)
matched_trades = [trade for trade in my_trades if trade["order"] == order_id]
@@ -3347,7 +3347,7 @@ class Exchange:
if not filename.parent.is_dir():
filename.parent.mkdir(parents=True)
data = {
"updated": datetime.now(timezone.utc),
"updated": datetime.now(UTC),
"data": tiers,
}
file_dump_json(filename, data)
@@ -3369,7 +3369,7 @@ class Exchange:
updated = tiers.get("updated")
if updated:
updated_dt = parser.parse(updated)
-if updated_dt < datetime.now(timezone.utc) - cache_time:
+if updated_dt < datetime.now(UTC) - cache_time:
logger.info("Cached leverage tiers are outdated. Will update.")
return None
return tiers.get("data")
@@ -3584,7 +3584,7 @@ class Exchange:
mark_price_type = CandleType.from_string(self._ft_has["mark_ohlcv_price"])
if not close_date:
-close_date = datetime.now(timezone.utc)
+close_date = datetime.now(UTC)
since_ms = dt_ts(timeframe_to_prev_date(timeframe, open_date))
mark_comb: PairWithTimeframe = (pair, timeframe, mark_price_type)
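Besides the UTC rename, this file (and the websocket channel further down) replaces asyncio.TimeoutError with the builtin TimeoutError; since Python 3.11 the asyncio name is only an alias of the builtin, so the exception handling is unchanged. A short illustrative sketch, assuming Python 3.11 or newer, not taken from the commit:

# Illustration only: the builtin TimeoutError also catches timeouts raised
# by asyncio.wait_for() on Python 3.11+.
import asyncio


async def _slow() -> None:
    await asyncio.sleep(10)


async def _main() -> None:
    assert asyncio.TimeoutError is TimeoutError
    try:
        await asyncio.wait_for(_slow(), timeout=0.01)
    except TimeoutError:
        print("timed out as expected")


asyncio.run(_main())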

View File

@@ -3,7 +3,7 @@ Exchange support utils
"""
import inspect
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
from math import ceil, floor, isnan
from typing import Any
@@ -148,7 +148,7 @@ def date_minus_candles(timeframe: str, candle_count: int, date: datetime | None
"""
if not date:
-date = datetime.now(timezone.utc)
+date = datetime.now(UTC)
tf_min = timeframe_to_minutes(timeframe)
new_date = timeframe_to_prev_date(timeframe, date) - timedelta(minutes=tf_min * candle_count)

View File

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
import ccxt
from ccxt import ROUND_DOWN, ROUND_UP
@@ -59,7 +59,7 @@ def timeframe_to_prev_date(timeframe: str, date: datetime | None = None) -> date
:returns: date of previous candle (with utc timezone)
"""
if not date:
-date = datetime.now(timezone.utc)
+date = datetime.now(UTC)
new_timestamp = ccxt.Exchange.round_timeframe(timeframe, dt_ts(date), ROUND_DOWN) // 1000
return dt_from_ts(new_timestamp)
@@ -73,6 +73,6 @@ def timeframe_to_next_date(timeframe: str, date: datetime | None = None) -> date
:returns: date of next candle (with utc timezone)
"""
if not date:
-date = datetime.now(timezone.utc)
+date = datetime.now(UTC)
new_timestamp = ccxt.Exchange.round_timeframe(timeframe, dt_ts(date), ROUND_UP) // 1000
return dt_from_ts(new_timestamp)
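For context, both helpers above default to datetime.now(UTC) when no date is passed and round to the candle boundary. A hypothetical usage sketch; the import path and example values are assumptions for illustration, not part of the commit:

# Hypothetical usage of the timeframe helpers (import path assumed).
from datetime import UTC, datetime

from freqtrade.exchange import timeframe_to_next_date, timeframe_to_prev_date

now = datetime(2025, 7, 4, 8, 56, tzinfo=UTC)
print(timeframe_to_prev_date("1h", now))  # 2025-07-04 08:00:00+00:00
print(timeframe_to_next_date("1h", now))  # 2025-07-04 09:00:00+00:00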

View File

@@ -5,7 +5,7 @@ import re
import shutil
import threading
import warnings
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
from pathlib import Path
from typing import Any, TypedDict
@@ -116,7 +116,7 @@ class FreqaiDataDrawer:
if metric not in self.metric_tracker[pair]:
self.metric_tracker[pair][metric] = {"timestamp": [], "value": []}
-timestamp = int(datetime.now(timezone.utc).timestamp())
+timestamp = int(datetime.now(UTC).timestamp())
self.metric_tracker[pair][metric]["value"].append(value)
self.metric_tracker[pair][metric]["timestamp"].append(timestamp)

View File

@@ -3,7 +3,7 @@ import inspect
import logging
import random
import shutil
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from pathlib import Path
from typing import Any
@@ -341,7 +341,7 @@ class FreqaiDataKitchen:
full_timerange = TimeRange.parse_timerange(tr)
config_timerange = TimeRange.parse_timerange(self.config["timerange"])
if config_timerange.stopts == 0:
-config_timerange.stopts = int(datetime.now(tz=timezone.utc).timestamp())
+config_timerange.stopts = int(datetime.now(tz=UTC).timestamp())
timerange_train = copy.deepcopy(full_timerange)
timerange_backtest = copy.deepcopy(full_timerange)
@@ -525,7 +525,7 @@ class FreqaiDataKitchen:
:return:
bool = If the model is expired or not.
"""
-time = datetime.now(tz=timezone.utc).timestamp()
+time = datetime.now(tz=UTC).timestamp()
elapsed_time = (time - trained_timestamp) / 3600 # hours
max_time = self.freqai_config.get("expiration_hours", 0)
if max_time > 0:
@@ -536,7 +536,7 @@ class FreqaiDataKitchen:
def check_if_new_training_required(
self, trained_timestamp: int
) -> tuple[bool, TimeRange, TimeRange]:
-time = datetime.now(tz=timezone.utc).timestamp()
+time = datetime.now(tz=UTC).timestamp()
trained_timerange = TimeRange()
data_load_timerange = TimeRange()

View File

@@ -3,7 +3,7 @@ import threading
import time
from abc import ABC, abstractmethod
from collections import deque
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from pathlib import Path
from typing import Any, Literal
@@ -76,7 +76,7 @@ class IFreqaiModel(ABC):
self.dd = FreqaiDataDrawer(Path(self.full_path), self.config)
# set current candle to arbitrary historical date
-self.current_candle: datetime = datetime.fromtimestamp(637887600, tz=timezone.utc)
+self.current_candle: datetime = datetime.fromtimestamp(637887600, tz=UTC)
self.dd.current_candle = self.current_candle
self.scanning = False
self.ft_params = self.freqai_info["feature_parameters"]

View File

@@ -1,5 +1,5 @@
import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from pathlib import Path
from typing import Any
@@ -64,7 +64,7 @@ def get_required_data_timerange(config: Config) -> TimeRange:
Used to compute the required data download time range
for auto data-download in FreqAI
"""
-time = datetime.now(tz=timezone.utc).timestamp()
+time = datetime.now(tz=UTC).timestamp()
timeframes = config["freqai"]["feature_parameters"].get("include_timeframes")

View File

@@ -102,7 +102,7 @@ class WebSocketChannel:
self._send_times.append(total_time)
self._calc_send_limit()
-except asyncio.TimeoutError:
+except TimeoutError:
logger.info(f"Connection for {self} timed out, disconnecting")
raise
@@ -201,8 +201,8 @@ class WebSocketChannel:
try:
await task
except (
+TimeoutError,
asyncio.CancelledError,
-asyncio.TimeoutError,
WebSocketDisconnect,
ConnectionClosed,
RuntimeError,

View File

@@ -5,7 +5,7 @@ This module contains class to define a RPC communications
import logging
from abc import abstractmethod
from collections.abc import Generator, Sequence
-from datetime import date, datetime, timedelta, timezone
+from datetime import UTC, date, datetime, timedelta
from typing import TYPE_CHECKING, Any
import psutil
@@ -375,7 +375,7 @@ class RPC:
"""
:param timeunit: Valid entries are 'days', 'weeks', 'months'
"""
-start_date = datetime.now(timezone.utc).date()
+start_date = datetime.now(UTC).date()
if timeunit == "weeks":
# weekly
start_date = start_date - timedelta(days=start_date.weekday()) # Monday
@@ -1259,7 +1259,7 @@ class RPC:
for lock in locks:
lock.active = False
-lock.lock_end_time = datetime.now(timezone.utc)
+lock.lock_end_time = datetime.now(UTC)
Trade.commit()