https://github.com/freqtrade/freqtrade.git
refactor: Update utc to UTC
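
Background (not part of the diff): Python 3.11 added datetime.UTC as an alias of datetime.timezone.utc, which is what this commit switches to throughout. A minimal sketch of the equivalence, assuming Python 3.11 or newer:

    from datetime import UTC, datetime, timezone

    # datetime.UTC (new in Python 3.11) is the very same object as datetime.timezone.utc,
    # so every timezone.utc -> UTC replacement below is behavior-preserving.
    assert UTC is timezone.utc
    assert datetime.now(UTC).tzinfo is datetime.now(timezone.utc).tzinfo
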
@@ -4,9 +4,8 @@ This module contains the argument manager class
 
 import logging
 import re
-from datetime import datetime, timezone
-
-from typing_extensions import Self
+from datetime import UTC, datetime
+from typing import Self
 
 from freqtrade.constants import DATETIME_PRINT_FORMAT
 from freqtrade.exceptions import ConfigurationError
@@ -152,7 +151,7 @@ class TimeRange:
                 if stype[0] == "date" and len(starts) == 8:
                     start = int(
                         datetime.strptime(starts, "%Y%m%d")
-                        .replace(tzinfo=timezone.utc)
+                        .replace(tzinfo=UTC)
                         .timestamp()
                     )
                 elif len(starts) == 13:
@@ -165,7 +164,7 @@ class TimeRange:
                 if stype[1] == "date" and len(stops) == 8:
                     stop = int(
                         datetime.strptime(stops, "%Y%m%d")
-                        .replace(tzinfo=timezone.utc)
+                        .replace(tzinfo=UTC)
                         .timestamp()
                     )
                 elif len(stops) == 13:
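
Besides the UTC rename, the first hunk above also imports Self from the standard typing module rather than typing_extensions; typing.Self is available from Python 3.11 onward. A small standalone sketch of the pattern (Builder is a hypothetical class, not freqtrade code):

    from typing import Self


    class Builder:
        """Hypothetical example of a method annotated with typing.Self."""

        def __init__(self) -> None:
            self.parts: list[str] = []

        def add(self, part: str) -> Self:
            # Self keeps the precise (sub)class type for chained calls.
            self.parts.append(part)
            return self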

@@ -5,7 +5,7 @@ Helpers when analyzing backtest data
 import logging
 import zipfile
 from copy import copy
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from io import BytesIO, StringIO
 from pathlib import Path
 from typing import Any, Literal
@@ -324,7 +324,7 @@ def find_existing_backtest_stats(
 
            if min_backtest_date is not None:
                backtest_date = strategy_metadata["backtest_start_time"]
-                backtest_date = datetime.fromtimestamp(backtest_date, tz=timezone.utc)
+                backtest_date = datetime.fromtimestamp(backtest_date, tz=UTC)
                if backtest_date < min_backtest_date:
                    # Do not use a cached result for this strategy as first result is too old.
                    del run_ids[strategy_name]

@@ -7,7 +7,7 @@ Common Interface for bot and strategy to access data.
 
 import logging
 from collections import deque
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Any
 
 from pandas import DataFrame, Timedelta, Timestamp, to_timedelta
@@ -98,7 +98,7 @@ class DataProvider:
         :param candle_type: Any of the enum CandleType (must match trading mode!)
         """
         pair_key = (pair, timeframe, candle_type)
-        self.__cached_pairs[pair_key] = (dataframe, datetime.now(timezone.utc))
+        self.__cached_pairs[pair_key] = (dataframe, datetime.now(UTC))
 
     # For multiple producers we will want to merge the pairlists instead of overwriting
     def _set_producer_pairs(self, pairlist: list[str], producer_name: str = "default"):
@@ -131,7 +131,7 @@ class DataProvider:
                 "data": {
                     "key": pair_key,
                     "df": dataframe.tail(1),
-                    "la": datetime.now(timezone.utc),
+                    "la": datetime.now(UTC),
                 },
             }
             self.__rpc.send_msg(msg)
@@ -164,7 +164,7 @@ class DataProvider:
         if producer_name not in self.__producer_pairs_df:
             self.__producer_pairs_df[producer_name] = {}
 
-        _last_analyzed = datetime.now(timezone.utc) if not last_analyzed else last_analyzed
+        _last_analyzed = datetime.now(UTC) if not last_analyzed else last_analyzed
 
         self.__producer_pairs_df[producer_name][pair_key] = (dataframe, _last_analyzed)
         logger.debug(f"External DataFrame for {pair_key} from {producer_name} added.")
@@ -275,12 +275,12 @@ class DataProvider:
         # If we have no data from this Producer yet
         if producer_name not in self.__producer_pairs_df:
             # We don't have this data yet, return empty DataFrame and datetime (01-01-1970)
-            return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
+            return (DataFrame(), datetime.fromtimestamp(0, tz=UTC))
 
         # If we do have data from that Producer, but no data on this pair_key
         if pair_key not in self.__producer_pairs_df[producer_name]:
             # We don't have this data yet, return empty DataFrame and datetime (01-01-1970)
-            return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
+            return (DataFrame(), datetime.fromtimestamp(0, tz=UTC))
 
         # We have it, return this data
         df, la = self.__producer_pairs_df[producer_name][pair_key]
@@ -396,10 +396,10 @@ class DataProvider:
                if (max_index := self.__slice_index.get(pair)) is not None:
                    df = df.iloc[max(0, max_index - MAX_DATAFRAME_CANDLES) : max_index]
                else:
-                    return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
+                    return (DataFrame(), datetime.fromtimestamp(0, tz=UTC))
            return df, date
        else:
-            return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
+            return (DataFrame(), datetime.fromtimestamp(0, tz=UTC))
 
    @property
    def runmode(self) -> RunMode:
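
The DataProvider hunks above return the Unix epoch as a "no data yet" sentinel; constructing it with tz=UTC makes it an aware datetime that can be compared against other aware timestamps. A standalone illustration:

    from datetime import UTC, datetime

    # The sentinel used above: 1970-01-01T00:00:00+00:00 as an aware datetime.
    epoch = datetime.fromtimestamp(0, tz=UTC)

    # Aware datetimes compare directly; any real candle time is newer than the epoch.
    assert epoch < datetime.now(UTC)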

@@ -8,7 +8,7 @@ import logging
 import re
 from abc import ABC, abstractmethod
 from copy import deepcopy
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from pathlib import Path
 
 from pandas import DataFrame, to_datetime
@@ -118,8 +118,8 @@ class IDataHandler(ABC):
         df = self._ohlcv_load(pair, timeframe, None, candle_type)
         if df.empty:
             return (
-                datetime.fromtimestamp(0, tz=timezone.utc),
-                datetime.fromtimestamp(0, tz=timezone.utc),
+                datetime.fromtimestamp(0, tz=UTC),
+                datetime.fromtimestamp(0, tz=UTC),
                 0,
             )
         return df.iloc[0]["date"].to_pydatetime(), df.iloc[-1]["date"].to_pydatetime(), len(df)
@@ -201,8 +201,8 @@ class IDataHandler(ABC):
         df = self._trades_load(pair, trading_mode)
         if df.empty:
             return (
-                datetime.fromtimestamp(0, tz=timezone.utc),
-                datetime.fromtimestamp(0, tz=timezone.utc),
+                datetime.fromtimestamp(0, tz=UTC),
+                datetime.fromtimestamp(0, tz=UTC),
                 0,
             )
         return (

@@ -1,7 +1,7 @@
 """Binance exchange subclass"""
 
 import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from pathlib import Path
 
 import ccxt
@@ -160,7 +160,7 @@ class Binance(Exchange):
                since_ms = x[3][0][0]
                logger.info(
                    f"Candle-data for {pair} available starting with "
-                    f"{datetime.fromtimestamp(since_ms // 1000, tz=timezone.utc).isoformat()}."
+                    f"{datetime.fromtimestamp(since_ms // 1000, tz=UTC).isoformat()}."
                )
                if until_ms and since_ms >= until_ms:
                    logger.warning(

@@ -1,7 +1,7 @@
 """Bitpanda exchange subclass"""
 
 import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from freqtrade.exchange import Exchange
 
@@ -34,5 +34,5 @@ class Bitpanda(Exchange):
         :param pair: Pair the order is for
         :param since: datetime object of the order creation time. Assumes object is in UTC.
         """
-        params = {"to": int(datetime.now(timezone.utc).timestamp() * 1000)}
+        params = {"to": int(datetime.now(UTC).timestamp() * 1000)}
         return super().get_trades_for_order(order_id, pair, since, params)
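
The Bitpanda hunk builds a millisecond timestamp for the exchange API: datetime.now(UTC).timestamp() returns seconds since the epoch, so it is scaled by 1000 and truncated to an int. A standalone sketch of that conversion:

    from datetime import UTC, datetime

    # Seconds since the epoch as a float, converted to integer milliseconds.
    now_ms = int(datetime.now(UTC).timestamp() * 1000)
    assert len(str(now_ms)) == 13  # present-day epoch-millisecond values have 13 digits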

@@ -9,7 +9,7 @@ import logging
 import signal
 from collections.abc import Coroutine, Generator
 from copy import deepcopy
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
 from math import floor, isnan
 from threading import Lock
 from typing import Any, Literal, TypeGuard, TypeVar
@@ -655,7 +655,7 @@ class Exchange:
            if isinstance(markets, Exception):
                raise markets
            return None
-        except asyncio.TimeoutError as e:
+        except TimeoutError as e:
            logger.warning("Could not load markets. Reason: %s", e)
            raise TemporaryError from e
 
@@ -2220,7 +2220,7 @@ class Exchange:
            _params = params if params else {}
            my_trades = self._api.fetch_my_trades(
                pair,
-                int((since.replace(tzinfo=timezone.utc).timestamp() - 5) * 1000),
+                int((since.replace(tzinfo=UTC).timestamp() - 5) * 1000),
                params=_params,
            )
            matched_trades = [trade for trade in my_trades if trade["order"] == order_id]
@@ -3347,7 +3347,7 @@ class Exchange:
        if not filename.parent.is_dir():
            filename.parent.mkdir(parents=True)
        data = {
-            "updated": datetime.now(timezone.utc),
+            "updated": datetime.now(UTC),
            "data": tiers,
        }
        file_dump_json(filename, data)
@@ -3369,7 +3369,7 @@ class Exchange:
                updated = tiers.get("updated")
                if updated:
                    updated_dt = parser.parse(updated)
-                    if updated_dt < datetime.now(timezone.utc) - cache_time:
+                    if updated_dt < datetime.now(UTC) - cache_time:
                        logger.info("Cached leverage tiers are outdated. Will update.")
                        return None
                return tiers.get("data")
@@ -3584,7 +3584,7 @@ class Exchange:
        mark_price_type = CandleType.from_string(self._ft_has["mark_ohlcv_price"])
 
        if not close_date:
-            close_date = datetime.now(timezone.utc)
+            close_date = datetime.now(UTC)
        since_ms = dt_ts(timeframe_to_prev_date(timeframe, open_date))
 
        mark_comb: PairWithTimeframe = (pair, timeframe, mark_price_type)
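
Two details in the Exchange hunks above: since Python 3.11, asyncio.TimeoutError is an alias of the built-in TimeoutError, so the narrower-looking except clause still catches asyncio timeouts; and replace(tzinfo=UTC) attaches the timezone to a naive datetime without shifting its wall-clock value. A short sketch, assuming Python 3.11+:

    import asyncio
    from datetime import UTC, datetime

    # The exception rename is cosmetic on 3.11+: both names refer to one class.
    assert asyncio.TimeoutError is TimeoutError

    # replace(tzinfo=UTC) labels a naive datetime as UTC without converting it.
    naive = datetime(2024, 1, 1, 12, 0)  # hypothetical naive value assumed to be UTC
    aware = naive.replace(tzinfo=UTC)
    assert aware.hour == naive.hour  # wall-clock time unchanged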

@@ -3,7 +3,7 @@ Exchange support utils
 """
 
 import inspect
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
 from math import ceil, floor, isnan
 from typing import Any
 
@@ -148,7 +148,7 @@ def date_minus_candles(timeframe: str, candle_count: int, date: datetime | None
 
     """
     if not date:
-        date = datetime.now(timezone.utc)
+        date = datetime.now(UTC)
 
     tf_min = timeframe_to_minutes(timeframe)
     new_date = timeframe_to_prev_date(timeframe, date) - timedelta(minutes=tf_min * candle_count)

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 import ccxt
 from ccxt import ROUND_DOWN, ROUND_UP
@@ -59,7 +59,7 @@ def timeframe_to_prev_date(timeframe: str, date: datetime | None = None) -> date
     :returns: date of previous candle (with utc timezone)
     """
     if not date:
-        date = datetime.now(timezone.utc)
+        date = datetime.now(UTC)
 
     new_timestamp = ccxt.Exchange.round_timeframe(timeframe, dt_ts(date), ROUND_DOWN) // 1000
     return dt_from_ts(new_timestamp)
@@ -73,6 +73,6 @@ def timeframe_to_next_date(timeframe: str, date: datetime | None = None) -> date
     :returns: date of next candle (with utc timezone)
     """
     if not date:
-        date = datetime.now(timezone.utc)
+        date = datetime.now(UTC)
     new_timestamp = ccxt.Exchange.round_timeframe(timeframe, dt_ts(date), ROUND_UP) // 1000
     return dt_from_ts(new_timestamp)
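
timeframe_to_prev_date and timeframe_to_next_date above default to the current UTC time and round it down or up to a candle boundary via ccxt's round_timeframe. A rough standard-library-only sketch of the same rounding for an assumed 5m timeframe (illustrative, not the freqtrade implementation):

    from datetime import UTC, datetime, timedelta

    tf = timedelta(minutes=5)  # assumed "5m" timeframe
    now = datetime.now(UTC)

    # Round down to the open of the current candle; the next candle open is one step later.
    prev_open = int(now.timestamp() // tf.total_seconds()) * int(tf.total_seconds())
    prev_candle = datetime.fromtimestamp(prev_open, tz=UTC)
    next_candle = prev_candle + tf

    assert prev_candle <= now < next_candle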

@@ -5,7 +5,7 @@ import re
 import shutil
 import threading
 import warnings
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
 from pathlib import Path
 from typing import Any, TypedDict
 
@@ -116,7 +116,7 @@ class FreqaiDataDrawer:
         if metric not in self.metric_tracker[pair]:
             self.metric_tracker[pair][metric] = {"timestamp": [], "value": []}
 
-        timestamp = int(datetime.now(timezone.utc).timestamp())
+        timestamp = int(datetime.now(UTC).timestamp())
         self.metric_tracker[pair][metric]["value"].append(value)
         self.metric_tracker[pair][metric]["timestamp"].append(timestamp)
 

@@ -3,7 +3,7 @@ import inspect
 import logging
 import random
 import shutil
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from pathlib import Path
 from typing import Any
 
@@ -341,7 +341,7 @@ class FreqaiDataKitchen:
         full_timerange = TimeRange.parse_timerange(tr)
         config_timerange = TimeRange.parse_timerange(self.config["timerange"])
         if config_timerange.stopts == 0:
-            config_timerange.stopts = int(datetime.now(tz=timezone.utc).timestamp())
+            config_timerange.stopts = int(datetime.now(tz=UTC).timestamp())
         timerange_train = copy.deepcopy(full_timerange)
         timerange_backtest = copy.deepcopy(full_timerange)
 
@@ -525,7 +525,7 @@ class FreqaiDataKitchen:
         :return:
             bool = If the model is expired or not.
         """
-        time = datetime.now(tz=timezone.utc).timestamp()
+        time = datetime.now(tz=UTC).timestamp()
         elapsed_time = (time - trained_timestamp) / 3600  # hours
         max_time = self.freqai_config.get("expiration_hours", 0)
         if max_time > 0:
@@ -536,7 +536,7 @@ class FreqaiDataKitchen:
     def check_if_new_training_required(
         self, trained_timestamp: int
     ) -> tuple[bool, TimeRange, TimeRange]:
-        time = datetime.now(tz=timezone.utc).timestamp()
+        time = datetime.now(tz=UTC).timestamp()
         trained_timerange = TimeRange()
         data_load_timerange = TimeRange()
 
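
The model-expiry check above measures the hours elapsed since training and compares them with the expiration_hours setting, where 0 disables the check. A condensed standalone sketch (both values are hypothetical):

    from datetime import UTC, datetime

    trained_timestamp = datetime(2024, 1, 1, tzinfo=UTC).timestamp()  # hypothetical training time
    expiration_hours = 6  # hypothetical config value; 0 would disable the check

    elapsed_hours = (datetime.now(UTC).timestamp() - trained_timestamp) / 3600
    expired = expiration_hours > 0 and elapsed_hours > expiration_hours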

@@ -3,7 +3,7 @@ import threading
 import time
 from abc import ABC, abstractmethod
 from collections import deque
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from pathlib import Path
 from typing import Any, Literal
 
@@ -76,7 +76,7 @@ class IFreqaiModel(ABC):
 
         self.dd = FreqaiDataDrawer(Path(self.full_path), self.config)
         # set current candle to arbitrary historical date
-        self.current_candle: datetime = datetime.fromtimestamp(637887600, tz=timezone.utc)
+        self.current_candle: datetime = datetime.fromtimestamp(637887600, tz=UTC)
         self.dd.current_candle = self.current_candle
         self.scanning = False
         self.ft_params = self.freqai_info["feature_parameters"]

@@ -1,5 +1,5 @@
 import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from pathlib import Path
 from typing import Any
 
@@ -64,7 +64,7 @@ def get_required_data_timerange(config: Config) -> TimeRange:
     Used to compute the required data download time range
     for auto data-download in FreqAI
     """
-    time = datetime.now(tz=timezone.utc).timestamp()
+    time = datetime.now(tz=UTC).timestamp()
 
     timeframes = config["freqai"]["feature_parameters"].get("include_timeframes")
 

@@ -102,7 +102,7 @@ class WebSocketChannel:
            self._send_times.append(total_time)
 
            self._calc_send_limit()
-        except asyncio.TimeoutError:
+        except TimeoutError:
            logger.info(f"Connection for {self} timed out, disconnecting")
            raise
 
@@ -201,8 +201,8 @@ class WebSocketChannel:
        try:
            await task
        except (
+            TimeoutError,
            asyncio.CancelledError,
-            asyncio.TimeoutError,
            WebSocketDisconnect,
            ConnectionClosed,
            RuntimeError,

@@ -5,7 +5,7 @@ This module contains class to define a RPC communications
 import logging
 from abc import abstractmethod
 from collections.abc import Generator, Sequence
-from datetime import date, datetime, timedelta, timezone
+from datetime import UTC, date, datetime, timedelta
 from typing import TYPE_CHECKING, Any
 
 import psutil
@@ -375,7 +375,7 @@ class RPC:
         """
         :param timeunit: Valid entries are 'days', 'weeks', 'months'
         """
-        start_date = datetime.now(timezone.utc).date()
+        start_date = datetime.now(UTC).date()
         if timeunit == "weeks":
             # weekly
             start_date = start_date - timedelta(days=start_date.weekday())  # Monday
@@ -1259,7 +1259,7 @@ class RPC:
 
         for lock in locks:
             lock.active = False
-            lock.lock_end_time = datetime.now(timezone.utc)
+            lock.lock_end_time = datetime.now(UTC)
 
         Trade.commit()
 
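
The RPC hunk above anchors the profit-per-timeunit report at today's UTC date and, for weekly stats, steps back to Monday via date.weekday() (0 is Monday). A standalone sketch of that anchor calculation:

    from datetime import UTC, datetime, timedelta

    start_date = datetime.now(UTC).date()

    # Weekly buckets start on Monday: weekday() returns 0 for Monday .. 6 for Sunday.
    week_start = start_date - timedelta(days=start_date.weekday())
    assert week_start.weekday() == 0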