ruff format: rpc modules

Author: Matthias
Date: 2024-05-12 16:51:11 +02:00
Parent: cebbe0121e
Commit: 5f64cc8e76
23 changed files with 1994 additions and 1689 deletions
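
The diffs below are mechanical: the rpc modules were re-run through ruff format, which normalizes string literals to double quotes, collapses manually wrapped statements that fit within the configured line length, and otherwise breaks long calls after the opening parenthesis onto indented continuation lines (adding a trailing comma when every argument ends up on its own line). A representative before/after, taken from the first file below:

    # before
    refresh_token = create_token(token_data, api_config.get('jwt_secret_key', 'super-secret'),
                                 token_type="refresh")

    # after
    refresh_token = create_token(
        token_data, api_config.get("jwt_secret_key", "super-secret"), token_type="refresh"
    )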

View File

@@ -21,8 +21,9 @@ router_login = APIRouter()
 def verify_auth(api_config, username: str, password: str):
     """Verify username/password"""
-    return (secrets.compare_digest(username, api_config.get('username')) and
-            secrets.compare_digest(password, api_config.get('password')))
+    return secrets.compare_digest(username, api_config.get("username")) and secrets.compare_digest(
+        password, api_config.get("password")
+    )

 httpbasic = HTTPBasic(auto_error=False)

@@ -38,7 +39,7 @@ def get_user_from_token(token, secret_key: str, token_type: str = "access") -> s
     )
     try:
         payload = jwt.decode(token, secret_key, algorithms=[ALGORITHM])
-        username: str = payload.get("identity", {}).get('u')
+        username: str = payload.get("identity", {}).get("u")
         if username is None:
             raise credentials_exception
         if payload.get("type") != token_type:

@@ -55,10 +56,10 @@ def get_user_from_token(token, secret_key: str, token_type: str = "access") -> s
 async def validate_ws_token(
     ws: WebSocket,
     ws_token: Union[str, None] = Query(default=None, alias="token"),
-    api_config: Dict[str, Any] = Depends(get_api_config)
+    api_config: Dict[str, Any] = Depends(get_api_config),
 ):
-    secret_ws_token = api_config.get('ws_token', None)
-    secret_jwt_key = api_config.get('jwt_secret_key', 'super-secret')
+    secret_ws_token = api_config.get("ws_token", None)
+    secret_jwt_key = api_config.get("jwt_secret_key", "super-secret")

     # Check if ws_token is/in secret_ws_token
     if ws_token and secret_ws_token:

@@ -66,10 +67,9 @@ async def validate_ws_token(
         if isinstance(secret_ws_token, str):
             is_valid_ws_token = secrets.compare_digest(secret_ws_token, ws_token)
         elif isinstance(secret_ws_token, list):
-            is_valid_ws_token = any([
-                secrets.compare_digest(potential, ws_token)
-                for potential in secret_ws_token
-            ])
+            is_valid_ws_token = any(
+                [secrets.compare_digest(potential, ws_token) for potential in secret_ws_token]
+            )

         if is_valid_ws_token:
             return ws_token

@@ -94,20 +94,24 @@ def create_token(data: dict, secret_key: str, token_type: str = "access") -> str
         expire = datetime.now(timezone.utc) + timedelta(days=30)
     else:
         raise ValueError()
-    to_encode.update({
-        "exp": expire,
-        "iat": datetime.now(timezone.utc),
-        "type": token_type,
-    })
+    to_encode.update(
+        {
+            "exp": expire,
+            "iat": datetime.now(timezone.utc),
+            "type": token_type,
+        }
+    )
     encoded_jwt = jwt.encode(to_encode, secret_key, algorithm=ALGORITHM)
     return encoded_jwt

-def http_basic_or_jwt_token(form_data: HTTPBasicCredentials = Depends(httpbasic),
-                            token: str = Depends(oauth2_scheme),
-                            api_config=Depends(get_api_config)):
+def http_basic_or_jwt_token(
+    form_data: HTTPBasicCredentials = Depends(httpbasic),
+    token: str = Depends(oauth2_scheme),
+    api_config=Depends(get_api_config),
+):
     if token:
-        return get_user_from_token(token, api_config.get('jwt_secret_key', 'super-secret'))
+        return get_user_from_token(token, api_config.get("jwt_secret_key", "super-secret"))
     elif form_data and verify_auth(api_config, form_data.username, form_data.password):
         return form_data.username

@@ -117,15 +121,16 @@ def http_basic_or_jwt_token(form_data: HTTPBasicCredentials = Depends(httpbasic)
     )

-@router_login.post('/token/login', response_model=AccessAndRefreshToken)
-def token_login(form_data: HTTPBasicCredentials = Depends(security),
-                api_config=Depends(get_api_config)):
+@router_login.post("/token/login", response_model=AccessAndRefreshToken)
+def token_login(
+    form_data: HTTPBasicCredentials = Depends(security), api_config=Depends(get_api_config)
+):
     if verify_auth(api_config, form_data.username, form_data.password):
-        token_data = {'identity': {'u': form_data.username}}
-        access_token = create_token(token_data, api_config.get('jwt_secret_key', 'super-secret'))
-        refresh_token = create_token(token_data, api_config.get('jwt_secret_key', 'super-secret'),
-                                     token_type="refresh")
+        token_data = {"identity": {"u": form_data.username}}
+        access_token = create_token(token_data, api_config.get("jwt_secret_key", "super-secret"))
+        refresh_token = create_token(
+            token_data, api_config.get("jwt_secret_key", "super-secret"), token_type="refresh"
+        )
         return {
             "access_token": access_token,
             "refresh_token": refresh_token,

@@ -137,12 +142,12 @@ def token_login(form_data: HTTPBasicCredentials = Depends(security),
     )

-@router_login.post('/token/refresh', response_model=AccessToken)
+@router_login.post("/token/refresh", response_model=AccessToken)
 def token_refresh(token: str = Depends(oauth2_scheme), api_config=Depends(get_api_config)):
     # Refresh token
-    u = get_user_from_token(token, api_config.get(
-        'jwt_secret_key', 'super-secret'), 'refresh')
-    token_data = {'identity': {'u': u}}
-    access_token = create_token(token_data, api_config.get('jwt_secret_key', 'super-secret'),
-                                token_type="access")
-    return {'access_token': access_token}
+    u = get_user_from_token(token, api_config.get("jwt_secret_key", "super-secret"), "refresh")
+    token_data = {"identity": {"u": u}}
+    access_token = create_token(
+        token_data, api_config.get("jwt_secret_key", "super-secret"), token_type="access"
+    )
+    return {"access_token": access_token}

View File

@@ -27,105 +27,113 @@ logger = logging.getLogger(__name__)
 router = APIRouter()

-@router.get('/background', response_model=List[BackgroundTaskStatus], tags=['webserver'])
+@router.get("/background", response_model=List[BackgroundTaskStatus], tags=["webserver"])
 def background_job_list():
-    return [{
-        'job_id': jobid,
-        'job_category': job['category'],
-        'status': job['status'],
-        'running': job['is_running'],
-        'progress': job.get('progress'),
-        'error': job.get('error', None),
-    } for jobid, job in ApiBG.jobs.items()]
+    return [
+        {
+            "job_id": jobid,
+            "job_category": job["category"],
+            "status": job["status"],
+            "running": job["is_running"],
+            "progress": job.get("progress"),
+            "error": job.get("error", None),
+        }
+        for jobid, job in ApiBG.jobs.items()
+    ]

-@router.get('/background/{jobid}', response_model=BackgroundTaskStatus, tags=['webserver'])
+@router.get("/background/{jobid}", response_model=BackgroundTaskStatus, tags=["webserver"])
 def background_job(jobid: str):
     if not (job := ApiBG.jobs.get(jobid)):
-        raise HTTPException(status_code=404, detail='Job not found.')
+        raise HTTPException(status_code=404, detail="Job not found.")
     return {
-        'job_id': jobid,
-        'job_category': job['category'],
-        'status': job['status'],
-        'running': job['is_running'],
-        'progress': job.get('progress'),
-        'error': job.get('error', None),
+        "job_id": jobid,
+        "job_category": job["category"],
+        "status": job["status"],
+        "running": job["is_running"],
+        "progress": job.get("progress"),
+        "error": job.get("error", None),
     }

-@router.get('/pairlists/available',
-            response_model=PairListsResponse, tags=['pairlists', 'webserver'])
+@router.get(
+    "/pairlists/available", response_model=PairListsResponse, tags=["pairlists", "webserver"]
+)
 def list_pairlists(config=Depends(get_config)):
     from freqtrade.resolvers import PairListResolver

-    pairlists = PairListResolver.search_all_objects(
-        config, False)
-    pairlists = sorted(pairlists, key=lambda x: x['name'])
-
-    return {'pairlists': [{
-        "name": x['name'],
-        "is_pairlist_generator": x['class'].is_pairlist_generator,
-        "params": x['class'].available_parameters(),
-        "description": x['class'].description(),
-    } for x in pairlists
-    ]}
+    pairlists = PairListResolver.search_all_objects(config, False)
+    pairlists = sorted(pairlists, key=lambda x: x["name"])
+
+    return {
+        "pairlists": [
+            {
+                "name": x["name"],
+                "is_pairlist_generator": x["class"].is_pairlist_generator,
+                "params": x["class"].available_parameters(),
+                "description": x["class"].description(),
+            }
+            for x in pairlists
+        ]
+    }

 def __run_pairlist(job_id: str, config_loc: Config):
     try:
-        ApiBG.jobs[job_id]['is_running'] = True
+        ApiBG.jobs[job_id]["is_running"] = True
+
         from freqtrade.plugins.pairlistmanager import PairListManager
         with FtNoDBContext():
             exchange = get_exchange(config_loc)
             pairlists = PairListManager(exchange, config_loc)
             pairlists.refresh_pairlist()
-            ApiBG.jobs[job_id]['result'] = {
-                'method': pairlists.name_list,
-                'length': len(pairlists.whitelist),
-                'whitelist': pairlists.whitelist
-            }
-            ApiBG.jobs[job_id]['status'] = 'success'
+            ApiBG.jobs[job_id]["result"] = {
+                "method": pairlists.name_list,
+                "length": len(pairlists.whitelist),
+                "whitelist": pairlists.whitelist,
+            }
+            ApiBG.jobs[job_id]["status"] = "success"
     except (OperationalException, Exception) as e:
         logger.exception(e)
-        ApiBG.jobs[job_id]['error'] = str(e)
-        ApiBG.jobs[job_id]['status'] = 'failed'
+        ApiBG.jobs[job_id]["error"] = str(e)
+        ApiBG.jobs[job_id]["status"] = "failed"
     finally:
-        ApiBG.jobs[job_id]['is_running'] = False
+        ApiBG.jobs[job_id]["is_running"] = False
         ApiBG.pairlist_running = False

-@router.post('/pairlists/evaluate', response_model=BgJobStarted, tags=['pairlists', 'webserver'])
-def pairlists_evaluate(payload: PairListsPayload, background_tasks: BackgroundTasks,
-                       config=Depends(get_config)):
+@router.post("/pairlists/evaluate", response_model=BgJobStarted, tags=["pairlists", "webserver"])
+def pairlists_evaluate(
+    payload: PairListsPayload, background_tasks: BackgroundTasks, config=Depends(get_config)
+):
     if ApiBG.pairlist_running:
-        raise HTTPException(status_code=400, detail='Pairlist evaluation is already running.')
+        raise HTTPException(status_code=400, detail="Pairlist evaluation is already running.")
     config_loc = deepcopy(config)
-    config_loc['stake_currency'] = payload.stake_currency
-    config_loc['pairlists'] = payload.pairlists
+    config_loc["stake_currency"] = payload.stake_currency
+    config_loc["pairlists"] = payload.pairlists
     handleExchangePayload(payload, config_loc)
     # TODO: overwrite blacklist? make it optional and fall back to the one in config?
     # Outcome depends on the UI approach.
-    config_loc['exchange']['pair_blacklist'] = payload.blacklist
+    config_loc["exchange"]["pair_blacklist"] = payload.blacklist
     # Random job id
     job_id = ApiBG.get_job_id()

     ApiBG.jobs[job_id] = {
-        'category': 'pairlist',
-        'status': 'pending',
-        'progress': None,
-        'is_running': False,
-        'result': {},
-        'error': None,
+        "category": "pairlist",
+        "status": "pending",
+        "progress": None,
+        "is_running": False,
+        "result": {},
+        "error": None,
     }
     background_tasks.add_task(__run_pairlist, job_id, config_loc)
     ApiBG.pairlist_running = True

     return {
-        'status': 'Pairlist evaluation started in background.',
-        'job_id': job_id,
+        "status": "Pairlist evaluation started in background.",
+        "job_id": job_id,
     }

@@ -135,31 +143,35 @@ def handleExchangePayload(payload: ExchangeModePayloadMixin, config_loc: Config)
     Updates the configuration with the payload values.
     """
     if payload.exchange:
-        config_loc['exchange']['name'] = payload.exchange
+        config_loc["exchange"]["name"] = payload.exchange
     if payload.trading_mode:
-        config_loc['trading_mode'] = payload.trading_mode
-        config_loc['candle_type_def'] = CandleType.get_default(
-            config_loc.get('trading_mode', 'spot') or 'spot')
+        config_loc["trading_mode"] = payload.trading_mode
+        config_loc["candle_type_def"] = CandleType.get_default(
+            config_loc.get("trading_mode", "spot") or "spot"
+        )
     if payload.margin_mode:
-        config_loc['margin_mode'] = payload.margin_mode
+        config_loc["margin_mode"] = payload.margin_mode

-@router.get('/pairlists/evaluate/{jobid}', response_model=WhitelistEvaluateResponse,
-            tags=['pairlists', 'webserver'])
+@router.get(
+    "/pairlists/evaluate/{jobid}",
+    response_model=WhitelistEvaluateResponse,
+    tags=["pairlists", "webserver"],
+)
 def pairlists_evaluate_get(jobid: str):
     if not (job := ApiBG.jobs.get(jobid)):
-        raise HTTPException(status_code=404, detail='Job not found.')
-    if job['is_running']:
-        raise HTTPException(status_code=400, detail='Job not finished yet.')
+        raise HTTPException(status_code=404, detail="Job not found.")
+    if job["is_running"]:
+        raise HTTPException(status_code=400, detail="Job not finished yet.")

-    if error := job['error']:
+    if error := job["error"]:
         return {
-            'status': 'failed',
-            'error': error,
+            "status": "failed",
+            "error": error,
         }

     return {
-        'status': 'success',
-        'result': job['result'],
+        "status": "success",
+        "result": job["result"],
     }

View File

@@ -49,67 +49,67 @@ def __run_backtest_bg(btconfig: Config):
     asyncio.set_event_loop(asyncio.new_event_loop())
     try:
         # Reload strategy
-        lastconfig = ApiBG.bt['last_config']
+        lastconfig = ApiBG.bt["last_config"]
         strat = StrategyResolver.load_strategy(btconfig)
         validate_config_consistency(btconfig)

         if (
-            not ApiBG.bt['bt']
-            or lastconfig.get('timeframe') != strat.timeframe
-            or lastconfig.get('timeframe_detail') != btconfig.get('timeframe_detail')
-            or lastconfig.get('timerange') != btconfig['timerange']
+            not ApiBG.bt["bt"]
+            or lastconfig.get("timeframe") != strat.timeframe
+            or lastconfig.get("timeframe_detail") != btconfig.get("timeframe_detail")
+            or lastconfig.get("timerange") != btconfig["timerange"]
         ):
             from freqtrade.optimize.backtesting import Backtesting
-            ApiBG.bt['bt'] = Backtesting(btconfig)
-            ApiBG.bt['bt'].load_bt_data_detail()
+
+            ApiBG.bt["bt"] = Backtesting(btconfig)
+            ApiBG.bt["bt"].load_bt_data_detail()
         else:
-            ApiBG.bt['bt'].config = btconfig
-            ApiBG.bt['bt'].init_backtest()
+            ApiBG.bt["bt"].config = btconfig
+            ApiBG.bt["bt"].init_backtest()
         # Only reload data if timeframe changed.
         if (
-            not ApiBG.bt['data']
-            or not ApiBG.bt['timerange']
-            or lastconfig.get('timeframe') != strat.timeframe
-            or lastconfig.get('timerange') != btconfig['timerange']
+            not ApiBG.bt["data"]
+            or not ApiBG.bt["timerange"]
+            or lastconfig.get("timeframe") != strat.timeframe
+            or lastconfig.get("timerange") != btconfig["timerange"]
         ):
-            ApiBG.bt['data'], ApiBG.bt['timerange'] = ApiBG.bt[
-                'bt'].load_bt_data()
+            ApiBG.bt["data"], ApiBG.bt["timerange"] = ApiBG.bt["bt"].load_bt_data()

-        lastconfig['timerange'] = btconfig['timerange']
-        lastconfig['timeframe'] = strat.timeframe
-        lastconfig['protections'] = btconfig.get('protections', [])
-        lastconfig['enable_protections'] = btconfig.get('enable_protections')
-        lastconfig['dry_run_wallet'] = btconfig.get('dry_run_wallet')
+        lastconfig["timerange"] = btconfig["timerange"]
+        lastconfig["timeframe"] = strat.timeframe
+        lastconfig["protections"] = btconfig.get("protections", [])
+        lastconfig["enable_protections"] = btconfig.get("enable_protections")
+        lastconfig["dry_run_wallet"] = btconfig.get("dry_run_wallet")

-        ApiBG.bt['bt'].enable_protections = btconfig.get('enable_protections', False)
-        ApiBG.bt['bt'].strategylist = [strat]
-        ApiBG.bt['bt'].results = get_BacktestResultType_default()
-        ApiBG.bt['bt'].load_prior_backtest()
+        ApiBG.bt["bt"].enable_protections = btconfig.get("enable_protections", False)
+        ApiBG.bt["bt"].strategylist = [strat]
+        ApiBG.bt["bt"].results = get_BacktestResultType_default()
+        ApiBG.bt["bt"].load_prior_backtest()

-        ApiBG.bt['bt'].abort = False
+        ApiBG.bt["bt"].abort = False
         strategy_name = strat.get_strategy_name()
-        if (ApiBG.bt['bt'].results and
-                strategy_name in ApiBG.bt['bt'].results['strategy']):
+        if ApiBG.bt["bt"].results and strategy_name in ApiBG.bt["bt"].results["strategy"]:
             # When previous result hash matches - reuse that result and skip backtesting.
-            logger.info(f'Reusing result of previous backtest for {strategy_name}')
+            logger.info(f"Reusing result of previous backtest for {strategy_name}")
         else:
-            min_date, max_date = ApiBG.bt['bt'].backtest_one_strategy(
-                strat, ApiBG.bt['data'], ApiBG.bt['timerange'])
-        ApiBG.bt['bt'].results = generate_backtest_stats(
-            ApiBG.bt['data'], ApiBG.bt['bt'].all_results,
-            min_date=min_date, max_date=max_date)
-        if btconfig.get('export', 'none') == 'trades':
-            combined_res = combined_dataframes_with_rel_mean(ApiBG.bt['data'], min_date, max_date)
-            fn = store_backtest_stats(
-                btconfig['exportfilename'],
-                ApiBG.bt['bt'].results,
-                datetime.now().strftime("%Y-%m-%d_%H-%M-%S"),
-                market_change_data=combined_res
-            )
-            ApiBG.bt['bt'].results['metadata'][strategy_name]['filename'] = str(fn.stem)
-            ApiBG.bt['bt'].results['metadata'][strategy_name]['strategy'] = strategy_name
+            min_date, max_date = ApiBG.bt["bt"].backtest_one_strategy(
+                strat, ApiBG.bt["data"], ApiBG.bt["timerange"]
+            )
+        ApiBG.bt["bt"].results = generate_backtest_stats(
+            ApiBG.bt["data"], ApiBG.bt["bt"].all_results, min_date=min_date, max_date=max_date
+        )
+        if btconfig.get("export", "none") == "trades":
+            combined_res = combined_dataframes_with_rel_mean(ApiBG.bt["data"], min_date, max_date)
+            fn = store_backtest_stats(
+                btconfig["exportfilename"],
+                ApiBG.bt["bt"].results,
+                datetime.now().strftime("%Y-%m-%d_%H-%M-%S"),
+                market_change_data=combined_res,
+            )
+            ApiBG.bt["bt"].results["metadata"][strategy_name]["filename"] = str(fn.stem)
+            ApiBG.bt["bt"].results["metadata"][strategy_name]["strategy"] = strategy_name

         logger.info("Backtest finished.")

@@ -118,38 +118,38 @@ def __run_backtest_bg(btconfig: Config):
     except (Exception, OperationalException, DependencyException) as e:
         logger.exception(f"Backtesting caused an error: {e}")
-        ApiBG.bt['bt_error'] = str(e)
+        ApiBG.bt["bt_error"] = str(e)
     finally:
         ApiBG.bgtask_running = False

-@router.post('/backtest', response_model=BacktestResponse, tags=['webserver', 'backtest'])
+@router.post("/backtest", response_model=BacktestResponse, tags=["webserver", "backtest"])
 async def api_start_backtest(
-        bt_settings: BacktestRequest, background_tasks: BackgroundTasks,
-        config=Depends(get_config)):
-    ApiBG.bt['bt_error'] = None
+    bt_settings: BacktestRequest, background_tasks: BackgroundTasks, config=Depends(get_config)
+):
+    ApiBG.bt["bt_error"] = None
     """Start backtesting if not done so already"""

     if ApiBG.bgtask_running:
-        raise RPCException('Bot Background task already running')
+        raise RPCException("Bot Background task already running")

-    if ':' in bt_settings.strategy:
+    if ":" in bt_settings.strategy:
         raise HTTPException(status_code=500, detail="base64 encoded strategies are not allowed.")

     btconfig = deepcopy(config)
-    remove_exchange_credentials(btconfig['exchange'], True)
+    remove_exchange_credentials(btconfig["exchange"], True)
     settings = dict(bt_settings)
-    if settings.get('freqai', None) is not None:
-        settings['freqai'] = dict(settings['freqai'])
+    if settings.get("freqai", None) is not None:
+        settings["freqai"] = dict(settings["freqai"])
     # Pydantic models will contain all keys, but non-provided ones are None

     btconfig = deep_merge_dicts(settings, btconfig, allow_null_overrides=False)
     try:
-        btconfig['stake_amount'] = float(btconfig['stake_amount'])
+        btconfig["stake_amount"] = float(btconfig["stake_amount"])
     except ValueError:
         pass

     # Force dry-run for backtesting
-    btconfig['dry_run'] = True
+    btconfig["dry_run"] = True

     # Start backtesting
     # Initialize backtesting object

@@ -166,39 +166,41 @@ async def api_start_backtest(
     }

-@router.get('/backtest', response_model=BacktestResponse, tags=['webserver', 'backtest'])
+@router.get("/backtest", response_model=BacktestResponse, tags=["webserver", "backtest"])
 def api_get_backtest():
     """
     Get backtesting result.
     Returns Result after backtesting has been ran.
     """
     from freqtrade.persistence import LocalTrade

     if ApiBG.bgtask_running:
         return {
             "status": "running",
             "running": True,
-            "step": (ApiBG.bt['bt'].progress.action if ApiBG.bt['bt']
-                     else str(BacktestState.STARTUP)),
-            "progress": ApiBG.bt['bt'].progress.progress if ApiBG.bt['bt'] else 0,
+            "step": (
+                ApiBG.bt["bt"].progress.action if ApiBG.bt["bt"] else str(BacktestState.STARTUP)
+            ),
+            "progress": ApiBG.bt["bt"].progress.progress if ApiBG.bt["bt"] else 0,
             "trade_count": len(LocalTrade.trades),
             "status_msg": "Backtest running",
         }

-    if not ApiBG.bt['bt']:
+    if not ApiBG.bt["bt"]:
         return {
             "status": "not_started",
             "running": False,
             "step": "",
             "progress": 0,
-            "status_msg": "Backtest not yet executed"
+            "status_msg": "Backtest not yet executed",
         }
-    if ApiBG.bt['bt_error']:
+    if ApiBG.bt["bt_error"]:
         return {
             "status": "error",
             "running": False,
             "step": "",
             "progress": 0,
-            "status_msg": f"Backtest failed with {ApiBG.bt['bt_error']}"
+            "status_msg": f"Backtest failed with {ApiBG.bt['bt_error']}",
         }

     return {

@@ -207,11 +209,11 @@ def api_get_backtest():
         "status_msg": "Backtest ended",
         "step": "finished",
         "progress": 1,
-        "backtest_result": ApiBG.bt['bt'].results,
+        "backtest_result": ApiBG.bt["bt"].results,
     }

-@router.delete('/backtest', response_model=BacktestResponse, tags=['webserver', 'backtest'])
+@router.delete("/backtest", response_model=BacktestResponse, tags=["webserver", "backtest"])
 def api_delete_backtest():
     """Reset backtesting"""
     if ApiBG.bgtask_running:

@@ -222,12 +224,12 @@ def api_delete_backtest():
             "progress": 0,
             "status_msg": "Backtest running",
         }
-    if ApiBG.bt['bt']:
-        ApiBG.bt['bt'].cleanup()
-        del ApiBG.bt['bt']
-        ApiBG.bt['bt'] = None
-        del ApiBG.bt['data']
-        ApiBG.bt['data'] = None
+    if ApiBG.bt["bt"]:
+        ApiBG.bt["bt"].cleanup()
+        del ApiBG.bt["bt"]
+        ApiBG.bt["bt"] = None
+        del ApiBG.bt["data"]
+        ApiBG.bt["data"] = None
         logger.info("Backtesting reset")
     return {
         "status": "reset",

@@ -238,7 +240,7 @@ def api_delete_backtest():
     }

-@router.get('/backtest/abort', response_model=BacktestResponse, tags=['webserver', 'backtest'])
+@router.get("/backtest/abort", response_model=BacktestResponse, tags=["webserver", "backtest"])
 def api_backtest_abort():
     if not ApiBG.bgtask_running:
         return {

@@ -248,7 +250,7 @@ def api_backtest_abort():
             "progress": 0,
             "status_msg": "Backtest ended",
         }
-    ApiBG.bt['bt'].abort = True
+    ApiBG.bt["bt"].abort = True
     return {
         "status": "stopping",
         "running": False,

@@ -258,24 +260,26 @@ def api_backtest_abort():
     }

-@router.get('/backtest/history', response_model=List[BacktestHistoryEntry],
-            tags=['webserver', 'backtest'])
+@router.get(
+    "/backtest/history", response_model=List[BacktestHistoryEntry], tags=["webserver", "backtest"]
+)
 def api_backtest_history(config=Depends(get_config)):
     # Get backtest result history, read from metadata files
-    return get_backtest_resultlist(config['user_data_dir'] / 'backtest_results')
+    return get_backtest_resultlist(config["user_data_dir"] / "backtest_results")

-@router.get('/backtest/history/result', response_model=BacktestResponse,
-            tags=['webserver', 'backtest'])
+@router.get(
+    "/backtest/history/result", response_model=BacktestResponse, tags=["webserver", "backtest"]
+)
 def api_backtest_history_result(filename: str, strategy: str, config=Depends(get_config)):
     # Get backtest result history, read from metadata files
-    bt_results_base: Path = config['user_data_dir'] / 'backtest_results'
-    fn = (bt_results_base / filename).with_suffix('.json')
+    bt_results_base: Path = config["user_data_dir"] / "backtest_results"
+    fn = (bt_results_base / filename).with_suffix(".json")

     results: Dict[str, Any] = {
-        'metadata': {},
-        'strategy': {},
-        'strategy_comparison': [],
+        "metadata": {},
+        "strategy": {},
+        "strategy_comparison": [],
     }
     if not is_file_in_dir(fn, bt_results_base):
         raise HTTPException(status_code=404, detail="File not found.")

@@ -290,33 +294,38 @@ def api_backtest_history_result(filename: str, strategy: str, config=Depends(get
     }

-@router.delete('/backtest/history/{file}', response_model=List[BacktestHistoryEntry],
-               tags=['webserver', 'backtest'])
+@router.delete(
+    "/backtest/history/{file}",
+    response_model=List[BacktestHistoryEntry],
+    tags=["webserver", "backtest"],
+)
 def api_delete_backtest_history_entry(file: str, config=Depends(get_config)):
     # Get backtest result history, read from metadata files
-    bt_results_base: Path = config['user_data_dir'] / 'backtest_results'
-    file_abs = (bt_results_base / file).with_suffix('.json')
+    bt_results_base: Path = config["user_data_dir"] / "backtest_results"
+    file_abs = (bt_results_base / file).with_suffix(".json")
     # Ensure file is in backtest_results directory
     if not is_file_in_dir(file_abs, bt_results_base):
         raise HTTPException(status_code=404, detail="File not found.")
     delete_backtest_result(file_abs)
-    return get_backtest_resultlist(config['user_data_dir'] / 'backtest_results')
+    return get_backtest_resultlist(config["user_data_dir"] / "backtest_results")

-@router.patch('/backtest/history/{file}', response_model=List[BacktestHistoryEntry],
-              tags=['webserver', 'backtest'])
-def api_update_backtest_history_entry(file: str, body: BacktestMetadataUpdate,
-                                      config=Depends(get_config)):
+@router.patch(
+    "/backtest/history/{file}",
+    response_model=List[BacktestHistoryEntry],
+    tags=["webserver", "backtest"],
+)
+def api_update_backtest_history_entry(
+    file: str, body: BacktestMetadataUpdate, config=Depends(get_config)
+):
     # Get backtest result history, read from metadata files
-    bt_results_base: Path = config['user_data_dir'] / 'backtest_results'
-    file_abs = (bt_results_base / file).with_suffix('.json')
+    bt_results_base: Path = config["user_data_dir"] / "backtest_results"
+    file_abs = (bt_results_base / file).with_suffix(".json")
     # Ensure file is in backtest_results directory
     if not is_file_in_dir(file_abs, bt_results_base):
         raise HTTPException(status_code=404, detail="File not found.")
-    content = {
-        'notes': body.notes
-    }
+    content = {"notes": body.notes}
     try:
         update_backtest_metadata(file_abs, body.strategy, content)
     except ValueError as e:

@@ -325,18 +334,21 @@ def api_update_backtest_history_entry(file: str, body: BacktestMetadataUpdate,
     return get_backtest_result(file_abs)

-@router.get('/backtest/history/{file}/market_change', response_model=BacktestMarketChange,
-            tags=['webserver', 'backtest'])
+@router.get(
+    "/backtest/history/{file}/market_change",
+    response_model=BacktestMarketChange,
+    tags=["webserver", "backtest"],
+)
 def api_get_backtest_market_change(file: str, config=Depends(get_config)):
-    bt_results_base: Path = config['user_data_dir'] / 'backtest_results'
-    file_abs = (bt_results_base / f"{file}_market_change").with_suffix('.feather')
+    bt_results_base: Path = config["user_data_dir"] / "backtest_results"
+    file_abs = (bt_results_base / f"{file}_market_change").with_suffix(".feather")
     # Ensure file is in backtest_results directory
     if not is_file_in_dir(file_abs, bt_results_base):
         raise HTTPException(status_code=404, detail="File not found.")
     df = get_backtest_market_change(file_abs)

     return {
-        'columns': df.columns.tolist(),
-        'data': df.values.tolist(),
-        'length': len(df),
+        "columns": df.columns.tolist(),
+        "data": df.values.tolist(),
+        "length": len(df),
     }

View File

@@ -381,7 +381,7 @@ class Locks(BaseModel):
 class LocksPayload(BaseModel):
     pair: str
-    side: str = '*'  # Default to both sides
+    side: str = "*"  # Default to both sides
     until: AwareDatetime
     reason: Optional[str] = None

@@ -561,7 +561,7 @@ class BacktestHistoryEntry(BaseModel):
     strategy: str
     run_id: str
     backtest_start_time: int
-    notes: Optional[str] = ''
+    notes: Optional[str] = ""
     backtest_start_ts: Optional[int] = None
     backtest_end_ts: Optional[int] = None
     timeframe: Optional[str] = None

@@ -570,7 +570,7 @@ class BacktestHistoryEntry(BaseModel):
 class BacktestMetadataUpdate(BaseModel):
     strategy: str
-    notes: str = ''
+    notes: str = ""

 class BacktestMarketChange(BaseModel):

View File

@@ -90,80 +90,84 @@ router_public = APIRouter()
router = APIRouter() router = APIRouter()
@router_public.get('/ping', response_model=Ping) @router_public.get("/ping", response_model=Ping)
def ping(): def ping():
"""simple ping""" """simple ping"""
return {"status": "pong"} return {"status": "pong"}
@router.get('/version', response_model=Version, tags=['info']) @router.get("/version", response_model=Version, tags=["info"])
def version(): def version():
"""Bot Version info""" """Bot Version info"""
return {"version": __version__} return {"version": __version__}
@router.get('/balance', response_model=Balances, tags=['info']) @router.get("/balance", response_model=Balances, tags=["info"])
def balance(rpc: RPC = Depends(get_rpc), config=Depends(get_config)): def balance(rpc: RPC = Depends(get_rpc), config=Depends(get_config)):
"""Account Balances""" """Account Balances"""
return rpc._rpc_balance(config['stake_currency'], config.get('fiat_display_currency', ''),) return rpc._rpc_balance(
config["stake_currency"],
config.get("fiat_display_currency", ""),
)
@router.get('/count', response_model=Count, tags=['info']) @router.get("/count", response_model=Count, tags=["info"])
def count(rpc: RPC = Depends(get_rpc)): def count(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_count() return rpc._rpc_count()
@router.get('/entries', response_model=List[Entry], tags=['info']) @router.get("/entries", response_model=List[Entry], tags=["info"])
def entries(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)): def entries(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_enter_tag_performance(pair) return rpc._rpc_enter_tag_performance(pair)
@router.get('/exits', response_model=List[Exit], tags=['info']) @router.get("/exits", response_model=List[Exit], tags=["info"])
def exits(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)): def exits(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_exit_reason_performance(pair) return rpc._rpc_exit_reason_performance(pair)
@router.get('/mix_tags', response_model=List[MixTag], tags=['info']) @router.get("/mix_tags", response_model=List[MixTag], tags=["info"])
def mix_tags(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)): def mix_tags(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_mix_tag_performance(pair) return rpc._rpc_mix_tag_performance(pair)
@router.get('/performance', response_model=List[PerformanceEntry], tags=['info']) @router.get("/performance", response_model=List[PerformanceEntry], tags=["info"])
def performance(rpc: RPC = Depends(get_rpc)): def performance(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_performance() return rpc._rpc_performance()
@router.get('/profit', response_model=Profit, tags=['info']) @router.get("/profit", response_model=Profit, tags=["info"])
def profit(rpc: RPC = Depends(get_rpc), config=Depends(get_config)): def profit(rpc: RPC = Depends(get_rpc), config=Depends(get_config)):
return rpc._rpc_trade_statistics(config['stake_currency'], return rpc._rpc_trade_statistics(config["stake_currency"], config.get("fiat_display_currency"))
config.get('fiat_display_currency')
)
@router.get('/stats', response_model=Stats, tags=['info']) @router.get("/stats", response_model=Stats, tags=["info"])
def stats(rpc: RPC = Depends(get_rpc)): def stats(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_stats() return rpc._rpc_stats()
@router.get('/daily', response_model=DailyWeeklyMonthly, tags=['info']) @router.get("/daily", response_model=DailyWeeklyMonthly, tags=["info"])
def daily(timescale: int = 7, rpc: RPC = Depends(get_rpc), config=Depends(get_config)): def daily(timescale: int = 7, rpc: RPC = Depends(get_rpc), config=Depends(get_config)):
return rpc._rpc_timeunit_profit(timescale, config['stake_currency'], return rpc._rpc_timeunit_profit(
config.get('fiat_display_currency', '')) timescale, config["stake_currency"], config.get("fiat_display_currency", "")
)
@router.get('/weekly', response_model=DailyWeeklyMonthly, tags=['info']) @router.get("/weekly", response_model=DailyWeeklyMonthly, tags=["info"])
def weekly(timescale: int = 4, rpc: RPC = Depends(get_rpc), config=Depends(get_config)): def weekly(timescale: int = 4, rpc: RPC = Depends(get_rpc), config=Depends(get_config)):
return rpc._rpc_timeunit_profit(timescale, config['stake_currency'], return rpc._rpc_timeunit_profit(
config.get('fiat_display_currency', ''), 'weeks') timescale, config["stake_currency"], config.get("fiat_display_currency", ""), "weeks"
)
@router.get('/monthly', response_model=DailyWeeklyMonthly, tags=['info']) @router.get("/monthly", response_model=DailyWeeklyMonthly, tags=["info"])
def monthly(timescale: int = 3, rpc: RPC = Depends(get_rpc), config=Depends(get_config)): def monthly(timescale: int = 3, rpc: RPC = Depends(get_rpc), config=Depends(get_config)):
return rpc._rpc_timeunit_profit(timescale, config['stake_currency'], return rpc._rpc_timeunit_profit(
config.get('fiat_display_currency', ''), 'months') timescale, config["stake_currency"], config.get("fiat_display_currency", ""), "months"
)
@router.get('/status', response_model=List[OpenTradeSchema], tags=['info']) @router.get("/status", response_model=List[OpenTradeSchema], tags=["info"])
def status(rpc: RPC = Depends(get_rpc)): def status(rpc: RPC = Depends(get_rpc)):
try: try:
return rpc._rpc_trade_status() return rpc._rpc_trade_status()
@@ -173,274 +177,305 @@ def status(rpc: RPC = Depends(get_rpc)):
# Using the responsemodel here will cause a ~100% increase in response time (from 1s to 2s) # Using the responsemodel here will cause a ~100% increase in response time (from 1s to 2s)
# on big databases. Correct response model: response_model=TradeResponse, # on big databases. Correct response model: response_model=TradeResponse,
@router.get('/trades', tags=['info', 'trading']) @router.get("/trades", tags=["info", "trading"])
def trades(limit: int = 500, offset: int = 0, rpc: RPC = Depends(get_rpc)): def trades(limit: int = 500, offset: int = 0, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_trade_history(limit, offset=offset, order_by_id=True) return rpc._rpc_trade_history(limit, offset=offset, order_by_id=True)
@router.get('/trade/{tradeid}', response_model=OpenTradeSchema, tags=['info', 'trading']) @router.get("/trade/{tradeid}", response_model=OpenTradeSchema, tags=["info", "trading"])
def trade(tradeid: int = 0, rpc: RPC = Depends(get_rpc)): def trade(tradeid: int = 0, rpc: RPC = Depends(get_rpc)):
try: try:
return rpc._rpc_trade_status([tradeid])[0] return rpc._rpc_trade_status([tradeid])[0]
except (RPCException, KeyError): except (RPCException, KeyError):
raise HTTPException(status_code=404, detail='Trade not found.') raise HTTPException(status_code=404, detail="Trade not found.")
@router.delete('/trades/{tradeid}', response_model=DeleteTrade, tags=['info', 'trading']) @router.delete("/trades/{tradeid}", response_model=DeleteTrade, tags=["info", "trading"])
def trades_delete(tradeid: int, rpc: RPC = Depends(get_rpc)): def trades_delete(tradeid: int, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_delete(tradeid) return rpc._rpc_delete(tradeid)
@router.delete('/trades/{tradeid}/open-order', response_model=OpenTradeSchema, tags=['trading']) @router.delete("/trades/{tradeid}/open-order", response_model=OpenTradeSchema, tags=["trading"])
def trade_cancel_open_order(tradeid: int, rpc: RPC = Depends(get_rpc)): def trade_cancel_open_order(tradeid: int, rpc: RPC = Depends(get_rpc)):
rpc._rpc_cancel_open_order(tradeid) rpc._rpc_cancel_open_order(tradeid)
return rpc._rpc_trade_status([tradeid])[0] return rpc._rpc_trade_status([tradeid])[0]
@router.post('/trades/{tradeid}/reload', response_model=OpenTradeSchema, tags=['trading']) @router.post("/trades/{tradeid}/reload", response_model=OpenTradeSchema, tags=["trading"])
def trade_reload(tradeid: int, rpc: RPC = Depends(get_rpc)): def trade_reload(tradeid: int, rpc: RPC = Depends(get_rpc)):
rpc._rpc_reload_trade_from_exchange(tradeid) rpc._rpc_reload_trade_from_exchange(tradeid)
return rpc._rpc_trade_status([tradeid])[0] return rpc._rpc_trade_status([tradeid])[0]
# TODO: Missing response model # TODO: Missing response model
@router.get('/edge', tags=['info']) @router.get("/edge", tags=["info"])
def edge(rpc: RPC = Depends(get_rpc)): def edge(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_edge() return rpc._rpc_edge()
@router.get('/show_config', response_model=ShowConfig, tags=['info']) @router.get("/show_config", response_model=ShowConfig, tags=["info"])
def show_config(rpc: Optional[RPC] = Depends(get_rpc_optional), config=Depends(get_config)): def show_config(rpc: Optional[RPC] = Depends(get_rpc_optional), config=Depends(get_config)):
state = '' state = ""
strategy_version = None strategy_version = None
if rpc: if rpc:
state = rpc._freqtrade.state state = rpc._freqtrade.state
strategy_version = rpc._freqtrade.strategy.version() strategy_version = rpc._freqtrade.strategy.version()
resp = RPC._rpc_show_config(config, state, strategy_version) resp = RPC._rpc_show_config(config, state, strategy_version)
resp['api_version'] = API_VERSION resp["api_version"] = API_VERSION
return resp return resp
# /forcebuy is deprecated with short addition. use /forceentry instead # /forcebuy is deprecated with short addition. use /forceentry instead
@router.post('/forceenter', response_model=ForceEnterResponse, tags=['trading']) @router.post("/forceenter", response_model=ForceEnterResponse, tags=["trading"])
@router.post('/forcebuy', response_model=ForceEnterResponse, tags=['trading']) @router.post("/forcebuy", response_model=ForceEnterResponse, tags=["trading"])
def force_entry(payload: ForceEnterPayload, rpc: RPC = Depends(get_rpc)): def force_entry(payload: ForceEnterPayload, rpc: RPC = Depends(get_rpc)):
ordertype = payload.ordertype.value if payload.ordertype else None ordertype = payload.ordertype.value if payload.ordertype else None
trade = rpc._rpc_force_entry(payload.pair, payload.price, order_side=payload.side, trade = rpc._rpc_force_entry(
order_type=ordertype, stake_amount=payload.stakeamount, payload.pair,
enter_tag=payload.entry_tag or 'force_entry', payload.price,
leverage=payload.leverage) order_side=payload.side,
order_type=ordertype,
stake_amount=payload.stakeamount,
enter_tag=payload.entry_tag or "force_entry",
leverage=payload.leverage,
)
if trade: if trade:
return ForceEnterResponse.model_validate(trade.to_json()) return ForceEnterResponse.model_validate(trade.to_json())
else: else:
return ForceEnterResponse.model_validate( return ForceEnterResponse.model_validate(
{"status": f"Error entering {payload.side} trade for pair {payload.pair}."}) {"status": f"Error entering {payload.side} trade for pair {payload.pair}."}
)
# /forcesell is deprecated with short addition. use /forceexit instead # /forcesell is deprecated with short addition. use /forceexit instead
@router.post('/forceexit', response_model=ResultMsg, tags=['trading']) @router.post("/forceexit", response_model=ResultMsg, tags=["trading"])
@router.post('/forcesell', response_model=ResultMsg, tags=['trading']) @router.post("/forcesell", response_model=ResultMsg, tags=["trading"])
def forceexit(payload: ForceExitPayload, rpc: RPC = Depends(get_rpc)): def forceexit(payload: ForceExitPayload, rpc: RPC = Depends(get_rpc)):
ordertype = payload.ordertype.value if payload.ordertype else None ordertype = payload.ordertype.value if payload.ordertype else None
return rpc._rpc_force_exit(str(payload.tradeid), ordertype, amount=payload.amount) return rpc._rpc_force_exit(str(payload.tradeid), ordertype, amount=payload.amount)
@router.get('/blacklist', response_model=BlacklistResponse, tags=['info', 'pairlist']) @router.get("/blacklist", response_model=BlacklistResponse, tags=["info", "pairlist"])
def blacklist(rpc: RPC = Depends(get_rpc)): def blacklist(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_blacklist() return rpc._rpc_blacklist()
@router.post('/blacklist', response_model=BlacklistResponse, tags=['info', 'pairlist']) @router.post("/blacklist", response_model=BlacklistResponse, tags=["info", "pairlist"])
def blacklist_post(payload: BlacklistPayload, rpc: RPC = Depends(get_rpc)): def blacklist_post(payload: BlacklistPayload, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_blacklist(payload.blacklist) return rpc._rpc_blacklist(payload.blacklist)
@router.delete('/blacklist', response_model=BlacklistResponse, tags=['info', 'pairlist']) @router.delete("/blacklist", response_model=BlacklistResponse, tags=["info", "pairlist"])
def blacklist_delete(pairs_to_delete: List[str] = Query([]), rpc: RPC = Depends(get_rpc)): def blacklist_delete(pairs_to_delete: List[str] = Query([]), rpc: RPC = Depends(get_rpc)):
"""Provide a list of pairs to delete from the blacklist""" """Provide a list of pairs to delete from the blacklist"""
return rpc._rpc_blacklist_delete(pairs_to_delete) return rpc._rpc_blacklist_delete(pairs_to_delete)
@router.get('/whitelist', response_model=WhitelistResponse, tags=['info', 'pairlist']) @router.get("/whitelist", response_model=WhitelistResponse, tags=["info", "pairlist"])
def whitelist(rpc: RPC = Depends(get_rpc)): def whitelist(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_whitelist() return rpc._rpc_whitelist()
@router.get('/locks', response_model=Locks, tags=['info', 'locks']) @router.get("/locks", response_model=Locks, tags=["info", "locks"])
def locks(rpc: RPC = Depends(get_rpc)): def locks(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_locks() return rpc._rpc_locks()
@router.delete('/locks/{lockid}', response_model=Locks, tags=['info', 'locks']) @router.delete("/locks/{lockid}", response_model=Locks, tags=["info", "locks"])
def delete_lock(lockid: int, rpc: RPC = Depends(get_rpc)): def delete_lock(lockid: int, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_delete_lock(lockid=lockid) return rpc._rpc_delete_lock(lockid=lockid)
@router.post('/locks/delete', response_model=Locks, tags=['info', 'locks']) @router.post("/locks/delete", response_model=Locks, tags=["info", "locks"])
def delete_lock_pair(payload: DeleteLockRequest, rpc: RPC = Depends(get_rpc)): def delete_lock_pair(payload: DeleteLockRequest, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_delete_lock(lockid=payload.lockid, pair=payload.pair) return rpc._rpc_delete_lock(lockid=payload.lockid, pair=payload.pair)
@router.post('/locks', response_model=Locks, tags=['info', 'locks']) @router.post("/locks", response_model=Locks, tags=["info", "locks"])
def add_locks(payload: List[LocksPayload], rpc: RPC = Depends(get_rpc)): def add_locks(payload: List[LocksPayload], rpc: RPC = Depends(get_rpc)):
for lock in payload: for lock in payload:
rpc._rpc_add_lock(lock.pair, lock.until, lock.reason, lock.side) rpc._rpc_add_lock(lock.pair, lock.until, lock.reason, lock.side)
return rpc._rpc_locks() return rpc._rpc_locks()
@router.get('/logs', response_model=Logs, tags=['info']) @router.get("/logs", response_model=Logs, tags=["info"])
def logs(limit: Optional[int] = None): def logs(limit: Optional[int] = None):
return RPC._rpc_get_logs(limit) return RPC._rpc_get_logs(limit)
@router.post('/start', response_model=StatusMsg, tags=['botcontrol']) @router.post("/start", response_model=StatusMsg, tags=["botcontrol"])
def start(rpc: RPC = Depends(get_rpc)): def start(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_start() return rpc._rpc_start()
@router.post('/stop', response_model=StatusMsg, tags=['botcontrol']) @router.post("/stop", response_model=StatusMsg, tags=["botcontrol"])
def stop(rpc: RPC = Depends(get_rpc)): def stop(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_stop() return rpc._rpc_stop()
@router.post('/stopentry', response_model=StatusMsg, tags=['botcontrol']) @router.post("/stopentry", response_model=StatusMsg, tags=["botcontrol"])
@router.post('/stopbuy', response_model=StatusMsg, tags=['botcontrol']) @router.post("/stopbuy", response_model=StatusMsg, tags=["botcontrol"])
def stop_buy(rpc: RPC = Depends(get_rpc)): def stop_buy(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_stopentry() return rpc._rpc_stopentry()
@router.post('/reload_config', response_model=StatusMsg, tags=['botcontrol']) @router.post("/reload_config", response_model=StatusMsg, tags=["botcontrol"])
def reload_config(rpc: RPC = Depends(get_rpc)): def reload_config(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_reload_config() return rpc._rpc_reload_config()
@router.get('/pair_candles', response_model=PairHistory, tags=['candle data']) @router.get("/pair_candles", response_model=PairHistory, tags=["candle data"])
def pair_candles( def pair_candles(
pair: str, timeframe: str, limit: Optional[int] = None, rpc: RPC = Depends(get_rpc)): pair: str, timeframe: str, limit: Optional[int] = None, rpc: RPC = Depends(get_rpc)
):
return rpc._rpc_analysed_dataframe(pair, timeframe, limit, None) return rpc._rpc_analysed_dataframe(pair, timeframe, limit, None)
@router.post('/pair_candles', response_model=PairHistory, tags=['candle data']) @router.post("/pair_candles", response_model=PairHistory, tags=["candle data"])
def pair_candles_filtered(payload: PairCandlesRequest, rpc: RPC = Depends(get_rpc)): def pair_candles_filtered(payload: PairCandlesRequest, rpc: RPC = Depends(get_rpc)):
# Advanced pair_candles endpoint with column filtering # Advanced pair_candles endpoint with column filtering
return rpc._rpc_analysed_dataframe( return rpc._rpc_analysed_dataframe(
payload.pair, payload.timeframe, payload.limit, payload.columns) payload.pair, payload.timeframe, payload.limit, payload.columns
)
@router.get('/pair_history', response_model=PairHistory, tags=['candle data']) @router.get("/pair_history", response_model=PairHistory, tags=["candle data"])
def pair_history(pair: str, timeframe: str, timerange: str, strategy: str, def pair_history(
pair: str,
timeframe: str,
timerange: str,
strategy: str,
freqaimodel: Optional[str] = None, freqaimodel: Optional[str] = None,
config=Depends(get_config), exchange=Depends(get_exchange)): config=Depends(get_config),
exchange=Depends(get_exchange),
):
# The initial call to this endpoint can be slow, as it may need to initialize # The initial call to this endpoint can be slow, as it may need to initialize
# the exchange class. # the exchange class.
config = deepcopy(config) config = deepcopy(config)
config.update({ config.update(
'strategy': strategy, {
'timerange': timerange, "strategy": strategy,
'freqaimodel': freqaimodel if freqaimodel else config.get('freqaimodel'), "timerange": timerange,
}) "freqaimodel": freqaimodel if freqaimodel else config.get("freqaimodel"),
}
)
try: try:
return RPC._rpc_analysed_history_full(config, pair, timeframe, exchange, None) return RPC._rpc_analysed_history_full(config, pair, timeframe, exchange, None)
except Exception as e: except Exception as e:
raise HTTPException(status_code=502, detail=str(e)) raise HTTPException(status_code=502, detail=str(e))
@router.post('/pair_history', response_model=PairHistory, tags=['candle data']) @router.post("/pair_history", response_model=PairHistory, tags=["candle data"])
def pair_history_filtered(payload: PairHistoryRequest, def pair_history_filtered(
config=Depends(get_config), exchange=Depends(get_exchange)): payload: PairHistoryRequest, config=Depends(get_config), exchange=Depends(get_exchange)
):
# The initial call to this endpoint can be slow, as it may need to initialize # The initial call to this endpoint can be slow, as it may need to initialize
# the exchange class. # the exchange class.
config = deepcopy(config) config = deepcopy(config)
config.update({ config.update(
'strategy': payload.strategy, {
'timerange': payload.timerange, "strategy": payload.strategy,
'freqaimodel': payload.freqaimodel if payload.freqaimodel else config.get('freqaimodel'), "timerange": payload.timerange,
}) "freqaimodel": payload.freqaimodel
if payload.freqaimodel
else config.get("freqaimodel"),
}
)
try: try:
return RPC._rpc_analysed_history_full( return RPC._rpc_analysed_history_full(
config, payload.pair, payload.timeframe, exchange, payload.columns) config, payload.pair, payload.timeframe, exchange, payload.columns
)
except Exception as e: except Exception as e:
raise HTTPException(status_code=502, detail=str(e)) raise HTTPException(status_code=502, detail=str(e))
@router.get('/plot_config', response_model=PlotConfig, tags=['candle data']) @router.get("/plot_config", response_model=PlotConfig, tags=["candle data"])
def plot_config(strategy: Optional[str] = None, config=Depends(get_config), def plot_config(
rpc: Optional[RPC] = Depends(get_rpc_optional)): strategy: Optional[str] = None,
config=Depends(get_config),
rpc: Optional[RPC] = Depends(get_rpc_optional),
):
if not strategy: if not strategy:
if not rpc: if not rpc:
raise RPCException("Strategy is mandatory in webserver mode.") raise RPCException("Strategy is mandatory in webserver mode.")
return PlotConfig.model_validate(rpc._rpc_plot_config()) return PlotConfig.model_validate(rpc._rpc_plot_config())
else: else:
config1 = deepcopy(config) config1 = deepcopy(config)
config1.update({ config1.update({"strategy": strategy})
'strategy': strategy
})
try: try:
return PlotConfig.model_validate(RPC._rpc_plot_config_with_strategy(config1)) return PlotConfig.model_validate(RPC._rpc_plot_config_with_strategy(config1))
except Exception as e: except Exception as e:
raise HTTPException(status_code=502, detail=str(e)) raise HTTPException(status_code=502, detail=str(e))
@router.get('/strategies', response_model=StrategyListResponse, tags=['strategy']) @router.get("/strategies", response_model=StrategyListResponse, tags=["strategy"])
def list_strategies(config=Depends(get_config)): def list_strategies(config=Depends(get_config)):
from freqtrade.resolvers.strategy_resolver import StrategyResolver from freqtrade.resolvers.strategy_resolver import StrategyResolver
strategies = StrategyResolver.search_all_objects( strategies = StrategyResolver.search_all_objects(
config, False, config.get('recursive_strategy_search', False)) config, False, config.get("recursive_strategy_search", False)
strategies = sorted(strategies, key=lambda x: x['name']) )
strategies = sorted(strategies, key=lambda x: x["name"])
return {'strategies': [x['name'] for x in strategies]} return {"strategies": [x["name"] for x in strategies]}
@router.get('/strategy/{strategy}', response_model=StrategyResponse, tags=['strategy']) @router.get("/strategy/{strategy}", response_model=StrategyResponse, tags=["strategy"])
def get_strategy(strategy: str, config=Depends(get_config)): def get_strategy(strategy: str, config=Depends(get_config)):
if ":" in strategy: if ":" in strategy:
raise HTTPException(status_code=500, detail="base64 encoded strategies are not allowed.") raise HTTPException(status_code=500, detail="base64 encoded strategies are not allowed.")
config_ = deepcopy(config) config_ = deepcopy(config)
from freqtrade.resolvers.strategy_resolver import StrategyResolver from freqtrade.resolvers.strategy_resolver import StrategyResolver
try: try:
strategy_obj = StrategyResolver._load_strategy(strategy, config_, strategy_obj = StrategyResolver._load_strategy(
extra_dir=config_.get('strategy_path')) strategy, config_, extra_dir=config_.get("strategy_path")
)
except OperationalException: except OperationalException:
raise HTTPException(status_code=404, detail='Strategy not found') raise HTTPException(status_code=404, detail="Strategy not found")
except Exception as e: except Exception as e:
raise HTTPException(status_code=502, detail=str(e)) raise HTTPException(status_code=502, detail=str(e))
return { return {
'strategy': strategy_obj.get_strategy_name(), "strategy": strategy_obj.get_strategy_name(),
'code': strategy_obj.__source__, "code": strategy_obj.__source__,
'timeframe': getattr(strategy_obj, 'timeframe', None), "timeframe": getattr(strategy_obj, "timeframe", None),
} }
@router.get('/exchanges', response_model=ExchangeListResponse, tags=[]) @router.get("/exchanges", response_model=ExchangeListResponse, tags=[])
def list_exchanges(config=Depends(get_config)): def list_exchanges(config=Depends(get_config)):
from freqtrade.exchange import list_available_exchanges from freqtrade.exchange import list_available_exchanges
exchanges = list_available_exchanges(config) exchanges = list_available_exchanges(config)
return { return {
'exchanges': exchanges, "exchanges": exchanges,
} }
@router.get('/freqaimodels', response_model=FreqAIModelListResponse, tags=['freqai']) @router.get("/freqaimodels", response_model=FreqAIModelListResponse, tags=["freqai"])
def list_freqaimodels(config=Depends(get_config)): def list_freqaimodels(config=Depends(get_config)):
from freqtrade.resolvers.freqaimodel_resolver import FreqaiModelResolver from freqtrade.resolvers.freqaimodel_resolver import FreqaiModelResolver
models = FreqaiModelResolver.search_all_objects(
config, False)
models = sorted(models, key=lambda x: x['name'])
return {'freqaimodels': [x['name'] for x in models]} models = FreqaiModelResolver.search_all_objects(config, False)
models = sorted(models, key=lambda x: x["name"])
return {"freqaimodels": [x["name"] for x in models]}
@router.get('/available_pairs', response_model=AvailablePairs, tags=['candle data']) @router.get("/available_pairs", response_model=AvailablePairs, tags=["candle data"])
def list_available_pairs(timeframe: Optional[str] = None, stake_currency: Optional[str] = None, def list_available_pairs(
candletype: Optional[CandleType] = None, config=Depends(get_config)): timeframe: Optional[str] = None,
stake_currency: Optional[str] = None,
dh = get_datahandler(config['datadir'], config.get('dataformat_ohlcv')) candletype: Optional[CandleType] = None,
trading_mode: TradingMode = config.get('trading_mode', TradingMode.SPOT) config=Depends(get_config),
pair_interval = dh.ohlcv_get_available_data(config['datadir'], trading_mode) ):
dh = get_datahandler(config["datadir"], config.get("dataformat_ohlcv"))
trading_mode: TradingMode = config.get("trading_mode", TradingMode.SPOT)
pair_interval = dh.ohlcv_get_available_data(config["datadir"], trading_mode)
if timeframe: if timeframe:
pair_interval = [pair for pair in pair_interval if pair[1] == timeframe] pair_interval = [pair for pair in pair_interval if pair[1] == timeframe]
@@ -457,18 +492,18 @@ def list_available_pairs(timeframe: Optional[str] = None, stake_currency: Option
pairs = list({x[0] for x in pair_interval}) pairs = list({x[0] for x in pair_interval})
pairs.sort() pairs.sort()
result = { result = {
'length': len(pairs), "length": len(pairs),
'pairs': pairs, "pairs": pairs,
'pair_interval': pair_interval, "pair_interval": pair_interval,
} }
return result return result
@router.get('/sysinfo', response_model=SysInfo, tags=['info']) @router.get("/sysinfo", response_model=SysInfo, tags=["info"])
def sysinfo(): def sysinfo():
return RPC._rpc_sysinfo() return RPC._rpc_sysinfo()
@router.get('/health', response_model=Health, tags=['info']) @router.get("/health", response_model=Health, tags=["info"])
def health(rpc: RPC = Depends(get_rpc)): def health(rpc: RPC = Depends(get_rpc)):
return rpc.health() return rpc.health()
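The endpoints above are regular FastAPI routes behind the http_basic_or_jwt_token dependency, so they can be exercised with any HTTP client. A minimal usage sketch, assuming a locally running bot; host, port and credentials are placeholders that must match the api_server configuration:

import requests

# Query the /strategies endpoint defined above with HTTP basic auth.
resp = requests.get(
    "http://127.0.0.1:8080/api/v1/strategies",
    auth=("freqtrader", "change-me"),
    timeout=10,
)
resp.raise_for_status()
print(resp.json()["strategies"])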

@@ -37,7 +37,7 @@ async def channel_reader(channel: WebSocketChannel, rpc: RPC):
await _process_consumer_request(message, channel, rpc) await _process_consumer_request(message, channel, rpc)
except FreqtradeException: except FreqtradeException:
logger.exception(f"Error processing request from {channel}") logger.exception(f"Error processing request from {channel}")
response = WSErrorMessage(data='Error processing request') response = WSErrorMessage(data="Error processing request")
await channel.send(response.dict(exclude_none=True)) await channel.send(response.dict(exclude_none=True))
@@ -47,23 +47,21 @@ async def channel_broadcaster(channel: WebSocketChannel, message_stream: Message
Iterate over messages in the message stream and send them Iterate over messages in the message stream and send them
""" """
async for message, ts in message_stream: async for message, ts in message_stream:
if channel.subscribed_to(message.get('type')): if channel.subscribed_to(message.get("type")):
# Log a warning if this channel is behind # Log a warning if this channel is behind
# on the message stream by a lot # on the message stream by a lot
if (time.time() - ts) > 60: if (time.time() - ts) > 60:
logger.warning(f"Channel {channel} is behind MessageStream by 1 minute," logger.warning(
f"Channel {channel} is behind MessageStream by 1 minute,"
" this can cause a memory leak if you see this message" " this can cause a memory leak if you see this message"
" often, consider reducing pair list size or amount of" " often, consider reducing pair list size or amount of"
" consumers.") " consumers."
)
await channel.send(message, timeout=True) await channel.send(message, timeout=True)
async def _process_consumer_request( async def _process_consumer_request(request: Dict[str, Any], channel: WebSocketChannel, rpc: RPC):
request: Dict[str, Any],
channel: WebSocketChannel,
rpc: RPC
):
""" """
Validate and handle a request from a websocket consumer Validate and handle a request from a websocket consumer
""" """
@@ -102,8 +100,8 @@ async def _process_consumer_request(
elif type_ == RPCRequestType.ANALYZED_DF: elif type_ == RPCRequestType.ANALYZED_DF:
# Limit the amount of candles per dataframe to 'limit' or 1500 # Limit the amount of candles per dataframe to 'limit' or 1500
limit = int(min(data.get('limit', 1500), 1500)) if data else None limit = int(min(data.get("limit", 1500), 1500)) if data else None
pair = data.get('pair', None) if data else None pair = data.get("pair", None) if data else None
# For every pair in the generator, send a separate message # For every pair in the generator, send a separate message
for message in rpc._ws_request_analyzed_df(limit, pair): for message in rpc._ws_request_analyzed_df(limit, pair):
@@ -117,11 +115,10 @@ async def message_endpoint(
websocket: WebSocket, websocket: WebSocket,
token: str = Depends(validate_ws_token), token: str = Depends(validate_ws_token),
rpc: RPC = Depends(get_rpc), rpc: RPC = Depends(get_rpc),
message_stream: MessageStream = Depends(get_message_stream) message_stream: MessageStream = Depends(get_message_stream),
): ):
if token: if token:
async with create_channel(websocket) as channel: async with create_channel(websocket) as channel:
await channel.run_channel_tasks( await channel.run_channel_tasks(
channel_reader(channel, rpc), channel_reader(channel, rpc), channel_broadcaster(channel, message_stream)
channel_broadcaster(channel, message_stream)
) )
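For reference, a small client-side sketch of this websocket endpoint. Host, port, token and the exact request payloads are assumptions; the message shapes are meant to mirror the WSSubscribeRequest/WSWhitelistRequest schemas referenced later in this commit:

import asyncio
import json

import websockets


async def main():
    # Token must match api_server.ws_token; the address is a placeholder.
    url = "ws://127.0.0.1:8080/api/v1/message/ws?token=secret-ws-token"
    async with websockets.connect(url) as ws:
        # Subscribe to whitelist updates, then request the current whitelist.
        await ws.send(json.dumps({"type": "subscribe", "data": ["whitelist"]}))
        await ws.send(json.dumps({"type": "whitelist", "data": None}))
        print(json.loads(await ws.recv()))


asyncio.run(main())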

@@ -20,7 +20,6 @@ def get_rpc_optional() -> Optional[RPC]:
async def get_rpc() -> Optional[AsyncIterator[RPC]]: async def get_rpc() -> Optional[AsyncIterator[RPC]]:
_rpc = get_rpc_optional() _rpc = get_rpc_optional()
if _rpc: if _rpc:
request_id = str(uuid4()) request_id = str(uuid4())
@@ -33,7 +32,7 @@ async def get_rpc() -> Optional[AsyncIterator[RPC]]:
_request_id_ctx_var.reset(ctx_token) _request_id_ctx_var.reset(ctx_token)
else: else:
raise RPCException('Bot is not in the correct state') raise RPCException("Bot is not in the correct state")
def get_config() -> Dict[str, Any]: def get_config() -> Dict[str, Any]:
@@ -41,7 +40,7 @@ def get_config() -> Dict[str, Any]:
def get_api_config() -> Dict[str, Any]: def get_api_config() -> Dict[str, Any]:
return ApiServer._config['api_server'] return ApiServer._config["api_server"]
def _generate_exchange_key(config: Config) -> str: def _generate_exchange_key(config: Config) -> str:
@@ -55,8 +54,8 @@ def get_exchange(config=Depends(get_config)):
exchange_key = _generate_exchange_key(config) exchange_key = _generate_exchange_key(config)
if not (exchange := ApiBG.exchanges.get(exchange_key)): if not (exchange := ApiBG.exchanges.get(exchange_key)):
from freqtrade.resolvers import ExchangeResolver from freqtrade.resolvers import ExchangeResolver
exchange = ExchangeResolver.load_exchange(
config, validate=False, load_leverage_tiers=False) exchange = ExchangeResolver.load_exchange(config, validate=False, load_leverage_tiers=False)
ApiBG.exchanges[exchange_key] = exchange ApiBG.exchanges[exchange_key] = exchange
return exchange return exchange
@@ -66,7 +65,6 @@ def get_message_stream():
def is_webserver_mode(config=Depends(get_config)): def is_webserver_mode(config=Depends(get_config)):
if config['runmode'] != RunMode.WEBSERVER: if config["runmode"] != RunMode.WEBSERVER:
raise HTTPException(status_code=503, raise HTTPException(status_code=503, detail="Bot is not in the correct state.")
detail='Bot is not in the correct state.')
return None return None

@@ -14,6 +14,7 @@ def asyncio_setup() -> None: # pragma: no cover
if sys.version_info >= (3, 8) and sys.platform == "win32": if sys.version_info >= (3, 8) and sys.platform == "win32":
import asyncio import asyncio
import selectors import selectors
selector = selectors.SelectSelector() selector = selectors.SelectSelector()
loop = asyncio.SelectorEventLoop(selector) loop = asyncio.SelectorEventLoop(selector)
asyncio.set_event_loop(loop) asyncio.set_event_loop(loop)
@@ -42,7 +43,6 @@ class UvicornServer(uvicorn.Server):
try: try:
import uvloop # noqa import uvloop # noqa
except ImportError: # pragma: no cover except ImportError: # pragma: no cover
asyncio_setup() asyncio_setup()
else: else:
asyncio.set_event_loop(uvloop.new_event_loop()) asyncio.set_event_loop(uvloop.new_event_loop())
@@ -55,7 +55,7 @@ class UvicornServer(uvicorn.Server):
@contextlib.contextmanager @contextlib.contextmanager
def run_in_thread(self): def run_in_thread(self):
self.thread = threading.Thread(target=self.run, name='FTUvicorn') self.thread = threading.Thread(target=self.run, name="FTUvicorn")
self.thread.start() self.thread.start()
while not self.started: while not self.started:
time.sleep(1e-3) time.sleep(1e-3)
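Only the opening lines of run_in_thread are visible in this hunk; as a rough sketch of the pattern (a stand-in class, not the freqtrade UvicornServer), the blocking run() is pushed into a thread, the caller waits for a started flag, and shutdown happens in the finally block:

import contextlib
import threading
import time


class DemoServer:
    """Stand-in illustrating the run-in-thread pattern only."""

    def __init__(self):
        self.started = False
        self.should_exit = False

    def run(self):
        self.started = True
        while not self.should_exit:
            time.sleep(0.01)

    @contextlib.contextmanager
    def run_in_thread(self):
        thread = threading.Thread(target=self.run, name="FTUvicorn-demo")
        thread.start()
        while not self.started:
            time.sleep(1e-3)
        try:
            yield
        finally:
            self.should_exit = True
            thread.join()


with DemoServer().run_in_thread():
    print("server thread is running while this block executes")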

@@ -9,20 +9,21 @@ from starlette.responses import FileResponse
router_ui = APIRouter() router_ui = APIRouter()
@router_ui.get('/favicon.ico', include_in_schema=False) @router_ui.get("/favicon.ico", include_in_schema=False)
async def favicon(): async def favicon():
return FileResponse(str(Path(__file__).parent / 'ui/favicon.ico')) return FileResponse(str(Path(__file__).parent / "ui/favicon.ico"))
@router_ui.get('/fallback_file.html', include_in_schema=False) @router_ui.get("/fallback_file.html", include_in_schema=False)
async def fallback(): async def fallback():
return FileResponse(str(Path(__file__).parent / 'ui/fallback_file.html')) return FileResponse(str(Path(__file__).parent / "ui/fallback_file.html"))
@router_ui.get('/ui_version', include_in_schema=False) @router_ui.get("/ui_version", include_in_schema=False)
async def ui_version(): async def ui_version():
from freqtrade.commands.deploy_commands import read_ui_version from freqtrade.commands.deploy_commands import read_ui_version
uibase = Path(__file__).parent / 'ui/installed/'
uibase = Path(__file__).parent / "ui/installed/"
version = read_ui_version(uibase) version = read_ui_version(uibase)
return { return {
@@ -40,26 +41,26 @@ def is_relative_to(path: Path, base: Path) -> bool:
return False return False
@router_ui.get('/{rest_of_path:path}', include_in_schema=False) @router_ui.get("/{rest_of_path:path}", include_in_schema=False)
async def index_html(rest_of_path: str): async def index_html(rest_of_path: str):
""" """
Emulate path fallback to index.html. Emulate path fallback to index.html.
""" """
if rest_of_path.startswith('api') or rest_of_path.startswith('.'): if rest_of_path.startswith("api") or rest_of_path.startswith("."):
raise HTTPException(status_code=404, detail="Not Found") raise HTTPException(status_code=404, detail="Not Found")
uibase = Path(__file__).parent / 'ui/installed/' uibase = Path(__file__).parent / "ui/installed/"
filename = uibase / rest_of_path filename = uibase / rest_of_path
# It's security relevant to check "relative_to". # It's security relevant to check "relative_to".
# Without this, Directory-traversal is possible. # Without this, Directory-traversal is possible.
media_type: Optional[str] = None media_type: Optional[str] = None
if filename.suffix == '.js': if filename.suffix == ".js":
# Force text/javascript for .js files - Circumvent faulty system configuration # Force text/javascript for .js files - Circumvent faulty system configuration
media_type = 'application/javascript' media_type = "application/javascript"
if filename.is_file() and is_relative_to(filename, uibase): if filename.is_file() and is_relative_to(filename, uibase):
return FileResponse(str(filename), media_type=media_type) return FileResponse(str(filename), media_type=media_type)
index_file = uibase / 'index.html' index_file = uibase / "index.html"
if not index_file.is_file(): if not index_file.is_file():
return FileResponse(str(uibase.parent / 'fallback_file.html')) return FileResponse(str(uibase.parent / "fallback_file.html"))
# Fall back to index.html, as indicated by vue router docs # Fall back to index.html, as indicated by vue router docs
return FileResponse(str(index_file)) return FileResponse(str(index_file))
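Since the comment above flags the relative_to check as security relevant, here is a simplified, self-contained illustration of path containment. It is not the project's is_relative_to helper, and the paths are made up:

from pathlib import Path


def contained_in(path: Path, base: Path) -> bool:
    # Resolve both sides, then require `path` to live underneath `base`.
    try:
        path.resolve().relative_to(base.resolve())
        return True
    except ValueError:
        return False


uibase = Path("/opt/freqtrade/ui/installed")
print(contained_in(uibase / "assets/app.js", uibase))        # True  -> may be served
print(contained_in(uibase / "../../../etc/passwd", uibase))  # False -> rejected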

@@ -32,7 +32,6 @@ class FTJSONResponse(JSONResponse):
class ApiServer(RPCHandler): class ApiServer(RPCHandler):
__instance = None __instance = None
__initialized = False __initialized = False
@@ -61,10 +60,11 @@ class ApiServer(RPCHandler):
ApiServer.__initialized = True ApiServer.__initialized = True
api_config = self._config['api_server'] api_config = self._config["api_server"]
self.app = FastAPI(title="Freqtrade API", self.app = FastAPI(
docs_url='/docs' if api_config.get('enable_openapi', False) else None, title="Freqtrade API",
docs_url="/docs" if api_config.get("enable_openapi", False) else None,
redoc_url=None, redoc_url=None,
default_response_class=FTJSONResponse, default_response_class=FTJSONResponse,
) )
@@ -80,7 +80,7 @@ class ApiServer(RPCHandler):
ApiServer._has_rpc = True ApiServer._has_rpc = True
else: else:
# This should not happen assuming we didn't mess up. # This should not happen assuming we didn't mess up.
raise OperationalException('RPC Handler already attached.') raise OperationalException("RPC Handler already attached.")
def cleanup(self) -> None: def cleanup(self) -> None:
"""Cleanup pending module resources""" """Cleanup pending module resources"""
@@ -109,8 +109,7 @@ class ApiServer(RPCHandler):
def handle_rpc_exception(self, request, exc): def handle_rpc_exception(self, request, exc):
logger.error(f"API Error calling: {exc}") logger.error(f"API Error calling: {exc}")
return JSONResponse( return JSONResponse(
status_code=502, status_code=502, content={"error": f"Error querying {request.url.path}: {exc.message}"}
content={'error': f"Error querying {request.url.path}: {exc.message}"}
) )
def configure_app(self, app: FastAPI, config): def configure_app(self, app: FastAPI, config):
@@ -126,38 +125,36 @@ class ApiServer(RPCHandler):
app.include_router(api_v1_public, prefix="/api/v1") app.include_router(api_v1_public, prefix="/api/v1")
app.include_router(router_login, prefix="/api/v1", tags=["auth"]) app.include_router(router_login, prefix="/api/v1", tags=["auth"])
app.include_router(api_v1, prefix="/api/v1", app.include_router(
api_v1,
prefix="/api/v1",
dependencies=[Depends(http_basic_or_jwt_token)], dependencies=[Depends(http_basic_or_jwt_token)],
) )
app.include_router(api_backtest, prefix="/api/v1", app.include_router(
dependencies=[Depends(http_basic_or_jwt_token), api_backtest,
Depends(is_webserver_mode)], prefix="/api/v1",
dependencies=[Depends(http_basic_or_jwt_token), Depends(is_webserver_mode)],
) )
app.include_router(api_bg_tasks, prefix="/api/v1", app.include_router(
dependencies=[Depends(http_basic_or_jwt_token), api_bg_tasks,
Depends(is_webserver_mode)], prefix="/api/v1",
dependencies=[Depends(http_basic_or_jwt_token), Depends(is_webserver_mode)],
) )
app.include_router(ws_router, prefix="/api/v1") app.include_router(ws_router, prefix="/api/v1")
# UI Router MUST be last! # UI Router MUST be last!
app.include_router(router_ui, prefix='') app.include_router(router_ui, prefix="")
app.add_middleware( app.add_middleware(
CORSMiddleware, CORSMiddleware,
allow_origins=config['api_server'].get('CORS_origins', []), allow_origins=config["api_server"].get("CORS_origins", []),
allow_credentials=True, allow_credentials=True,
allow_methods=["*"], allow_methods=["*"],
allow_headers=["*"], allow_headers=["*"],
) )
app.add_exception_handler(RPCException, self.handle_rpc_exception) app.add_exception_handler(RPCException, self.handle_rpc_exception)
app.add_event_handler( app.add_event_handler(event_type="startup", func=self._api_startup_event)
event_type="startup", app.add_event_handler(event_type="shutdown", func=self._api_shutdown_event)
func=self._api_startup_event
)
app.add_event_handler(
event_type="shutdown",
func=self._api_shutdown_event
)
async def _api_startup_event(self): async def _api_startup_event(self):
""" """
@@ -179,34 +176,42 @@ class ApiServer(RPCHandler):
""" """
Start API ... should be run in thread. Start API ... should be run in thread.
""" """
rest_ip = self._config['api_server']['listen_ip_address'] rest_ip = self._config["api_server"]["listen_ip_address"]
rest_port = self._config['api_server']['listen_port'] rest_port = self._config["api_server"]["listen_port"]
logger.info(f'Starting HTTP Server at {rest_ip}:{rest_port}') logger.info(f"Starting HTTP Server at {rest_ip}:{rest_port}")
if not IPv4Address(rest_ip).is_loopback and not running_in_docker(): if not IPv4Address(rest_ip).is_loopback and not running_in_docker():
logger.warning("SECURITY WARNING - Local Rest Server listening to external connections") logger.warning("SECURITY WARNING - Local Rest Server listening to external connections")
logger.warning("SECURITY WARNING - This is insecure please set to your loopback," logger.warning(
"e.g 127.0.0.1 in config.json") "SECURITY WARNING - This is insecure please set to your loopback,"
"e.g 127.0.0.1 in config.json"
)
if not self._config['api_server'].get('password'): if not self._config["api_server"].get("password"):
logger.warning("SECURITY WARNING - No password for local REST Server defined. " logger.warning(
"Please make sure that this is intentional!") "SECURITY WARNING - No password for local REST Server defined. "
"Please make sure that this is intentional!"
)
if (self._config['api_server'].get('jwt_secret_key', 'super-secret') if self._config["api_server"].get("jwt_secret_key", "super-secret") in (
in ('super-secret, somethingrandom')): "super-secret, somethingrandom"
logger.warning("SECURITY WARNING - `jwt_secret_key` seems to be default." ):
"Others may be able to log into your bot.") logger.warning(
"SECURITY WARNING - `jwt_secret_key` seems to be default."
"Others may be able to log into your bot."
)
logger.info('Starting Local Rest Server.') logger.info("Starting Local Rest Server.")
verbosity = self._config['api_server'].get('verbosity', 'error') verbosity = self._config["api_server"].get("verbosity", "error")
uvconfig = uvicorn.Config(self.app, uvconfig = uvicorn.Config(
self.app,
port=rest_port, port=rest_port,
host=rest_ip, host=rest_ip,
use_colors=False, use_colors=False,
log_config=None, log_config=None,
access_log=True if verbosity != 'error' else False, access_log=True if verbosity != "error" else False,
ws_ping_interval=None # We do this explicitly ourselves ws_ping_interval=None, # We do this explicitly ourselves
) )
try: try:
self._server = UvicornServer(uvconfig) self._server = UvicornServer(uvconfig)
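The keys read in configure_app() and start() all come from the api_server section of the configuration. An illustrative example covering them; every value below is a placeholder, and jwt_secret_key and ws_token in particular should be generated, not copied:

api_server_config = {
    "api_server": {
        "enabled": True,
        "listen_ip_address": "127.0.0.1",
        "listen_port": 8080,
        "username": "freqtrader",
        "password": "change-me",
        "jwt_secret_key": "generate-something-random",
        "ws_token": "generate-a-long-random-token",
        "CORS_origins": [],
        "enable_openapi": False,
        "verbosity": "error",
    }
}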

@@ -1,4 +1,3 @@
from typing import Any, Dict, Literal, Optional, TypedDict from typing import Any, Dict, Literal, Optional, TypedDict
from uuid import uuid4 from uuid import uuid4
@@ -6,7 +5,7 @@ from freqtrade.exchange.exchange import Exchange
class JobsContainer(TypedDict): class JobsContainer(TypedDict):
category: Literal['pairlist'] category: Literal["pairlist"]
is_running: bool is_running: bool
status: str status: str
progress: Optional[float] progress: Optional[float]
@@ -17,11 +16,11 @@ class JobsContainer(TypedDict):
class ApiBG: class ApiBG:
# Backtesting type: Backtesting # Backtesting type: Backtesting
bt: Dict[str, Any] = { bt: Dict[str, Any] = {
'bt': None, "bt": None,
'data': None, "data": None,
'timerange': None, "timerange": None,
'last_config': {}, "last_config": {},
'bt_error': None, "bt_error": None,
} }
bgtask_running: bool = False bgtask_running: bool = False
# Exchange - only available in webserver mode. # Exchange - only available in webserver mode.

@@ -25,12 +25,13 @@ class WebSocketChannel:
""" """
Object to help facilitate managing a websocket connection Object to help facilitate managing a websocket connection
""" """
def __init__( def __init__(
self, self,
websocket: WebSocketType, websocket: WebSocketType,
channel_id: Optional[str] = None, channel_id: Optional[str] = None,
serializer_cls: Type[WebSocketSerializer] = HybridJSONWebSocketSerializer, serializer_cls: Type[WebSocketSerializer] = HybridJSONWebSocketSerializer,
send_throttle: float = 0.01 send_throttle: float = 0.01,
): ):
self.channel_id = channel_id if channel_id else uuid4().hex[:8] self.channel_id = channel_id if channel_id else uuid4().hex[:8]
self._websocket = WebSocketProxy(websocket) self._websocket = WebSocketProxy(websocket)
@@ -79,9 +80,7 @@ class WebSocketChannel:
self._send_high_limit = min(max(self.avg_send_time * 2, 1), 3) self._send_high_limit = min(max(self.avg_send_time * 2, 1), 3)
async def send( async def send(
self, self, message: Union[WSMessageSchemaType, Dict[str, Any]], timeout: bool = False
message: Union[WSMessageSchemaType, Dict[str, Any]],
timeout: bool = False
): ):
""" """
Send a message on the wrapped websocket. If the sending Send a message on the wrapped websocket. If the sending
@@ -97,8 +96,7 @@ class WebSocketChannel:
# a TimeoutError and bubble up to the # a TimeoutError and bubble up to the
# message_endpoint to close the connection # message_endpoint to close the connection
await asyncio.wait_for( await asyncio.wait_for(
self._wrapped_ws.send(message), self._wrapped_ws.send(message), timeout=self._send_high_limit if timeout else None
timeout=self._send_high_limit if timeout else None
) )
total_time = time.time() - _ total_time = time.time() - _
self._send_times.append(total_time) self._send_times.append(total_time)
@@ -207,7 +205,7 @@ class WebSocketChannel:
asyncio.TimeoutError, asyncio.TimeoutError,
WebSocketDisconnect, WebSocketDisconnect,
ConnectionClosed, ConnectionClosed,
RuntimeError RuntimeError,
): ):
pass pass
except Exception as e: except Exception as e:
@@ -227,10 +225,7 @@ class WebSocketChannel:
@asynccontextmanager @asynccontextmanager
async def create_channel( async def create_channel(websocket: WebSocketType, **kwargs) -> AsyncIterator[WebSocketChannel]:
websocket: WebSocketType,
**kwargs
) -> AsyncIterator[WebSocketChannel]:
""" """
Context manager for safely opening and closing a WebSocketChannel Context manager for safely opening and closing a WebSocketChannel
""" """

@@ -7,6 +7,7 @@ class MessageStream:
A message stream for consumers to subscribe to, A message stream for consumers to subscribe to,
and for producers to publish to. and for producers to publish to.
""" """
def __init__(self): def __init__(self):
self._loop = asyncio.get_running_loop() self._loop = asyncio.get_running_loop()
self._waiter = self._loop.create_future() self._waiter = self._loop.create_future()

@@ -46,15 +46,12 @@ class HybridJSONWebSocketSerializer(WebSocketSerializer):
# Support serializing pandas DataFrames # Support serializing pandas DataFrames
def _json_default(z): def _json_default(z):
if isinstance(z, DataFrame): if isinstance(z, DataFrame):
return { return {"__type__": "dataframe", "__value__": dataframe_to_json(z)}
'__type__': 'dataframe',
'__value__': dataframe_to_json(z)
}
raise TypeError raise TypeError
# Support deserializing JSON to pandas DataFrames # Support deserializing JSON to pandas DataFrames
def _json_object_hook(z): def _json_object_hook(z):
if z.get('__type__') == 'dataframe': if z.get("__type__") == "dataframe":
return json_to_dataframe(z.get('__value__')) return json_to_dataframe(z.get("__value__"))
return z return z
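The __type__/__value__ envelope above can be shown with a small round trip. The standard json module and pandas' split-orient JSON stand in here for the project's serializer and its dataframe_to_json/json_to_dataframe helpers:

import io
import json

import pandas as pd


def _default(obj):
    # Mirrors _json_default above: wrap DataFrames in a typed envelope.
    if isinstance(obj, pd.DataFrame):
        return {"__type__": "dataframe", "__value__": obj.to_json(orient="split")}
    raise TypeError


def _object_hook(obj):
    # Mirrors _json_object_hook above: unwrap the envelope back to a DataFrame.
    if obj.get("__type__") == "dataframe":
        return pd.read_json(io.StringIO(obj["__value__"]), orient="split")
    return obj


df = pd.DataFrame({"close": [1.0, 2.0]})
wire = json.dumps({"type": "analyzed_df", "data": df}, default=_default)
restored = json.loads(wire, object_hook=_object_hook)
print(restored["data"].equals(df))  # True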

@@ -26,7 +26,7 @@ class WSMessageSchemaType(TypedDict):
class WSMessageSchema(BaseArbitraryModel): class WSMessageSchema(BaseArbitraryModel):
type: RPCMessageType type: RPCMessageType
data: Optional[Any] = None data: Optional[Any] = None
model_config = ConfigDict(extra='allow') model_config = ConfigDict(extra="allow")
# ------------------------------ REQUEST SCHEMAS ---------------------------- # ------------------------------ REQUEST SCHEMAS ----------------------------
@@ -49,6 +49,7 @@ class WSAnalyzedDFRequest(WSRequestSchema):
# ------------------------------ MESSAGE SCHEMAS ---------------------------- # ------------------------------ MESSAGE SCHEMAS ----------------------------
class WSWhitelistMessage(WSMessageSchema): class WSWhitelistMessage(WSMessageSchema):
type: RPCMessageType = RPCMessageType.WHITELIST type: RPCMessageType = RPCMessageType.WHITELIST
data: List[str] data: List[str]
@@ -68,4 +69,5 @@ class WSErrorMessage(WSMessageSchema):
type: RPCMessageType = RPCMessageType.EXCEPTION type: RPCMessageType = RPCMessageType.EXCEPTION
data: str data: str
# -------------------------------------------------------------------------- # --------------------------------------------------------------------------

@@ -10,18 +10,18 @@ logger = logging.getLogger(__name__)
class Discord(Webhook): class Discord(Webhook):
def __init__(self, rpc: 'RPC', config: Config): def __init__(self, rpc: "RPC", config: Config):
self._config = config self._config = config
self.rpc = rpc self.rpc = rpc
self.strategy = config.get('strategy', '') self.strategy = config.get("strategy", "")
self.timeframe = config.get('timeframe', '') self.timeframe = config.get("timeframe", "")
self.bot_name = config.get('bot_name', '') self.bot_name = config.get("bot_name", "")
self._url = config['discord']['webhook_url'] self._url = config["discord"]["webhook_url"]
self._format = 'json' self._format = "json"
self._retries = 1 self._retries = 1
self._retry_delay = 0.1 self._retry_delay = 0.1
self._timeout = self._config['discord'].get('timeout', 10) self._timeout = self._config["discord"].get("timeout", 10)
def cleanup(self) -> None: def cleanup(self) -> None:
""" """
@@ -31,32 +31,31 @@ class Discord(Webhook):
pass pass
def send_msg(self, msg) -> None: def send_msg(self, msg) -> None:
if fields := self._config["discord"].get(msg["type"].value):
if (fields := self._config['discord'].get(msg['type'].value)):
logger.info(f"Sending discord message: {msg}") logger.info(f"Sending discord message: {msg}")
msg['strategy'] = self.strategy msg["strategy"] = self.strategy
msg['timeframe'] = self.timeframe msg["timeframe"] = self.timeframe
msg['bot_name'] = self.bot_name msg["bot_name"] = self.bot_name
color = 0x0000FF color = 0x0000FF
if msg['type'] in (RPCMessageType.EXIT, RPCMessageType.EXIT_FILL): if msg["type"] in (RPCMessageType.EXIT, RPCMessageType.EXIT_FILL):
profit_ratio = msg.get('profit_ratio') profit_ratio = msg.get("profit_ratio")
color = (0x00FF00 if profit_ratio > 0 else 0xFF0000) color = 0x00FF00 if profit_ratio > 0 else 0xFF0000
title = msg['type'].value title = msg["type"].value
if 'pair' in msg: if "pair" in msg:
title = f"Trade: {msg['pair']} {msg['type'].value}" title = f"Trade: {msg['pair']} {msg['type'].value}"
embeds = [{ embeds = [
'title': title, {
'color': color, "title": title,
'fields': [], "color": color,
"fields": [],
}] }
]
for f in fields: for f in fields:
for k, v in f.items(): for k, v in f.items():
v = v.format(**msg) v = v.format(**msg)
embeds[0]['fields'].append( embeds[0]["fields"].append({"name": k, "value": v, "inline": True})
{'name': k, 'value': v, 'inline': True})
# Send the message to discord channel # Send the message to discord channel
payload = {'embeds': embeds} payload = {"embeds": embeds}
self._send_msg(payload) self._send_msg(payload)
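For context, the payload assembled above follows Discord's webhook embed format. A standalone sketch of what ends up being posted; the title, fields and values are made up:

import json

payload = {
    "embeds": [
        {
            "title": "Trade: BTC/USDT exit",
            "color": 0x00FF00,
            "fields": [
                {"name": "Profit", "value": "2.5%", "inline": True},
                {"name": "Close rate", "value": "65000.0", "inline": True},
            ],
        }
    ]
}
print(json.dumps(payload, indent=2))
# Delivery then mirrors Webhook._send_msg with format "json", roughly:
#   requests.post(discord_webhook_url, json=payload, timeout=10)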

@@ -4,6 +4,7 @@ ExternalMessageConsumer module
Main purpose is to connect to external bot's message websocket to consume data Main purpose is to connect to external bot's message websocket to consume data
from it from it
""" """
import asyncio import asyncio
import logging import logging
import socket import socket
@@ -55,11 +56,7 @@ class ExternalMessageConsumer:
other freqtrade bot's other freqtrade bot's
""" """
def __init__( def __init__(self, config: Dict[str, Any], dataprovider: DataProvider):
self,
config: Dict[str, Any],
dataprovider: DataProvider
):
self._config = config self._config = config
self._dp = dataprovider self._dp = dataprovider
@@ -69,21 +66,21 @@ class ExternalMessageConsumer:
self._main_task = None self._main_task = None
self._sub_tasks = None self._sub_tasks = None
self._emc_config = self._config.get('external_message_consumer', {}) self._emc_config = self._config.get("external_message_consumer", {})
self.enabled = self._emc_config.get('enabled', False) self.enabled = self._emc_config.get("enabled", False)
self.producers: List[Producer] = self._emc_config.get('producers', []) self.producers: List[Producer] = self._emc_config.get("producers", [])
self.wait_timeout = self._emc_config.get('wait_timeout', 30) # in seconds self.wait_timeout = self._emc_config.get("wait_timeout", 30) # in seconds
self.ping_timeout = self._emc_config.get('ping_timeout', 10) # in seconds self.ping_timeout = self._emc_config.get("ping_timeout", 10) # in seconds
self.sleep_time = self._emc_config.get('sleep_time', 10) # in seconds self.sleep_time = self._emc_config.get("sleep_time", 10) # in seconds
# The amount of candles per dataframe on the initial request # The amount of candles per dataframe on the initial request
self.initial_candle_limit = self._emc_config.get('initial_candle_limit', 1500) self.initial_candle_limit = self._emc_config.get("initial_candle_limit", 1500)
# Message size limit, in megabytes. Default 8mb, Use bitwise operator << 20 to convert # Message size limit, in megabytes. Default 8mb, Use bitwise operator << 20 to convert
# as the websockets client expects bytes. # as the websockets client expects bytes.
self.message_size_limit = (self._emc_config.get('message_size_limit', 8) << 20) self.message_size_limit = self._emc_config.get("message_size_limit", 8) << 20
# Setting these explicitly as they probably shouldn't be changed by a user # Setting these explicitly as they probably shouldn't be changed by a user
# Unless we somehow integrate this with the strategy to allow creating # Unless we somehow integrate this with the strategy to allow creating
@@ -94,7 +91,7 @@ class ExternalMessageConsumer:
self._initial_requests: List[WSRequestSchema] = [ self._initial_requests: List[WSRequestSchema] = [
WSSubscribeRequest(data=self.topics), WSSubscribeRequest(data=self.topics),
WSWhitelistRequest(), WSWhitelistRequest(),
WSAnalyzedDFRequest() WSAnalyzedDFRequest(),
] ]
# Specify which function to use for which RPCMessageType # Specify which function to use for which RPCMessageType
@@ -192,31 +189,24 @@ class ExternalMessageConsumer:
""" """
while self._running: while self._running:
try: try:
host, port = producer['host'], producer['port'] host, port = producer["host"], producer["port"]
token = producer['ws_token'] token = producer["ws_token"]
name = producer['name'] name = producer["name"]
scheme = 'wss' if producer.get('secure', False) else 'ws' scheme = "wss" if producer.get("secure", False) else "ws"
ws_url = f"{scheme}://{host}:{port}/api/v1/message/ws?token={token}" ws_url = f"{scheme}://{host}:{port}/api/v1/message/ws?token={token}"
# This will raise InvalidURI if the url is bad # This will raise InvalidURI if the url is bad
async with websockets.connect( async with websockets.connect(
ws_url, ws_url, max_size=self.message_size_limit, ping_interval=None
max_size=self.message_size_limit,
ping_interval=None
) as ws: ) as ws:
async with create_channel( async with create_channel(ws, channel_id=name, send_throttle=0.5) as channel:
ws,
channel_id=name,
send_throttle=0.5
) as channel:
# Create the message stream for this channel # Create the message stream for this channel
self._channel_streams[name] = MessageStream() self._channel_streams[name] = MessageStream()
# Run the channel tasks while connected # Run the channel tasks while connected
await channel.run_channel_tasks( await channel.run_channel_tasks(
self._receive_messages(channel, producer, lock), self._receive_messages(channel, producer, lock),
self._send_requests(channel, self._channel_streams[name]) self._send_requests(channel, self._channel_streams[name]),
) )
except (websockets.exceptions.InvalidURI, ValueError) as e: except (websockets.exceptions.InvalidURI, ValueError) as e:
@@ -227,7 +217,7 @@ class ExternalMessageConsumer:
socket.gaierror, socket.gaierror,
ConnectionRefusedError, ConnectionRefusedError,
websockets.exceptions.InvalidStatusCode, websockets.exceptions.InvalidStatusCode,
websockets.exceptions.InvalidMessage websockets.exceptions.InvalidMessage,
) as e: ) as e:
logger.error(f"Connection Refused - {e} retrying in {self.sleep_time}s") logger.error(f"Connection Refused - {e} retrying in {self.sleep_time}s")
await asyncio.sleep(self.sleep_time) await asyncio.sleep(self.sleep_time)
@@ -235,7 +225,7 @@ class ExternalMessageConsumer:
except ( except (
websockets.exceptions.ConnectionClosedError, websockets.exceptions.ConnectionClosedError,
websockets.exceptions.ConnectionClosedOK websockets.exceptions.ConnectionClosedOK,
): ):
# Just keep trying to connect again indefinitely # Just keep trying to connect again indefinitely
await asyncio.sleep(self.sleep_time) await asyncio.sleep(self.sleep_time)
@@ -260,10 +250,7 @@ class ExternalMessageConsumer:
await channel.send(request) await channel.send(request)
async def _receive_messages( async def _receive_messages(
self, self, channel: WebSocketChannel, producer: Producer, lock: asyncio.Lock
channel: WebSocketChannel,
producer: Producer,
lock: asyncio.Lock
): ):
""" """
Loop to handle receiving messages from a Producer Loop to handle receiving messages from a Producer
@@ -274,10 +261,7 @@ class ExternalMessageConsumer:
""" """
while self._running: while self._running:
try: try:
message = await asyncio.wait_for( message = await asyncio.wait_for(channel.recv(), timeout=self.wait_timeout)
channel.recv(),
timeout=self.wait_timeout
)
try: try:
async with lock: async with lock:
@@ -291,7 +275,7 @@ class ExternalMessageConsumer:
try: try:
# ping # ping
pong = await channel.ping() pong = await channel.ping()
latency = (await asyncio.wait_for(pong, timeout=self.ping_timeout) * 1000) latency = await asyncio.wait_for(pong, timeout=self.ping_timeout) * 1000
logger.info(f"Connection to {channel} still alive, latency: {latency}ms") logger.info(f"Connection to {channel} still alive, latency: {latency}ms")
continue continue
@@ -303,9 +287,7 @@ class ExternalMessageConsumer:
raise raise
def send_producer_request( def send_producer_request(
self, self, producer_name: str, request: Union[WSRequestSchema, Dict[str, Any]]
producer_name: str,
request: Union[WSRequestSchema, Dict[str, Any]]
): ):
""" """
Publish a message to the producer's message stream to be Publish a message to the producer's message stream to be
@@ -324,7 +306,7 @@ class ExternalMessageConsumer:
""" """
Handles external messages from a Producer Handles external messages from a Producer
""" """
producer_name = producer.get('name', 'default') producer_name = producer.get("name", "default")
try: try:
producer_message = WSMessageSchema.model_validate(message) producer_message = WSMessageSchema.model_validate(message)
@@ -377,7 +359,7 @@ class ExternalMessageConsumer:
return return
# If set, remove the Entry and Exit signals from the Producer # If set, remove the Entry and Exit signals from the Producer
if self._emc_config.get('remove_entry_exit_signals', False): if self._emc_config.get("remove_entry_exit_signals", False):
df = remove_entry_exit_signals(df) df = remove_entry_exit_signals(df)
logger.debug(f"Received {len(df)} candle(s) for {key}") logger.debug(f"Received {len(df)} candle(s) for {key}")
@@ -388,7 +370,7 @@ class ExternalMessageConsumer:
last_analyzed=la, last_analyzed=la,
timeframe=timeframe, timeframe=timeframe,
candle_type=candle_type, candle_type=candle_type,
producer_name=producer_name producer_name=producer_name,
) )
if not did_append: if not did_append:
@@ -397,20 +379,17 @@ class ExternalMessageConsumer:
# Set to None for all candles if we missed a full df's worth of candles # Set to None for all candles if we missed a full df's worth of candles
n_missing = n_missing if n_missing < FULL_DATAFRAME_THRESHOLD else 1500 n_missing = n_missing if n_missing < FULL_DATAFRAME_THRESHOLD else 1500
logger.warning(f"Holes in data or no existing df, requesting {n_missing} candles " logger.warning(
f"for {key} from `{producer_name}`") f"Holes in data or no existing df, requesting {n_missing} candles "
f"for {key} from `{producer_name}`"
)
self.send_producer_request( self.send_producer_request(
producer_name, producer_name, WSAnalyzedDFRequest(data={"limit": n_missing, "pair": pair})
WSAnalyzedDFRequest(
data={
"limit": n_missing,
"pair": pair
}
)
) )
return return
logger.debug( logger.debug(
f"Consumed message from `{producer_name}` " f"Consumed message from `{producer_name}` "
f"of type `RPCMessageType.ANALYZED_DF` for {key}") f"of type `RPCMessageType.ANALYZED_DF` for {key}"
)
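The attributes read in __init__ above map directly onto the external_message_consumer configuration section. An illustrative example; the connection details are placeholders and the numeric values simply repeat the defaults used in the code:

external_message_consumer = {
    "enabled": True,
    "producers": [
        {
            "name": "default",
            "host": "127.0.0.1",
            "port": 8080,
            "secure": False,
            "ws_token": "token-of-the-producer-bot",
        }
    ],
    "wait_timeout": 30,  # seconds to wait for a message before pinging
    "ping_timeout": 10,
    "sleep_time": 10,  # back-off between reconnection attempts
    "remove_entry_exit_signals": False,
    "initial_candle_limit": 1500,
    "message_size_limit": 8,  # MB; converted to bytes via 8 << 20 == 8_388_608
}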

@@ -21,14 +21,14 @@ logger = logging.getLogger(__name__)
# Manually map symbol to ID for some common coins # Manually map symbol to ID for some common coins
# with duplicate coingecko entries # with duplicate coingecko entries
coingecko_mapping = { coingecko_mapping = {
'eth': 'ethereum', "eth": "ethereum",
'bnb': 'binancecoin', "bnb": "binancecoin",
'sol': 'solana', "sol": "solana",
'usdt': 'tether', "usdt": "tether",
'busd': 'binance-usd', "busd": "binance-usd",
'tusd': 'true-usd', "tusd": "true-usd",
'usdc': 'usd-coin', "usdc": "usd-coin",
'btc': 'bitcoin' "btc": "bitcoin",
} }
@@ -38,6 +38,7 @@ class CryptoToFiatConverter(LoggingMixin):
This object contains a list of pair Crypto, FIAT This object contains a list of pair Crypto, FIAT
This object is also a Singleton This object is also a Singleton
""" """
__instance = None __instance = None
_coingecko: CoinGeckoAPI = None _coingecko: CoinGeckoAPI = None
_coinlistings: List[Dict] = [] _coinlistings: List[Dict] = []
@@ -71,7 +72,8 @@ class CryptoToFiatConverter(LoggingMixin):
except RequestException as request_exception: except RequestException as request_exception:
if "429" in str(request_exception): if "429" in str(request_exception):
logger.warning( logger.warning(
"Too many requests for CoinGecko API, backing off and trying again later.") "Too many requests for CoinGecko API, backing off and trying again later."
)
# Set backoff timestamp to 60 seconds in the future # Set backoff timestamp to 60 seconds in the future
self._backoff = datetime.now().timestamp() + 60 self._backoff = datetime.now().timestamp() + 60
return return
@@ -80,9 +82,10 @@ class CryptoToFiatConverter(LoggingMixin):
"Could not load FIAT Cryptocurrency map for the following problem: " "Could not load FIAT Cryptocurrency map for the following problem: "
f"{request_exception}" f"{request_exception}"
) )
except (Exception) as exception: except Exception as exception:
logger.error( logger.error(
f"Could not load FIAT Cryptocurrency map for the following problem: {exception}") f"Could not load FIAT Cryptocurrency map for the following problem: {exception}"
)
def _get_gecko_id(self, crypto_symbol): def _get_gecko_id(self, crypto_symbol):
if not self._coinlistings: if not self._coinlistings:
@@ -93,13 +96,13 @@ class CryptoToFiatConverter(LoggingMixin):
return None return None
else: else:
return None return None
found = [x for x in self._coinlistings if x['symbol'].lower() == crypto_symbol] found = [x for x in self._coinlistings if x["symbol"].lower() == crypto_symbol]
if crypto_symbol in coingecko_mapping.keys(): if crypto_symbol in coingecko_mapping.keys():
found = [x for x in self._coinlistings if x['id'] == coingecko_mapping[crypto_symbol]] found = [x for x in self._coinlistings if x["id"] == coingecko_mapping[crypto_symbol]]
if len(found) == 1: if len(found) == 1:
return found[0]['id'] return found[0]["id"]
if len(found) > 0: if len(found) > 0:
# Wrong! # Wrong!
@@ -130,26 +133,23 @@ class CryptoToFiatConverter(LoggingMixin):
fiat_symbol = fiat_symbol.lower() fiat_symbol = fiat_symbol.lower()
inverse = False inverse = False
if crypto_symbol == 'usd': if crypto_symbol == "usd":
# usd corresponds to "uniswap-state-dollar" for coingecko. # usd corresponds to "uniswap-state-dollar" for coingecko.
# We'll therefore need to "swap" the currencies # We'll therefore need to "swap" the currencies
logger.info(f"reversing Rates {crypto_symbol}, {fiat_symbol}") logger.info(f"reversing Rates {crypto_symbol}, {fiat_symbol}")
crypto_symbol = fiat_symbol crypto_symbol = fiat_symbol
fiat_symbol = 'usd' fiat_symbol = "usd"
inverse = True inverse = True
symbol = f"{crypto_symbol}/{fiat_symbol}" symbol = f"{crypto_symbol}/{fiat_symbol}"
# Check if the fiat conversion you want is supported # Check if the fiat conversion you want is supported
if not self._is_supported_fiat(fiat=fiat_symbol): if not self._is_supported_fiat(fiat=fiat_symbol):
raise ValueError(f'The fiat {fiat_symbol} is not supported.') raise ValueError(f"The fiat {fiat_symbol} is not supported.")
price = self._pair_price.get(symbol, None) price = self._pair_price.get(symbol, None)
if not price: if not price:
price = self._find_price( price = self._find_price(crypto_symbol=crypto_symbol, fiat_symbol=fiat_symbol)
crypto_symbol=crypto_symbol,
fiat_symbol=fiat_symbol
)
if inverse and price != 0.0: if inverse and price != 0.0:
price = 1 / price price = 1 / price
self._pair_price[symbol] = price self._pair_price[symbol] = price
@@ -174,7 +174,7 @@ class CryptoToFiatConverter(LoggingMixin):
""" """
# Check if the fiat conversion you want is supported # Check if the fiat conversion you want is supported
if not self._is_supported_fiat(fiat=fiat_symbol): if not self._is_supported_fiat(fiat=fiat_symbol):
raise ValueError(f'The fiat {fiat_symbol} is not supported.') raise ValueError(f"The fiat {fiat_symbol} is not supported.")
# No need to convert if both crypto and fiat are the same # No need to convert if both crypto and fiat are the same
if crypto_symbol == fiat_symbol: if crypto_symbol == fiat_symbol:
@@ -185,16 +185,15 @@ class CryptoToFiatConverter(LoggingMixin):
if not _gecko_id: if not _gecko_id:
# return 0 for unsupported stake currencies (fiat-convert should not break the bot) # return 0 for unsupported stake currencies (fiat-convert should not break the bot)
self.log_once( self.log_once(
f"unsupported crypto-symbol {crypto_symbol.upper()} - returning 0.0", f"unsupported crypto-symbol {crypto_symbol.upper()} - returning 0.0", logger.warning
logger.warning) )
return 0.0 return 0.0
try: try:
return float( return float(
self._coingecko.get_price( self._coingecko.get_price(ids=_gecko_id, vs_currencies=fiat_symbol)[_gecko_id][
ids=_gecko_id, fiat_symbol
vs_currencies=fiat_symbol ]
)[_gecko_id][fiat_symbol]
) )
except Exception as exception: except Exception as exception:
logger.error("Error in _find_price: %s", exception) logger.error("Error in _find_price: %s", exception)
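The call wrapped by _find_price above is pycoingecko's get_price; stripped of the caching, inversion and error handling it reduces to roughly this (the symbol and fiat are examples):

from pycoingecko import CoinGeckoAPI

cg = CoinGeckoAPI()
# "btc" resolves to the coingecko id "bitcoin" via the coingecko_mapping above.
price = cg.get_price(ids="bitcoin", vs_currencies="usd")["bitcoin"]["usd"]
print(f"1 BTC = {price} USD")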

File diff suppressed because it is too large.
@@ -1,6 +1,7 @@
""" """
This module contains class to manage RPC communications (Telegram, API, ...) This module contains class to manage RPC communications (Telegram, API, ...)
""" """
import logging import logging
from collections import deque from collections import deque
from typing import List from typing import List
@@ -25,37 +26,41 @@ class RPCManager:
self._rpc = RPC(freqtrade) self._rpc = RPC(freqtrade)
config = freqtrade.config config = freqtrade.config
# Enable telegram # Enable telegram
if config.get('telegram', {}).get('enabled', False): if config.get("telegram", {}).get("enabled", False):
logger.info('Enabling rpc.telegram ...') logger.info("Enabling rpc.telegram ...")
from freqtrade.rpc.telegram import Telegram from freqtrade.rpc.telegram import Telegram
self.registered_modules.append(Telegram(self._rpc, config)) self.registered_modules.append(Telegram(self._rpc, config))
# Enable discord # Enable discord
if config.get('discord', {}).get('enabled', False): if config.get("discord", {}).get("enabled", False):
logger.info('Enabling rpc.discord ...') logger.info("Enabling rpc.discord ...")
from freqtrade.rpc.discord import Discord from freqtrade.rpc.discord import Discord
self.registered_modules.append(Discord(self._rpc, config)) self.registered_modules.append(Discord(self._rpc, config))
# Enable Webhook # Enable Webhook
if config.get('webhook', {}).get('enabled', False): if config.get("webhook", {}).get("enabled", False):
logger.info('Enabling rpc.webhook ...') logger.info("Enabling rpc.webhook ...")
from freqtrade.rpc.webhook import Webhook from freqtrade.rpc.webhook import Webhook
self.registered_modules.append(Webhook(self._rpc, config)) self.registered_modules.append(Webhook(self._rpc, config))
# Enable local rest api server for cmd line control # Enable local rest api server for cmd line control
if config.get('api_server', {}).get('enabled', False): if config.get("api_server", {}).get("enabled", False):
logger.info('Enabling rpc.api_server') logger.info("Enabling rpc.api_server")
from freqtrade.rpc.api_server import ApiServer from freqtrade.rpc.api_server import ApiServer
apiserver = ApiServer(config) apiserver = ApiServer(config)
apiserver.add_rpc_handler(self._rpc) apiserver.add_rpc_handler(self._rpc)
self.registered_modules.append(apiserver) self.registered_modules.append(apiserver)
def cleanup(self) -> None: def cleanup(self) -> None:
"""Stops all enabled rpc modules""" """Stops all enabled rpc modules"""
logger.info('Cleaning up rpc modules ...') logger.info("Cleaning up rpc modules ...")
while self.registered_modules: while self.registered_modules:
mod = self.registered_modules.pop() mod = self.registered_modules.pop()
logger.info('Cleaning up rpc.%s ...', mod.name) logger.info("Cleaning up rpc.%s ...", mod.name)
mod.cleanup() mod.cleanup()
del mod del mod
@@ -68,16 +73,16 @@ class RPCManager:
'status': 'stopping bot' 'status': 'stopping bot'
} }
""" """
if msg.get('type') not in NO_ECHO_MESSAGES: if msg.get("type") not in NO_ECHO_MESSAGES:
logger.info('Sending rpc message: %s', msg) logger.info("Sending rpc message: %s", msg)
for mod in self.registered_modules: for mod in self.registered_modules:
logger.debug('Forwarding message to rpc.%s', mod.name) logger.debug("Forwarding message to rpc.%s", mod.name)
try: try:
mod.send_msg(msg) mod.send_msg(msg)
except NotImplementedError: except NotImplementedError:
logger.error(f"Message type '{msg['type']}' not implemented by handler {mod.name}.") logger.error(f"Message type '{msg['type']}' not implemented by handler {mod.name}.")
except Exception: except Exception:
logger.exception('Exception occurred within RPC module %s', mod.name) logger.exception("Exception occurred within RPC module %s", mod.name)
def process_msg_queue(self, queue: deque) -> None: def process_msg_queue(self, queue: deque) -> None:
""" """
@@ -85,47 +90,54 @@ class RPCManager:
""" """
while queue: while queue:
msg = queue.popleft() msg = queue.popleft()
logger.info('Sending rpc strategy_msg: %s', msg) logger.info("Sending rpc strategy_msg: %s", msg)
for mod in self.registered_modules: for mod in self.registered_modules:
if mod._config.get(mod.name, {}).get('allow_custom_messages', False): if mod._config.get(mod.name, {}).get("allow_custom_messages", False):
mod.send_msg({ mod.send_msg(
'type': RPCMessageType.STRATEGY_MSG, {
'msg': msg, "type": RPCMessageType.STRATEGY_MSG,
}) "msg": msg,
}
)
def startup_messages(self, config: Config, pairlist, protections) -> None: def startup_messages(self, config: Config, pairlist, protections) -> None:
if config['dry_run']: if config["dry_run"]:
self.send_msg({ self.send_msg(
'type': RPCMessageType.WARNING, {
'status': 'Dry run is enabled. All trades are simulated.' "type": RPCMessageType.WARNING,
}) "status": "Dry run is enabled. All trades are simulated.",
stake_currency = config['stake_currency'] }
stake_amount = config['stake_amount'] )
minimal_roi = config['minimal_roi'] stake_currency = config["stake_currency"]
stoploss = config['stoploss'] stake_amount = config["stake_amount"]
trailing_stop = config['trailing_stop'] minimal_roi = config["minimal_roi"]
timeframe = config['timeframe'] stoploss = config["stoploss"]
exchange_name = config['exchange']['name'] trailing_stop = config["trailing_stop"]
strategy_name = config.get('strategy', '') timeframe = config["timeframe"]
pos_adjust_enabled = 'On' if config['position_adjustment_enable'] else 'Off' exchange_name = config["exchange"]["name"]
self.send_msg({ strategy_name = config.get("strategy", "")
'type': RPCMessageType.STARTUP, pos_adjust_enabled = "On" if config["position_adjustment_enable"] else "Off"
'status': f'*Exchange:* `{exchange_name}`\n' self.send_msg(
{
"type": RPCMessageType.STARTUP,
"status": f'*Exchange:* `{exchange_name}`\n'
f'*Stake per trade:* `{stake_amount} {stake_currency}`\n' f'*Stake per trade:* `{stake_amount} {stake_currency}`\n'
f'*Minimum ROI:* `{minimal_roi}`\n' f'*Minimum ROI:* `{minimal_roi}`\n'
f'*{"Trailing " if trailing_stop else ""}Stoploss:* `{stoploss}`\n' f'*{"Trailing " if trailing_stop else ""}Stoploss:* `{stoploss}`\n'
f'*Position adjustment:* `{pos_adjust_enabled}`\n' f'*Position adjustment:* `{pos_adjust_enabled}`\n'
f'*Timeframe:* `{timeframe}`\n' f'*Timeframe:* `{timeframe}`\n'
f'*Strategy:* `{strategy_name}`' f'*Strategy:* `{strategy_name}`',
}) }
self.send_msg({ )
'type': RPCMessageType.STARTUP, self.send_msg(
'status': f'Searching for {stake_currency} pairs to buy and sell ' {
f'based on {pairlist.short_desc()}' "type": RPCMessageType.STARTUP,
}) "status": f"Searching for {stake_currency} pairs to buy and sell "
f"based on {pairlist.short_desc()}",
}
)
if len(protections.name_list) > 0: if len(protections.name_list) > 0:
prots = '\n'.join([p for prot in protections.short_desc() for k, p in prot.items()]) prots = "\n".join([p for prot in protections.short_desc() for k, p in prot.items()])
self.send_msg({ self.send_msg(
'type': RPCMessageType.STARTUP, {"type": RPCMessageType.STARTUP, "status": f"Using Protections: \n{prots}"}
'status': f'Using Protections: \n{prots}' )
})

@@ -15,12 +15,14 @@ class RPCSendMsgBase(TypedDict):
class RPCStatusMsg(RPCSendMsgBase): class RPCStatusMsg(RPCSendMsgBase):
"""Used for Status, Startup and Warning messages""" """Used for Status, Startup and Warning messages"""
type: Literal[RPCMessageType.STATUS, RPCMessageType.STARTUP, RPCMessageType.WARNING] type: Literal[RPCMessageType.STATUS, RPCMessageType.STARTUP, RPCMessageType.WARNING]
status: str status: str
class RPCStrategyMsg(RPCSendMsgBase): class RPCStrategyMsg(RPCSendMsgBase):
"""Used for Status, Startup and Warning messages""" """Used for Status, Startup and Warning messages"""
type: Literal[RPCMessageType.STRATEGY_MSG] type: Literal[RPCMessageType.STRATEGY_MSG]
msg: str msg: str
@@ -108,12 +110,14 @@ class _AnalyzedDFData(TypedDict):
class RPCAnalyzedDFMsg(RPCSendMsgBase): class RPCAnalyzedDFMsg(RPCSendMsgBase):
"""New Analyzed dataframe message""" """New Analyzed dataframe message"""
type: Literal[RPCMessageType.ANALYZED_DF] type: Literal[RPCMessageType.ANALYZED_DF]
data: _AnalyzedDFData data: _AnalyzedDFData
class RPCNewCandleMsg(RPCSendMsgBase): class RPCNewCandleMsg(RPCSendMsgBase):
"""New candle ping message, issued once per new candle/pair""" """New candle ping message, issued once per new candle/pair"""
type: Literal[RPCMessageType.NEW_CANDLE] type: Literal[RPCMessageType.NEW_CANDLE]
data: PairWithTimeframe data: PairWithTimeframe
@@ -131,5 +135,5 @@ RPCSendMsg = Union[
RPCExitMsg, RPCExitMsg,
RPCExitCancelMsg, RPCExitCancelMsg,
RPCAnalyzedDFMsg, RPCAnalyzedDFMsg,
RPCNewCandleMsg RPCNewCandleMsg,
] ]

File diff suppressed because it is too large.
@@ -1,6 +1,7 @@
""" """
This module manages webhook communication This module manages webhook communication
""" """
import logging import logging
import time import time
from typing import Any, Dict, Optional from typing import Any, Dict, Optional
@@ -15,7 +16,7 @@ from freqtrade.rpc.rpc_types import RPCSendMsg
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
logger.debug('Included module rpc.webhook ...') logger.debug("Included module rpc.webhook ...")
class Webhook(RPCHandler): class Webhook(RPCHandler):
@@ -30,11 +31,11 @@ class Webhook(RPCHandler):
""" """
super().__init__(rpc, config) super().__init__(rpc, config)
self._url = self._config['webhook']['url'] self._url = self._config["webhook"]["url"]
self._format = self._config['webhook'].get('format', 'form') self._format = self._config["webhook"].get("format", "form")
self._retries = self._config['webhook'].get('retries', 0) self._retries = self._config["webhook"].get("retries", 0)
self._retry_delay = self._config['webhook'].get('retry_delay', 0.1) self._retry_delay = self._config["webhook"].get("retry_delay", 0.1)
self._timeout = self._config['webhook'].get('timeout', 10) self._timeout = self._config["webhook"].get("timeout", 10)
def cleanup(self) -> None: def cleanup(self) -> None:
""" """
@@ -44,35 +45,38 @@ class Webhook(RPCHandler):
pass pass
def _get_value_dict(self, msg: RPCSendMsg) -> Optional[Dict[str, Any]]: def _get_value_dict(self, msg: RPCSendMsg) -> Optional[Dict[str, Any]]:
whconfig = self._config['webhook'] whconfig = self._config["webhook"]
if msg['type'].value in whconfig: if msg["type"].value in whconfig:
# Explicit types should have priority # Explicit types should have priority
valuedict = whconfig.get(msg['type'].value) valuedict = whconfig.get(msg["type"].value)
# Deprecated 2022.10 - only keep generic method. # Deprecated 2022.10 - only keep generic method.
elif msg['type'] in [RPCMessageType.ENTRY]: elif msg["type"] in [RPCMessageType.ENTRY]:
valuedict = whconfig.get('webhookentry') valuedict = whconfig.get("webhookentry")
elif msg['type'] in [RPCMessageType.ENTRY_CANCEL]: elif msg["type"] in [RPCMessageType.ENTRY_CANCEL]:
valuedict = whconfig.get('webhookentrycancel') valuedict = whconfig.get("webhookentrycancel")
elif msg['type'] in [RPCMessageType.ENTRY_FILL]: elif msg["type"] in [RPCMessageType.ENTRY_FILL]:
valuedict = whconfig.get('webhookentryfill') valuedict = whconfig.get("webhookentryfill")
elif msg['type'] == RPCMessageType.EXIT: elif msg["type"] == RPCMessageType.EXIT:
valuedict = whconfig.get('webhookexit') valuedict = whconfig.get("webhookexit")
elif msg['type'] == RPCMessageType.EXIT_FILL: elif msg["type"] == RPCMessageType.EXIT_FILL:
valuedict = whconfig.get('webhookexitfill') valuedict = whconfig.get("webhookexitfill")
elif msg['type'] == RPCMessageType.EXIT_CANCEL: elif msg["type"] == RPCMessageType.EXIT_CANCEL:
valuedict = whconfig.get('webhookexitcancel') valuedict = whconfig.get("webhookexitcancel")
elif msg['type'] in (RPCMessageType.STATUS, elif msg["type"] in (
RPCMessageType.STATUS,
RPCMessageType.STARTUP, RPCMessageType.STARTUP,
RPCMessageType.EXCEPTION, RPCMessageType.EXCEPTION,
RPCMessageType.WARNING): RPCMessageType.WARNING,
valuedict = whconfig.get('webhookstatus') ):
elif msg['type'] in ( valuedict = whconfig.get("webhookstatus")
elif msg["type"] in (
RPCMessageType.PROTECTION_TRIGGER, RPCMessageType.PROTECTION_TRIGGER,
RPCMessageType.PROTECTION_TRIGGER_GLOBAL, RPCMessageType.PROTECTION_TRIGGER_GLOBAL,
RPCMessageType.WHITELIST, RPCMessageType.WHITELIST,
RPCMessageType.ANALYZED_DF, RPCMessageType.ANALYZED_DF,
RPCMessageType.NEW_CANDLE, RPCMessageType.NEW_CANDLE,
RPCMessageType.STRATEGY_MSG): RPCMessageType.STRATEGY_MSG,
):
# Don't fail for non-implemented types # Don't fail for non-implemented types
return None return None
return valuedict return valuedict
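The lookup above keys the webhook configuration by message type value, with the older webhookentry-style keys kept as a deprecated fallback. An illustrative configuration; the URL, the per-type field names and the format placeholders are assumptions for illustration:

webhook = {
    "enabled": True,
    "url": "https://example.com/freqtrade-hook",
    "format": "json",
    "retries": 2,
    "retry_delay": 0.2,
    "timeout": 10,
    "entry": {"text": "Entering {pair} at {limit}"},
    "exit_fill": {"text": "Exited {pair} with {profit_amount} {stake_currency}"},
    "status": {"text": "Status: {status}"},
}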
@@ -80,18 +84,20 @@ class Webhook(RPCHandler):
def send_msg(self, msg: RPCSendMsg) -> None: def send_msg(self, msg: RPCSendMsg) -> None:
"""Send a message to telegram channel""" """Send a message to telegram channel"""
try: try:
valuedict = self._get_value_dict(msg) valuedict = self._get_value_dict(msg)
if not valuedict: if not valuedict:
logger.debug("Message type '%s' not configured for webhooks", msg['type']) logger.debug("Message type '%s' not configured for webhooks", msg["type"])
return return
payload = {key: value.format(**msg) for (key, value) in valuedict.items()} payload = {key: value.format(**msg) for (key, value) in valuedict.items()}
self._send_msg(payload) self._send_msg(payload)
except KeyError as exc: except KeyError as exc:
logger.exception("Problem calling Webhook. Please check your webhook configuration. " logger.exception(
"Exception: %s", exc) "Problem calling Webhook. Please check your webhook configuration. "
"Exception: %s",
exc,
)
def _send_msg(self, payload: dict) -> None: def _send_msg(self, payload: dict) -> None:
"""do the actual call to the webhook""" """do the actual call to the webhook"""
@@ -107,16 +113,19 @@ class Webhook(RPCHandler):
attempts += 1 attempts += 1
try: try:
if self._format == 'form': if self._format == "form":
response = post(self._url, data=payload, timeout=self._timeout) response = post(self._url, data=payload, timeout=self._timeout)
elif self._format == 'json': elif self._format == "json":
response = post(self._url, json=payload, timeout=self._timeout) response = post(self._url, json=payload, timeout=self._timeout)
elif self._format == 'raw': elif self._format == "raw":
response = post(self._url, data=payload['data'], response = post(
headers={'Content-Type': 'text/plain'}, self._url,
timeout=self._timeout) data=payload["data"],
headers={"Content-Type": "text/plain"},
timeout=self._timeout,
)
else: else:
raise NotImplementedError(f'Unknown format: {self._format}') raise NotImplementedError(f"Unknown format: {self._format}")
# Throw a RequestException if the post was not successful # Throw a RequestException if the post was not successful
response.raise_for_status() response.raise_for_status()