From 99de6515a09a443c62542ebca54f6e4accf0359a Mon Sep 17 00:00:00 2001 From: Peter Steinberger Date: Sat, 7 Mar 2026 22:40:53 +0000 Subject: [PATCH] fix(telegram): surface fallback on dispatch failures (#39209, thanks @riftzen-bit) Co-authored-by: riftzen-bit --- CHANGELOG.md | 1 + src/telegram/bot-handlers.ts | 15 +++++++++- src/telegram/bot-message-dispatch.test.ts | 17 +++++++---- src/telegram/bot-message-dispatch.ts | 14 +++++++-- src/telegram/bot-message.test.ts | 25 ++++++++++++++++ src/telegram/bot-message.ts | 36 ++++++++++++++++------- 6 files changed, 88 insertions(+), 20 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4d77ffc4380..d5d749e954c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -292,6 +292,7 @@ Docs: https://docs.openclaw.ai - Heartbeat/requests-in-flight scheduling: stop advancing `nextDueMs` and avoid immediate `scheduleNext()` timer overrides on requests-in-flight skips, so wake-layer retry cooldowns are honored and heartbeat cadence no longer drifts under sustained contention. (#39182) Thanks @MumuTW. - Memory/SQLite contention resilience: re-apply `PRAGMA busy_timeout` on every sync-store and QMD connection open so process restarts/reopens no longer revert to immediate `SQLITE_BUSY` failures under lock contention. (#39183) Thanks @MumuTW. - Gateway/webchat route safety: block webchat/control-ui clients from inheriting stored external delivery routes on channel-scoped sessions (while preserving route inheritance for UI/TUI clients), preventing cross-channel leakage from scoped chats. (#39175) Thanks @widingmarcus-cyber. +- Telegram error-surface resilience: return a user-visible fallback reply when dispatch/debounce processing fails instead of going silent, while preserving draft-stream cleanup and best-effort thread-scoped fallback delivery. (#39209) Thanks @riftzen-bit. 
## 2026.3.2 diff --git a/src/telegram/bot-handlers.ts b/src/telegram/bot-handlers.ts index 34b8b8de208..a83e99d41c5 100644 --- a/src/telegram/bot-handlers.ts +++ b/src/telegram/bot-handlers.ts @@ -262,8 +262,21 @@ export const registerTelegramHandlers = ({ replyMedia, ); }, - onError: (err) => { + onError: (err, items) => { runtime.error?.(danger(`telegram debounce flush failed: ${String(err)}`)); + const chatId = items[0]?.msg.chat.id; + if (chatId != null) { + const threadId = items[0]?.msg.message_thread_id; + void bot.api + .sendMessage( + chatId, + "Something went wrong while processing your message. Please try again.", + threadId != null ? { message_thread_id: threadId } : undefined, + ) + .catch((sendErr) => { + logVerbose(`telegram: error fallback send failed: ${String(sendErr)}`); + }); + } }, }); diff --git a/src/telegram/bot-message-dispatch.test.ts b/src/telegram/bot-message-dispatch.test.ts index 2e6cf158f10..001660b6aa1 100644 --- a/src/telegram/bot-message-dispatch.test.ts +++ b/src/telegram/bot-message-dispatch.test.ts @@ -1775,18 +1775,25 @@ describe("dispatchTelegramMessage draft streaming", () => { expect(draftStream.clear).toHaveBeenCalledTimes(1); }); - it("clears preview when dispatcher throws before fallback phase", async () => { + it("sends error fallback and clears preview when dispatcher throws", async () => { const draftStream = createDraftStream(999); createTelegramDraftStream.mockReturnValue(draftStream); dispatchReplyWithBufferedBlockDispatcher.mockRejectedValue(new Error("dispatcher exploded")); + deliverReplies.mockResolvedValue({ delivered: true }); - await expect(dispatchWithContext({ context: createContext() })).rejects.toThrow( - "dispatcher exploded", - ); + await dispatchWithContext({ context: createContext() }); expect(draftStream.stop).toHaveBeenCalledTimes(1); expect(draftStream.clear).toHaveBeenCalledTimes(1); - expect(deliverReplies).not.toHaveBeenCalled(); + // Error fallback message should be delivered to the user 
instead of silent failure + expect(deliverReplies).toHaveBeenCalledTimes(1); + expect(deliverReplies).toHaveBeenCalledWith( + expect.objectContaining({ + replies: [ + { text: "Something went wrong while processing your request. Please try again." }, + ], + }), + ); }); it("supports concurrent dispatches with independent previews", async () => { diff --git a/src/telegram/bot-message-dispatch.ts b/src/telegram/bot-message-dispatch.ts index e6f2f65218d..859a35688f6 100644 --- a/src/telegram/bot-message-dispatch.ts +++ b/src/telegram/bot-message-dispatch.ts @@ -507,6 +507,7 @@ export const dispatchTelegramMessage = async ({ }, }); + let dispatchError: unknown; try { ({ queuedFinal } = await dispatchReplyWithBufferedBlockDispatcher({ ctx: ctxPayload, @@ -680,6 +681,9 @@ export const dispatchTelegramMessage = async ({ onModelSelected, }, })); + } catch (err) { + dispatchError = err; + runtime.error?.(danger(`telegram dispatch failed: ${String(err)}`)); } finally { // Upstream assistant callbacks are fire-and-forget; drain queued lane work // before stream cleanup so boundary rotations/materialization complete first. @@ -747,11 +751,15 @@ export const dispatchTelegramMessage = async ({ let sentFallback = false; const deliverySummary = deliveryState.snapshot(); if ( - !deliverySummary.delivered && - (deliverySummary.skippedNonSilent > 0 || deliverySummary.failedNonSilent > 0) + dispatchError || + (!deliverySummary.delivered && + (deliverySummary.skippedNonSilent > 0 || deliverySummary.failedNonSilent > 0)) ) { + const fallbackText = dispatchError + ? "Something went wrong while processing your request. Please try again." 
+ : EMPTY_RESPONSE_FALLBACK; const result = await deliverReplies({ - replies: [{ text: EMPTY_RESPONSE_FALLBACK }], + replies: [{ text: fallbackText }], ...deliveryBaseOptions, }); sentFallback = result.delivered; diff --git a/src/telegram/bot-message.test.ts b/src/telegram/bot-message.test.ts index 38b9a06d322..1837e6861f1 100644 --- a/src/telegram/bot-message.test.ts +++ b/src/telegram/bot-message.test.ts @@ -72,4 +72,29 @@ describe("telegram bot message processor", () => { await processSampleMessage(processMessage); expect(dispatchTelegramMessage).not.toHaveBeenCalled(); }); + + it("sends user-visible fallback when dispatch throws", async () => { + const sendMessage = vi.fn().mockResolvedValue(undefined); + const runtimeError = vi.fn(); + buildTelegramMessageContext.mockResolvedValue({ + chatId: 123, + threadSpec: { id: 456 }, + route: { sessionKey: "agent:main:main" }, + }); + dispatchTelegramMessage.mockRejectedValue(new Error("dispatch exploded")); + + const processMessage = createTelegramMessageProcessor({ + ...baseDeps, + bot: { api: { sendMessage } }, + runtime: { error: runtimeError }, + } as unknown as Parameters<typeof createTelegramMessageProcessor>[0]); + await expect(processSampleMessage(processMessage)).resolves.toBeUndefined(); + + expect(sendMessage).toHaveBeenCalledWith( + 123, + "Something went wrong while processing your request. 
Please try again.", + { message_thread_id: 456 }, + ); + expect(runtimeError).toHaveBeenCalledWith(expect.stringContaining("dispatch exploded")); + }); }); diff --git a/src/telegram/bot-message.ts b/src/telegram/bot-message.ts index 15fb1bc943d..3fa58bb9ed8 100644 --- a/src/telegram/bot-message.ts +++ b/src/telegram/bot-message.ts @@ -1,5 +1,6 @@ import type { ReplyToMode } from "../config/config.js"; import type { TelegramAccountConfig } from "../config/types.telegram.js"; +import { danger } from "../globals.js"; import type { RuntimeEnv } from "../runtime.js"; import { buildTelegramMessageContext, @@ -78,16 +79,29 @@ export const createTelegramMessageProcessor = (deps: TelegramMessageProcessorDep if (!context) { return; } - await dispatchTelegramMessage({ - context, - bot, - cfg, - runtime, - replyToMode, - streamMode, - textLimit, - telegramCfg, - opts, - }); + try { + await dispatchTelegramMessage({ + context, + bot, + cfg, + runtime, + replyToMode, + streamMode, + textLimit, + telegramCfg, + opts, + }); + } catch (err) { + runtime.error?.(danger(`telegram message processing failed: ${String(err)}`)); + try { + await bot.api.sendMessage( + context.chatId, + "Something went wrong while processing your request. Please try again.", + context.threadSpec?.id != null ? { message_thread_id: context.threadSpec.id } : undefined, + ); + } catch { + // Best-effort fallback; delivery may fail if the bot was blocked or the chat is invalid. + } + } }; };