fix(media): bound remote error-body snippet reads

This commit is contained in:
Peter Steinberger
2026-03-22 22:42:10 -07:00
parent fd5555d5be
commit 81445a9010
5 changed files with 187 additions and 58 deletions

View File

@@ -118,6 +118,7 @@ Docs: https://docs.openclaw.ai
- CLI: avoid loading provider discovery during startup model normalization. (#46522) Thanks @ItsAditya-xyz and @vincentkoc.
- CLI/status: keep `status --json` stdout clean by skipping plugin compatibility scans that were not rendered in the JSON payload. (#52449) Thanks @cgdusek.
- Agents/Telegram: avoid rebuilding the full model catalog on ordinary inbound replies so Telegram message handling no longer pays multi-second core startup latency before reply generation. Thanks @vincentkoc.
- Media/security: bound remote-media error-body snippets with the same streaming caps and idle timeouts as successful downloads, so malicious HTTP error responses cannot force unbounded buffering before OpenClaw throws.
- Gateway/Discord startup: load only configured channel plugins during gateway boot, and lazy-load Discord provider/session runtime setup so startup stops importing unrelated providers and trims cold-start delay. Thanks @vincentkoc.
- Security/exec: harden macOS allowlist resolution against wrapper and `env` spoofing, require fresh approval for inline interpreter eval with `tools.exec.strictInlineEval`, wrap Discord guild message bodies as untrusted external content, and add audit findings for risky exec approval and open-channel combinations.
- Agents/inbound: lazy-load media and link understanding for plain-text turns and cache synced auth stores by auth-file state so ordinary inbound replies avoid unnecessary startup churn. Thanks @vincentkoc.

View File

@@ -186,6 +186,30 @@ describe("fetchRemoteMedia", () => {
});
});
it("bounds error-body snippets instead of reading the full response", async () => {
const hiddenTail = `${" ".repeat(9_000)}BAD`;
const fetchImpl = vi.fn(
async () =>
new Response(makeStream([new TextEncoder().encode(hiddenTail)]), {
status: 400,
statusText: "Bad Request",
}),
);
const result = await fetchRemoteMedia({
url: "https://example.com/file.bin",
fetchImpl,
maxBytes: 1024,
}).catch((err: unknown) => err);
expect(result).toBeInstanceOf(Error);
if (!(result instanceof Error)) {
expect.unreachable("expected fetchRemoteMedia to reject");
}
expect(result.message).not.toContain("BAD");
expect(result.message).not.toContain("body:");
});
it("blocks private IP literals before fetching", async () => {
const fetchImpl = vi.fn();
await expect(

View File

@@ -4,7 +4,7 @@ import { fetchWithSsrFGuard, withStrictGuardedFetchMode } from "../infra/net/fet
import type { LookupFn, PinnedDispatcherPolicy, SsrFPolicy } from "../infra/net/ssrf.js";
import { redactSensitiveText } from "../logging/redact.js";
import { detectMime, extensionForMime } from "./mime.js";
import { readResponseWithLimit } from "./read-response-with-limit.js";
import { readResponseTextSnippet, readResponseWithLimit } from "./read-response-with-limit.js";
type FetchMediaResult = {
buffer: Buffer;
@@ -71,20 +71,19 @@ function parseContentDispositionFileName(header?: string | null): string | undef
return undefined;
}
async function readErrorBodySnippet(res: Response, maxChars = 200): Promise<string | undefined> {
async function readErrorBodySnippet(
res: Response,
opts?: {
maxChars?: number;
chunkTimeoutMs?: number;
},
): Promise<string | undefined> {
try {
const text = await res.text();
if (!text) {
return undefined;
}
const collapsed = text.replace(/\s+/g, " ").trim();
if (!collapsed) {
return undefined;
}
if (collapsed.length <= maxChars) {
return collapsed;
}
return `${collapsed.slice(0, maxChars)}`;
return await readResponseTextSnippet(res, {
maxBytes: 8 * 1024,
maxChars: opts?.maxChars,
chunkTimeoutMs: opts?.chunkTimeoutMs,
});
} catch {
return undefined;
}
@@ -185,7 +184,7 @@ export async function fetchRemoteMedia(options: FetchMediaOptions): Promise<Fetc
if (!res.body) {
detail = `HTTP ${res.status}${statusText}; empty response body`;
} else {
const snippet = await readErrorBodySnippet(res);
const snippet = await readErrorBodySnippet(res, { chunkTimeoutMs: readIdleTimeoutMs });
if (snippet) {
detail += `; body: ${snippet}`;
}

View File

@@ -1,5 +1,5 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { readResponseWithLimit } from "./read-response-with-limit.js";
import { readResponseTextSnippet, readResponseWithLimit } from "./read-response-with-limit.js";
function makeStream(chunks: Uint8Array[], delayMs?: number) {
return new ReadableStream<Uint8Array>({
@@ -81,3 +81,38 @@ describe("readResponseWithLimit", () => {
}
});
});
describe("readResponseTextSnippet", () => {
beforeEach(() => {
vi.useRealTimers();
});
it("returns collapsed text within the limit", async () => {
const res = new Response(makeStream([new TextEncoder().encode("hello \n world")]));
await expect(readResponseTextSnippet(res, { maxBytes: 64, maxChars: 50 })).resolves.toBe(
"hello world",
);
});
it("truncates to the byte limit without reading the full body", async () => {
const res = new Response(
makeStream([new TextEncoder().encode("12345"), new TextEncoder().encode("67890")]),
);
await expect(readResponseTextSnippet(res, { maxBytes: 7, maxChars: 50 })).resolves.toBe(
"1234567…",
);
});
it("applies the idle timeout while reading snippets", async () => {
vi.useFakeTimers();
try {
const res = new Response(makeStallingStream([new Uint8Array([65, 66])]));
const readPromise = readResponseTextSnippet(res, { maxBytes: 64, chunkTimeoutMs: 50 });
const rejection = expect(readPromise).rejects.toThrow(/stalled/i);
await vi.advanceTimersByTimeAsync(60);
await rejection;
} finally {
vi.useRealTimers();
}
}, 5_000);
});

View File

@@ -37,6 +37,84 @@ async function readChunkWithIdleTimeout(
});
}
// Result of a bounded body read: the kept bytes, the size observed while
// reading (total body bytes when fully read; bytes seen up to and including
// the overflowing chunk when truncated), and whether truncation occurred.
type ReadResponsePrefixResult = {
  buffer: Buffer;
  size: number;
  truncated: boolean;
};
/**
 * Read at most `maxBytes` from a Response body without buffering the rest,
 * optionally enforcing a per-chunk idle timeout while streaming.
 */
async function readResponsePrefix(
  res: Response,
  maxBytes: number,
  opts?: {
    chunkTimeoutMs?: number;
  },
): Promise<ReadResponsePrefixResult> {
  const idleTimeoutMs = opts?.chunkTimeoutMs;
  const body = res.body;
  // Non-streaming bodies: buffer fully, then clamp to the cap.
  if (!body || typeof body.getReader !== "function") {
    const whole = Buffer.from(await res.arrayBuffer());
    return whole.length > maxBytes
      ? { buffer: whole.subarray(0, maxBytes), size: whole.length, truncated: true }
      : { buffer: whole, size: whole.length, truncated: false };
  }
  const reader = body.getReader();
  const collected: Uint8Array[] = [];
  let bytesKept = 0;
  let reportedSize = 0;
  let truncated = false;
  try {
    for (;;) {
      const step = idleTimeoutMs
        ? await readChunkWithIdleTimeout(reader, idleTimeoutMs)
        : await reader.read();
      if (step.done) {
        reportedSize = bytesKept;
        break;
      }
      const chunk = step.value;
      if (!chunk?.length) {
        continue;
      }
      const wouldBe = bytesKept + chunk.length;
      if (wouldBe > maxBytes) {
        // Keep only what still fits, record how far we got, and stop pulling
        // from the stream so the remote cannot force further buffering.
        const room = maxBytes - bytesKept;
        if (room > 0) {
          collected.push(chunk.subarray(0, room));
          bytesKept += room;
        }
        reportedSize = wouldBe;
        truncated = true;
        try {
          await reader.cancel();
        } catch {}
        break;
      }
      collected.push(chunk);
      bytesKept = wouldBe;
      reportedSize = bytesKept;
    }
  } finally {
    try {
      reader.releaseLock();
    } catch {}
  }
  return {
    buffer: Buffer.concat(
      collected.map((chunk) => Buffer.from(chunk)),
      bytesKept,
    ),
    size: reportedSize,
    truncated,
  };
}
export async function readResponseWithLimit(
res: Response,
maxBytes: number,
@@ -49,47 +127,39 @@ export async function readResponseWithLimit(
opts?.onOverflow ??
((params: { size: number; maxBytes: number }) =>
new Error(`Content too large: ${params.size} bytes (limit: ${params.maxBytes} bytes)`));
const chunkTimeoutMs = opts?.chunkTimeoutMs;
const body = res.body;
if (!body || typeof body.getReader !== "function") {
const fallback = Buffer.from(await res.arrayBuffer());
if (fallback.length > maxBytes) {
throw onOverflow({ size: fallback.length, maxBytes, res });
}
return fallback;
const prefix = await readResponsePrefix(res, maxBytes, { chunkTimeoutMs: opts?.chunkTimeoutMs });
if (prefix.truncated) {
throw onOverflow({ size: prefix.size, maxBytes, res });
}
const reader = body.getReader();
const chunks: Uint8Array[] = [];
let total = 0;
try {
while (true) {
const { done, value } = chunkTimeoutMs
? await readChunkWithIdleTimeout(reader, chunkTimeoutMs)
: await reader.read();
if (done) {
break;
}
if (value?.length) {
total += value.length;
if (total > maxBytes) {
try {
await reader.cancel();
} catch {}
throw onOverflow({ size: total, maxBytes, res });
}
chunks.push(value);
}
}
} finally {
try {
reader.releaseLock();
} catch {}
}
return Buffer.concat(
chunks.map((chunk) => Buffer.from(chunk)),
total,
);
return prefix.buffer;
}
/**
 * Read a bounded, whitespace-collapsed text excerpt from a response body.
 *
 * At most `maxBytes` (default 8 KiB) are pulled from the stream and at most
 * `maxChars` (default 200) characters are returned; an optional per-chunk
 * idle timeout guards against stalling servers. Returns undefined for
 * empty or all-whitespace bodies. Truncation — at either the byte cap or
 * the char cap — is marked with a trailing ellipsis, matching the unit
 * tests (e.g. a 7-byte cap over "1234567890" yields "1234567…").
 */
export async function readResponseTextSnippet(
  res: Response,
  opts?: {
    maxBytes?: number;
    maxChars?: number;
    chunkTimeoutMs?: number;
  },
): Promise<string | undefined> {
  const maxBytes = opts?.maxBytes ?? 8 * 1024;
  const maxChars = opts?.maxChars ?? 200;
  const prefix = await readResponsePrefix(res, maxBytes, { chunkTimeoutMs: opts?.chunkTimeoutMs });
  if (prefix.buffer.length === 0) {
    return undefined;
  }
  const text = new TextDecoder().decode(prefix.buffer);
  if (!text) {
    return undefined;
  }
  const collapsed = text.replace(/\s+/g, " ").trim();
  if (!collapsed) {
    return undefined;
  }
  if (collapsed.length > maxChars) {
    // Char-cap truncation: mark the cut with an ellipsis.
    return `${collapsed.slice(0, maxChars)}…`;
  }
  // Byte-cap truncation also gets the ellipsis marker.
  return prefix.truncated ? `${collapsed}…` : collapsed;
}