fix(agents): force store=true for direct openai responses

This commit is contained in:
Vignesh Natarajan
2026-02-14 20:45:15 -08:00
parent 9020277f09
commit 909b5411bb
2 changed files with 104 additions and 0 deletions

View File

@@ -91,4 +91,50 @@ describe("applyExtraParamsToAgent", () => {
"X-Custom": "1",
});
});
it("forces store=true for direct OpenAI Responses payloads", () => {
	// Captured by the stub stream so we can observe the mutation applied by the wrapper.
	const capturedPayload = { store: false };
	const stubStreamFn: StreamFn = (_model, _context, options) => {
		options?.onPayload?.(capturedPayload);
		return new AssistantMessageEventStream();
	};
	// Direct OpenAI host: the wrapper should flip store to true.
	const model = {
		api: "openai-responses",
		provider: "openai",
		id: "gpt-5",
		baseUrl: "https://api.openai.com/v1",
	} as Model<"openai-responses">;
	const context: Context = { messages: [] };
	const agent = { streamFn: stubStreamFn };
	applyExtraParamsToAgent(agent, undefined, "openai", "gpt-5");
	void agent.streamFn?.(model, context, {});
	expect(capturedPayload.store).toBe(true);
});
it("does not force store for OpenAI Responses routed through non-OpenAI base URLs", () => {
	// Captured by the stub stream so we can observe whether the wrapper left it alone.
	const capturedPayload = { store: false };
	const stubStreamFn: StreamFn = (_model, _context, options) => {
		options?.onPayload?.(capturedPayload);
		return new AssistantMessageEventStream();
	};
	// Proxy host: the wrapper must leave the payload untouched.
	const model = {
		api: "openai-responses",
		provider: "openai",
		id: "gpt-5",
		baseUrl: "https://proxy.example.com/v1",
	} as Model<"openai-responses">;
	const context: Context = { messages: [] };
	const agent = { streamFn: stubStreamFn };
	applyExtraParamsToAgent(agent, undefined, "openai", "gpt-5");
	void agent.streamFn?.(model, context, {});
	expect(capturedPayload.store).toBe(false);
});
});

View File

@@ -8,6 +8,8 @@ const OPENROUTER_APP_HEADERS: Record<string, string> = {
"HTTP-Referer": "https://openclaw.ai",
"X-Title": "OpenClaw",
};
// API identifiers that speak the OpenAI Responses protocol (plain and Codex variants).
const OPENAI_RESPONSES_APIS = new Set(["openai-responses", "openai-codex-responses"]);
// Provider ids whose Responses traffic may go directly to OpenAI-operated endpoints.
const OPENAI_RESPONSES_PROVIDERS = new Set(["openai", "openai-codex"]);
/**
* Resolve provider-specific extra params from model config.
@@ -101,6 +103,57 @@ function createStreamFnWithExtraParams(
return wrappedStreamFn;
}
/**
 * Decide whether a model's base URL points at OpenAI's own infrastructure.
 *
 * A missing, non-string, or blank base URL counts as direct (the implicit
 * default endpoint is OpenAI's). When the URL parses, only the exact hosts
 * `api.openai.com` and `chatgpt.com` are considered direct. When parsing
 * fails, fall back to a case-insensitive substring check on the raw value.
 */
function isDirectOpenAIBaseUrl(baseUrl: unknown): boolean {
	if (typeof baseUrl !== "string" || baseUrl.trim() === "") {
		return true;
	}
	let host: string | undefined;
	try {
		host = new URL(baseUrl).hostname.toLowerCase();
	} catch {
		host = undefined;
	}
	if (host !== undefined) {
		return host === "api.openai.com" || host === "chatgpt.com";
	}
	// Unparseable value: best-effort substring match on the lowercased string.
	const lowered = baseUrl.toLowerCase();
	return lowered.includes("api.openai.com") || lowered.includes("chatgpt.com");
}
/**
 * Decide whether the forced `store: true` workaround applies to a model.
 *
 * Applies only when all three hold: the api is a Responses-flavoured OpenAI
 * api, the provider is an OpenAI(-Codex) provider, and the base URL points
 * at OpenAI's own hosts (per `isDirectOpenAIBaseUrl`).
 */
function shouldForceResponsesStore(model: {
	api?: unknown;
	provider?: unknown;
	baseUrl?: unknown;
}): boolean {
	const { api, provider, baseUrl } = model;
	if (typeof api !== "string" || typeof provider !== "string") {
		return false;
	}
	const matchesOpenAIResponses =
		OPENAI_RESPONSES_APIS.has(api) && OPENAI_RESPONSES_PROVIDERS.has(provider);
	return matchesOpenAIResponses ? isDirectOpenAIBaseUrl(baseUrl) : false;
}
/**
 * Wrap a StreamFn so payloads headed to a direct OpenAI Responses endpoint
 * always carry `store: true`; everything else passes through unchanged.
 * Falls back to `streamSimple` when no underlying stream function is given.
 * Any previously-installed onPayload callback still runs (after the mutation).
 */
function createOpenAIResponsesStoreWrapper(baseStreamFn: StreamFn | undefined): StreamFn {
	const inner = baseStreamFn ?? streamSimple;
	return (model, context, options) => {
		if (!shouldForceResponsesStore(model)) {
			return inner(model, context, options);
		}
		const chainedOnPayload = options?.onPayload;
		return inner(model, context, {
			...options,
			onPayload: (payload) => {
				// Mutate in place before forwarding so downstream observers see store=true.
				if (payload && typeof payload === "object") {
					(payload as { store?: unknown }).store = true;
				}
				chainedOnPayload?.(payload);
			},
		});
	};
}
/**
* Create a streamFn wrapper that adds OpenRouter app attribution headers.
* These headers allow OpenClaw to appear on OpenRouter's leaderboard.
@@ -153,4 +206,9 @@ export function applyExtraParamsToAgent(
log.debug(`applying OpenRouter app attribution headers for ${provider}/${modelId}`);
agent.streamFn = createOpenRouterHeadersWrapper(agent.streamFn);
}
// Work around upstream pi-ai hardcoding `store: false` for Responses API.
// Force `store=true` for direct OpenAI/OpenAI Codex providers so multi-turn
// server-side conversation state is preserved.
agent.streamFn = createOpenAIResponsesStoreWrapper(agent.streamFn);
}