fix(agents): normalize chat-latest verbosity

This commit is contained in:
Vincent Koc
2026-05-06 03:12:20 -07:00
parent 479a9c0259
commit 978acec90d
2 changed files with 18 additions and 45 deletions

View File

@@ -1,4 +1,3 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import {
type ProviderResolveDynamicModelContext,
type ProviderRuntimeModel,
@@ -79,41 +78,6 @@ const OPENAI_MODERN_MODEL_IDS = [
"gpt-5.2",
] as const;
/**
 * Mutates a request payload in place so that `text.verbosity`, when present
 * with any value other than "medium", is replaced with "medium".
 *
 * Non-object payloads, payloads without an object-valued `text` field, and
 * payloads whose verbosity is already "medium" (or absent) are left untouched.
 */
function clampChatLatestTextVerbosity(payload: unknown): void {
	if (typeof payload !== "object" || payload === null) {
		return;
	}
	const record = payload as Record<string, unknown>;
	const textValue = record.text;
	if (typeof textValue !== "object" || textValue === null) {
		return;
	}
	const textRecord = textValue as Record<string, unknown>;
	const currentVerbosity = textRecord.verbosity;
	if (currentVerbosity === undefined || currentVerbosity === "medium") {
		return;
	}
	// Replace rather than mutate the nested object so other holders of the
	// original `text` reference are unaffected.
	record.text = { ...textRecord, verbosity: "medium" };
}
/**
 * Wraps a stream function so that requests targeting OpenAI's `chat-latest`
 * model over the `openai-responses` API have their `text.verbosity` clamped
 * to "medium" before the payload is passed to the caller's `onPayload` hook.
 * All other models/APIs are delegated to unchanged.
 *
 * @param baseStreamFn - The stream function to delegate to.
 * @param modelId - Model id used to decide whether clamping applies.
 * @returns A StreamFn identical to `baseStreamFn` except for the clamping.
 */
function createOpenAIChatLatestCompatWrapper(baseStreamFn: StreamFn, modelId: string): StreamFn {
	const underlying = baseStreamFn;
	// The model id is fixed for the lifetime of this wrapper, so normalize
	// and compare it once here instead of on every stream invocation.
	const isChatLatest =
		normalizeLowercaseStringOrEmpty(modelId) === OPENAI_CHAT_LATEST_MODEL_ID;
	return (model, context, options) => {
		if (model.api !== "openai-responses" || !isChatLatest) {
			return underlying(model, context, options);
		}
		const originalOnPayload = options?.onPayload;
		return underlying(model, context, {
			...options,
			onPayload: (payload) => {
				// Clamp first so any downstream observer sees the final payload.
				clampChatLatestTextVerbosity(payload);
				return originalOnPayload?.(payload, model);
			},
		});
	};
}
function shouldUseOpenAIResponsesTransport(params: {
provider: string;
api?: string | null;
@@ -247,7 +211,6 @@ function resolveOpenAIGptForwardCompatModel(ctx: ProviderResolveDynamicModelCont
}
export function buildOpenAIProvider(): ProviderPlugin {
const responsesHooks = buildOpenAIResponsesProviderHooks({ transport: "sse" });
return {
id: PROVIDER_ID,
label: "OpenAI",
@@ -285,12 +248,7 @@ export function buildOpenAIProvider(): ProviderPlugin {
shouldUseOpenAIResponsesTransport({ provider, api, baseUrl })
? { api: "openai-responses", baseUrl }
: undefined,
...responsesHooks,
wrapStreamFn: (ctx) =>
createOpenAIChatLatestCompatWrapper(
responsesHooks.wrapStreamFn?.(ctx) ?? ctx.streamFn,
ctx.modelId,
),
...buildOpenAIResponsesProviderHooks({ transport: "sse" }),
matchesContextOverflowError: ({ errorMessage }) =>
/content_filter.*(?:prompt|input).*(?:too long|exceed)/i.test(errorMessage),
resolveReasoningOutputMode: () => "native",

View File

@@ -24,6 +24,19 @@ import { streamWithPayloadPatch } from "./stream-payload-utils.js";
type OpenAIServiceTier = "auto" | "default" | "flex" | "priority";
export { resolveOpenAITextVerbosity };
/**
 * Resolves the effective text verbosity for a model: returns "medium" for
 * OpenAI's `chat-latest` model on the `openai-responses` API, and the
 * requested verbosity unchanged for every other model.
 */
function resolveOpenAITextVerbosityForModel(
	model: { api?: unknown; id?: unknown; provider?: unknown },
	verbosity: OpenAITextVerbosity,
): OpenAITextVerbosity {
	const isOpenAIChatLatestOnResponses =
		normalizeOptionalLowercaseString(model.api) === "openai-responses" &&
		normalizeOptionalLowercaseString(model.provider) === "openai" &&
		normalizeOptionalLowercaseString(model.id) === "chat-latest";
	return isOpenAIChatLatestOnResponses ? "medium" : verbosity;
}
function resolveOpenAIRequestCapabilities(model: {
api?: unknown;
provider?: unknown;
@@ -392,7 +405,9 @@ export function createOpenAITextVerbosityWrapper(
if (model.api !== "openai-responses" && model.api !== "openai-codex-responses") {
return underlying(model, context, options);
}
const shouldOverrideExistingVerbosity = model.api === "openai-codex-responses";
const resolvedVerbosity = resolveOpenAITextVerbosityForModel(model, verbosity);
const shouldOverrideExistingVerbosity =
model.api === "openai-codex-responses" || resolvedVerbosity !== verbosity;
const originalOnPayload = options?.onPayload;
return underlying(model, context, {
...options,
@@ -404,7 +419,7 @@ export function createOpenAITextVerbosityWrapper(
? (payloadObj.text as Record<string, unknown>)
: {};
if (shouldOverrideExistingVerbosity || existingText.verbosity === undefined) {
payloadObj.text = { ...existingText, verbosity };
payloadObj.text = { ...existingText, verbosity: resolvedVerbosity };
}
}
return originalOnPayload?.(payload, model);