fix(ci): unblock agent typing and cache startup metadata

This commit is contained in:
Peter Steinberger
2026-04-04 07:04:06 +01:00
parent 3a3f88a80a
commit c91b6bf322
8 changed files with 123 additions and 55 deletions

View File

@@ -8,6 +8,7 @@ import { getApiKeyForModel, requireApiKey } from "./model-auth.js";
import { normalizeProviderId, parseModelRef } from "./model-selection.js";
import { ensureOpenClawModelsJson } from "./models-config.js";
import { discoverAuthStorage, discoverModels } from "./pi-model-discovery.js";
import { buildAssistantMessageWithZeroUsage } from "./stream-message-shared.js";
export const LIVE_CACHE_TEST_ENABLED =
isLiveTestEnabled() && isTruthyEnvValue(process.env.OPENCLAW_LIVE_CACHE_TEST);
@@ -123,6 +124,22 @@ export function extractAssistantText(message: AssistantMessage): string {
.join(" ");
}
// Builds a synthetic assistant history turn for cache tests, delegating to
// buildAssistantMessageWithZeroUsage so token-usage counters stay zeroed and
// cannot skew cache-hit-rate math in the probes that consume these turns.
// text  — the assistant text content for the turn.
// model — optional model metadata to stamp on the message; when omitted,
//         stable placeholder values are used so histories stay byte-identical
//         across runs (important for prompt-cache stability tests).
export function buildAssistantHistoryTurn(
text: string,
model?: Pick<Model<Api>, "api" | "provider" | "id">,
): AssistantMessage {
return buildAssistantMessageWithZeroUsage({
model: {
// Fall back to generic test-model metadata when no model is supplied.
api: model?.api ?? "openai-responses",
provider: model?.provider ?? "openai",
id: model?.id ?? "test-model",
},
content: [{ type: "text", text }],
stopReason: "stop",
// Wall-clock timestamp; callers should not assert on this value.
timestamp: Date.now(),
});
}
export function computeCacheHitRate(usage: {
input?: number;
cacheRead?: number;

View File

@@ -48,14 +48,14 @@ import {
planTurnInput,
} from "./openai-ws-message-conversion.js";
import { buildOpenAIWebSocketResponseCreatePayload } from "./openai-ws-request.js";
import { createBoundaryAwareStreamFnForModel } from "./provider-transport-stream.js";
import { log } from "./pi-embedded-runner/logger.js";
import { createBoundaryAwareStreamFnForModel } from "./provider-transport-stream.js";
import {
buildAssistantMessageWithZeroUsage,
buildStreamErrorAssistantMessage,
} from "./stream-message-shared.js";
import { mergeTransportMetadata } from "./transport-stream-shared.js";
import { stripSystemPromptCacheBoundary } from "./system-prompt-cache-boundary.js";
import { mergeTransportMetadata } from "./transport-stream-shared.js";
// ─────────────────────────────────────────────────────────────────────────────
// Per-session state
@@ -963,7 +963,7 @@ async function fallbackToHttp(
const httpStreamFn =
openAIWsStreamDeps.createHttpFallbackStreamFn(model as ProviderRuntimeModel) ??
openAIWsStreamDeps.streamSimple;
const httpStream = httpStreamFn(model, context, mergedOptions);
const httpStream = await httpStreamFn(model, context, mergedOptions);
for await (const event of httpStream) {
if (fallbackOptions?.suppressStart && event.type === "start") {
continue;

View File

@@ -6,6 +6,7 @@ import { Type } from "@sinclair/typebox";
import { afterAll, beforeAll, describe, expect, it } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import {
buildAssistantHistoryTurn as buildTypedAssistantHistoryTurn,
buildStableCachePrefix,
completeSimpleWithLiveTimeout,
computeCacheHitRate,
@@ -49,12 +50,11 @@ let liveRunnerRootDir: string | undefined;
type UserContent = Extract<Message, { role: "user" }>["content"];
function makeAssistantHistoryTurn(text: string): Message {
return {
role: "assistant",
content: [{ type: "text", text }],
timestamp: Date.now(),
};
// Test-local wrapper over the shared typed builder so live-cache probes can
// attach the resolved live model's metadata to synthetic assistant turns.
// NOTE(review): the model shape is taken from resolveLiveDirectModel's result
// and is assumed compatible with the builder's Pick<Model<Api>, ...> param —
// confirm against the builder's signature if either side changes.
function makeAssistantHistoryTurn(
text: string,
model?: Awaited<ReturnType<typeof resolveLiveDirectModel>>["model"],
): Message {
return buildTypedAssistantHistoryTurn(text, model);
}
function makeUserHistoryTurn(content: UserContent): Message {
@@ -348,9 +348,9 @@ async function runOpenAiToolCacheProbe(params: {
},
toolTurn.response,
buildToolResultMessage(toolTurn.toolCall.id, NOOP_TOOL.name, "ok"),
makeAssistantHistoryTurn("TOOL HISTORY ACKNOWLEDGED"),
makeAssistantHistoryTurn("TOOL HISTORY ACKNOWLEDGED", params.model),
makeUserHistoryTurn("Keep the tool output stable in history."),
makeAssistantHistoryTurn("TOOL HISTORY PRESERVED"),
makeAssistantHistoryTurn("TOOL HISTORY PRESERVED", params.model),
{
role: "user",
content: `Reply with exactly CACHE-OK ${params.suffix}.`,
@@ -432,9 +432,9 @@ async function runOpenAiImageCacheProbe(params: {
makeImageUserTurn(
"An image is attached. Ignore image semantics but keep the bytes in history.",
),
makeAssistantHistoryTurn("IMAGE HISTORY ACKNOWLEDGED"),
makeAssistantHistoryTurn("IMAGE HISTORY ACKNOWLEDGED", params.model),
makeUserHistoryTurn("Keep the earlier image turn stable in context."),
makeAssistantHistoryTurn("IMAGE HISTORY PRESERVED"),
makeAssistantHistoryTurn("IMAGE HISTORY PRESERVED", params.model),
makeUserHistoryTurn(`Reply with exactly CACHE-OK ${params.suffix}.`),
],
},
@@ -526,9 +526,9 @@ async function runAnthropicToolCacheProbe(params: {
},
toolTurn.response,
buildToolResultMessage(toolTurn.toolCall.id, NOOP_TOOL.name, "ok"),
makeAssistantHistoryTurn("TOOL HISTORY ACKNOWLEDGED"),
makeAssistantHistoryTurn("TOOL HISTORY ACKNOWLEDGED", params.model),
makeUserHistoryTurn("Keep the tool output stable in history."),
makeAssistantHistoryTurn("TOOL HISTORY PRESERVED"),
makeAssistantHistoryTurn("TOOL HISTORY PRESERVED", params.model),
{
role: "user",
content: `Reply with exactly CACHE-OK ${params.suffix}.`,
@@ -572,9 +572,9 @@ async function runAnthropicImageCacheProbe(params: {
makeImageUserTurn(
"An image is attached. Ignore image semantics but keep the bytes in history.",
),
makeAssistantHistoryTurn("IMAGE HISTORY ACKNOWLEDGED"),
makeAssistantHistoryTurn("IMAGE HISTORY ACKNOWLEDGED", params.model),
makeUserHistoryTurn("Keep the earlier image turn stable in context."),
makeAssistantHistoryTurn("IMAGE HISTORY PRESERVED"),
makeAssistantHistoryTurn("IMAGE HISTORY PRESERVED", params.model),
makeUserHistoryTurn(`Reply with exactly CACHE-OK ${params.suffix}.`),
],
},

View File

@@ -83,7 +83,8 @@ export const sessionAbortCompactionMock: Mock<(reason?: unknown) => void> = vi.f
export const createOpenClawCodingToolsMock = vi.fn(() => []);
export const resolveEmbeddedAgentStreamFnMock = vi.fn((_params?: unknown) => vi.fn());
export const applyExtraParamsToAgentMock = vi.fn(() => ({ effectiveExtraParams: {} }));
export const resolveAgentTransportOverrideMock = vi.fn(() => undefined);
export const resolveAgentTransportOverrideMock: Mock<(params?: unknown) => string | undefined> =
vi.fn(() => undefined);
export function resetCompactSessionStateMocks(): void {
sanitizeSessionHistoryMock.mockReset();

View File

@@ -2,6 +2,7 @@ import type { AssistantMessage, Tool } from "@mariozechner/pi-ai";
import { Type } from "@sinclair/typebox";
import { describe, expect, it } from "vitest";
import {
buildAssistantHistoryTurn,
buildStableCachePrefix,
completeSimpleWithLiveTimeout,
computeCacheHitRate,
@@ -122,21 +123,13 @@ async function runOpenAiMcpStyleCacheProbe(params: {
{ role: "user", content: toolTurn.prompt, timestamp: Date.now() },
toolTurn.response,
buildToolResultMessage(toolTurn.toolCall.id),
{
role: "assistant",
content: [{ type: "text", text: "MCP TOOL HISTORY ACKNOWLEDGED" }],
timestamp: Date.now(),
},
buildAssistantHistoryTurn("MCP TOOL HISTORY ACKNOWLEDGED", params.model),
{
role: "user",
content: "Keep the MCP tool output stable in history.",
timestamp: Date.now(),
},
{
role: "assistant",
content: [{ type: "text", text: "MCP TOOL HISTORY PRESERVED" }],
timestamp: Date.now(),
},
buildAssistantHistoryTurn("MCP TOOL HISTORY PRESERVED", params.model),
{
role: "user",
content: `Reply with exactly CACHE-OK ${params.suffix}.`,