Gateway: honor message-channel header for chat completions

This commit is contained in:
bmendonca3
2026-02-28 10:56:38 -07:00
committed by Peter Steinberger
parent caae34cbaf
commit 842a79cf99
2 changed files with 50 additions and 2 deletions

View File

@@ -0,0 +1,43 @@
import { describe, expect, it } from "vitest";
import { agentCommand, installGatewayTestHooks, withGatewayServer } from "./test-helpers.js";
installGatewayTestHooks({ scope: "test" });

describe("OpenAI HTTP message channel", () => {
  it("passes x-openclaw-message-channel through to agentCommand", async () => {
    // Arrange: fresh mock so call inspection below sees only this request.
    agentCommand.mockReset();
    agentCommand.mockResolvedValueOnce({ payloads: [{ text: "ok" }] } as never);

    await withGatewayServer(
      async ({ port }) => {
        // Act: POST a chat completion with the custom channel header set.
        const requestBody = JSON.stringify({
          model: "openclaw",
          messages: [{ role: "user", content: "hi" }],
        });
        const response = await fetch(`http://127.0.0.1:${port}/v1/chat/completions`, {
          method: "POST",
          headers: {
            "content-type": "application/json",
            authorization: "Bearer secret",
            "x-openclaw-message-channel": "custom-client-channel",
          },
          body: requestBody,
        });
        expect(response.status).toBe(200);

        // Assert: the gateway forwarded the header value as messageChannel
        // on the first agentCommand invocation.
        const firstInput = (agentCommand.mock.calls[0] as unknown[] | undefined)?.[0] as
          | { messageChannel?: string }
          | undefined;
        expect(firstInput?.messageChannel).toBe("custom-client-channel");

        // Drain the response body so the server side completes cleanly.
        await response.text();
      },
      {
        serverOptions: {
          host: "127.0.0.1",
          auth: { mode: "token", token: "secret" },
          controlUiEnabled: false,
          openAiChatCompletionsEnabled: true,
        },
      },
    );
  });
});

View File

@@ -5,6 +5,7 @@ import { agentCommand } from "../commands/agent.js";
import { emitAgentEvent, onAgentEvent } from "../infra/agent-events.js";
import { logWarn } from "../logger.js";
import { defaultRuntime } from "../runtime.js";
import { normalizeMessageChannel } from "../utils/message-channel.js";
import { resolveAssistantStreamDeltaText } from "./agent-event-assistant-text.js";
import {
buildAgentMessageFromConversationEntries,
@@ -14,7 +15,7 @@ import type { AuthRateLimiter } from "./auth-rate-limit.js";
import type { ResolvedGatewayAuth } from "./auth.js";
import { sendJson, setSseHeaders, writeDone } from "./http-common.js";
import { handleGatewayPostJsonEndpoint } from "./http-endpoint-helpers.js";
import { resolveAgentIdForRequest, resolveSessionKey } from "./http-utils.js";
import { getHeader, resolveAgentIdForRequest, resolveSessionKey } from "./http-utils.js";
type OpenAiHttpOptions = {
auth: ResolvedGatewayAuth;
@@ -45,6 +46,7 @@ function buildAgentCommandInput(params: {
prompt: { message: string; extraSystemPrompt?: string };
sessionKey: string;
runId: string;
messageChannel: string;
}) {
return {
message: params.prompt.message,
@@ -52,7 +54,7 @@ function buildAgentCommandInput(params: {
sessionKey: params.sessionKey,
runId: params.runId,
deliver: false as const,
messageChannel: "webchat" as const,
messageChannel: params.messageChannel,
bestEffortDeliver: false as const,
};
}
@@ -226,6 +228,8 @@ export async function handleOpenAiHttpRequest(
const agentId = resolveAgentIdForRequest({ req, model });
const sessionKey = resolveOpenAiSessionKey({ req, agentId, user });
const messageChannel =
normalizeMessageChannel(getHeader(req, "x-openclaw-message-channel") ?? "") ?? "webchat";
const prompt = buildAgentPrompt(payload.messages);
if (!prompt.message) {
sendJson(res, 400, {
@@ -243,6 +247,7 @@ export async function handleOpenAiHttpRequest(
prompt,
sessionKey,
runId,
messageChannel,
});
if (!stream) {