fix(thinking): default Claude 4.6 to adaptive

This commit is contained in:
Peter Steinberger
2026-03-02 04:27:26 +00:00
parent 4691aab019
commit 37d036714e
4 changed files with 103 additions and 1 deletions

View File

@@ -94,6 +94,7 @@ Docs: https://docs.openclaw.ai
- Cron/Delivery mode none: send explicit `delivery: { mode: "none" }` from cron editor for both add and update flows so previous announce delivery is actually cleared. Landed from contributor PR #31145 by @byungsker. Thanks @byungsker.
- Cron editor viewport: make the sticky cron edit form independently scrollable with viewport-bounded height so lower fields/actions are reachable on shorter screens. Landed from contributor PR #31133 by @Sid-Qin. Thanks @Sid-Qin.
- Agents/Thinking fallback: when providers reject unsupported thinking levels without enumerating alternatives, retry with `think=off` to avoid hard failure during model/provider fallback chains. Landed from contributor PR #31002 by @yfge. Thanks @yfge.
- Agents/Thinking defaults: set `adaptive` as the default thinking level for Anthropic Claude 4.6 models (including Bedrock Claude 4.6 refs) while keeping other reasoning-capable models at `low` unless explicitly configured.
- Ollama/Embedded runner base URL precedence: prioritize configured provider `baseUrl` over model defaults for embedded Ollama runs so Docker and remote-host setups avoid localhost fetch failures. (#30964) Thanks @stakeswky.
- Agents/Failover reason classification: avoid false rate-limit classification from incidental `tpm` substrings by matching TPM as a standalone token/phrase and keeping auth-context errors on the auth path. Landed from contributor PR #31007 by @HOYALIM. Thanks @HOYALIM.
- CLI/Cron: clarify `cron list` output by renaming `Agent` to `Agent ID` and adding a `Model` column for isolated agent-turn jobs. (#26259) Thanks @openperf.

View File

@@ -503,6 +503,72 @@ describe("model-selection", () => {
}),
).toBe("high");
});
// An explicit per-model `params.thinking: "adaptive"` in the config must be
// honored verbatim by resolveThinkingDefault (config beats any built-in default).
it("accepts per-model params.thinking=adaptive", () => {
  const cfg = {
    agents: {
      defaults: {
        models: {
          "anthropic/claude-opus-4-6": {
            params: { thinking: "adaptive" },
          },
        },
      },
    },
  } as OpenClawConfig;
  const catalogEntry = {
    provider: "anthropic",
    id: "claude-opus-4-6",
    name: "Claude Opus 4.6",
    reasoning: true,
  };
  const resolved = resolveThinkingDefault({
    cfg,
    provider: "anthropic",
    model: "claude-opus-4-6",
    catalog: [catalogEntry],
  });
  expect(resolved).toBe("adaptive");
});
// With an empty config, Claude 4.6 models should resolve to "adaptive" both for
// the direct Anthropic provider and for Bedrock-style prefixed model refs.
it("defaults Anthropic Claude 4.6 models to adaptive", () => {
  const cfg = {} as OpenClawConfig;
  const cases = [
    {
      provider: "anthropic",
      model: "claude-opus-4-6",
      entry: {
        provider: "anthropic",
        id: "claude-opus-4-6",
        name: "Claude Opus 4.6",
        reasoning: true,
      },
    },
    {
      provider: "amazon-bedrock",
      model: "us.anthropic.claude-sonnet-4-6-v1:0",
      entry: {
        provider: "amazon-bedrock",
        id: "us.anthropic.claude-sonnet-4-6-v1:0",
        name: "Claude Sonnet 4.6",
        reasoning: true,
      },
    },
  ];
  for (const { provider, model, entry } of cases) {
    expect(
      resolveThinkingDefault({ cfg, provider, model, catalog: [entry] }),
    ).toBe("adaptive");
  }
});
});
});

View File

@@ -27,6 +27,7 @@ const ANTHROPIC_MODEL_ALIASES: Record<string, string> = {
"sonnet-4.6": "claude-sonnet-4-6",
"sonnet-4.5": "claude-sonnet-4-5",
};
// Matches Claude Opus/Sonnet 4.6 model ids with either a dot or hyphen version
// separator (e.g. "claude-opus-4-6", "claude-sonnet-4.6"), anywhere in the
// string so provider-prefixed refs like "us.anthropic.claude-sonnet-4-6-v1:0"
// also match. The trailing (?:$|[-.]) boundary prevents false positives on
// longer version numbers such as "...-4-60".
const CLAUDE_46_MODEL_RE = /claude-(?:opus|sonnet)-4(?:\.|-)6(?:$|[-.])/i;
function normalizeAliasKey(value: string): string {
return value.trim().toLowerCase();
@@ -525,6 +526,8 @@ export function resolveThinkingDefault(params: {
model: string;
catalog?: ModelCatalogEntry[];
}): ThinkLevel {
const normalizedProvider = normalizeProviderId(params.provider);
const modelLower = params.model.toLowerCase();
const perModelThinking =
params.cfg.agents?.defaults?.models?.[modelKey(params.provider, params.model)]?.params
?.thinking;
@@ -534,7 +537,8 @@ export function resolveThinkingDefault(params: {
perModelThinking === "low" ||
perModelThinking === "medium" ||
perModelThinking === "high" ||
perModelThinking === "xhigh"
perModelThinking === "xhigh" ||
perModelThinking === "adaptive"
) {
return perModelThinking;
}
@@ -542,6 +546,14 @@ export function resolveThinkingDefault(params: {
if (configured) {
return configured;
}
const isAnthropicFamilyModel =
normalizedProvider === "anthropic" ||
normalizedProvider === "amazon-bedrock" ||
modelLower.includes("anthropic/") ||
modelLower.includes(".anthropic.");
if (isAnthropicFamilyModel && CLAUDE_46_MODEL_RE.test(modelLower)) {
return "adaptive";
}
const candidate = params.catalog?.find(
(entry) => entry.provider === params.provider && entry.id === params.model,
);

View File

@@ -734,6 +734,29 @@ describe("agentCommand", () => {
});
});
// End-to-end check: with no explicit thinking configured for the primary model,
// agentCommand should pass thinkLevel="adaptive" through to the embedded agent
// when the model is an Anthropic Claude 4.6 entry (reasoning: true) in the catalog.
it("defaults thinking to adaptive for Anthropic Claude 4.6 models", async () => {
await withTempHome(async (home) => {
const store = path.join(home, "sessions.json");
// Config names Claude Opus 4.6 as primary but sets no per-model thinking params.
mockConfig(home, store, {
model: { primary: "anthropic/claude-opus-4-6" },
models: { "anthropic/claude-opus-4-6": {} },
});
// One-shot catalog mock: the model is reasoning-capable, so a thinking
// default must be resolved rather than left unset.
vi.mocked(loadModelCatalog).mockResolvedValueOnce([
{
id: "claude-opus-4-6",
name: "Opus 4.6",
provider: "anthropic",
reasoning: true,
},
]);
await agentCommand({ message: "hi", to: "+1555" }, runtime);
// Inspect the most recent embedded-agent invocation for the resolved level.
const callArgs = vi.mocked(runEmbeddedPiAgent).mock.calls.at(-1)?.[0];
expect(callArgs?.thinkLevel).toBe("adaptive");
});
});
it("prefers per-model thinking over global thinkingDefault", async () => {
await withTempHome(async (home) => {
const store = path.join(home, "sessions.json");