fix(models): cache models.json readiness for embedded runs (#52077)

* fix(models): cache models.json readiness for embedded runs

* fix(models): harden readiness cache inputs
This commit is contained in:
Vincent Koc
2026-03-21 21:58:10 -07:00
committed by GitHub
parent 432e8943ad
commit 2b210703a3
4 changed files with 140 additions and 8 deletions

View File

@@ -93,6 +93,7 @@ Docs: https://docs.openclaw.ai
- Agents/Telegram: avoid rebuilding the full model catalog on ordinary inbound replies so Telegram message handling no longer pays multi-second core startup latency before reply generation. Thanks @vincentkoc.
- Agents/inbound: lazy-load media and link understanding for plain-text turns and cache synced auth stores by auth-file state so ordinary inbound replies avoid unnecessary startup churn. Thanks @vincentkoc.
- Telegram/polling: hard-timeout stuck `getUpdates` requests so wedged network paths fail over sooner instead of waiting for the polling stall watchdog. Thanks @vincentkoc.
- Agents/models: cache `models.json` readiness by config and auth-file state so embedded runner turns stop paying repeated model-catalog startup work before replies. Thanks @vincentkoc.
- Security/device pairing: harden `device.token.rotate` deny handling by keeping public failures generic while logging internal deny reasons and preserving approved-baseline enforcement. (`GHSA-7jrw-x62h-64p8`)
- Inbound policy hardening: tighten callback and webhook sender checks across Mattermost and Google Chat, match Nextcloud Talk rooms by stable room token, and treat explicit empty Twitch allowlists as deny-all. (#46787) Thanks @zpbrent, @ijxpwastaken and @vincentkoc.
- Webhooks/runtime: move auth earlier and tighten pre-auth body limits and timeouts across bundled webhook handlers, including slow-body handling for Mattermost slash commands. (#46802) Thanks @vincentkoc.

View File

@@ -1,16 +1,20 @@
import fs from "node:fs/promises";
import path from "node:path";
import { describe, expect, it } from "vitest";
import { afterEach, describe, expect, it } from "vitest";
import { resolveOpenClawAgentDir } from "./agent-paths.js";
import {
CUSTOM_PROXY_MODELS_CONFIG,
installModelsConfigTestHooks,
withModelsTempHome as withTempHome,
} from "./models-config.e2e-harness.js";
import { ensureOpenClawModelsJson } from "./models-config.js";
import { ensureOpenClawModelsJson, resetModelsJsonReadyCacheForTest } from "./models-config.js";
installModelsConfigTestHooks();
// The models.json readiness cache is module-global; clear it after every
// test so one test's cached fingerprint cannot mask another test's writes.
afterEach(() => {
resetModelsJsonReadyCacheForTest();
});
describe("models-config file mode", () => {
it("writes models.json with mode 0600", async () => {
if (process.platform === "win32") {

View File

@@ -1,4 +1,4 @@
import { describe, expect, it } from "vitest";
import { afterEach, describe, expect, it } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import {
clearConfigCache,
@@ -11,11 +11,15 @@ import {
installModelsConfigTestHooks,
withModelsTempHome as withTempHome,
} from "./models-config.e2e-harness.js";
import { ensureOpenClawModelsJson } from "./models-config.js";
import { ensureOpenClawModelsJson, resetModelsJsonReadyCacheForTest } from "./models-config.js";
import { readGeneratedModelsJson } from "./models-config.test-utils.js";
installModelsConfigTestHooks();
// The models.json readiness cache is module-global; clear it after every
// test so one test's cached fingerprint cannot mask another test's writes.
afterEach(() => {
resetModelsJsonReadyCacheForTest();
});
function createOpenAiApiKeySourceConfig(): OpenClawConfig {
return {
models: {
@@ -215,6 +219,55 @@ describe("models-config runtime source snapshot", () => {
});
});
it("invalidates cached readiness when projected config changes under the same runtime snapshot", async () => {
  await withTempHome(async () => {
    const sourceConfig = createOpenAiApiKeySourceConfig();
    const runtimeConfig = createOpenAiApiKeyRuntimeConfig();
    // Two candidate configs share the same runtime snapshot and differ only
    // in the openai baseUrl, so only the projected config distinguishes them.
    const candidateWithBaseUrl = (baseUrl: string): OpenClawConfig => ({
      ...runtimeConfig,
      models: {
        providers: {
          openai: {
            ...runtimeConfig.models!.providers!.openai,
            baseUrl,
          },
        },
      },
    });
    type GeneratedModels = {
      providers: Record<string, { baseUrl?: string; apiKey?: string }>;
    };
    try {
      setRuntimeConfigSnapshot(runtimeConfig, sourceConfig);
      await ensureOpenClawModelsJson(candidateWithBaseUrl("https://api.openai.com/v1"));
      const first = await readGeneratedModelsJson<GeneratedModels>();
      expect(first.providers.openai?.baseUrl).toBe("https://api.openai.com/v1");
      expect(first.providers.openai?.apiKey).toBe("OPENAI_API_KEY"); // pragma: allowlist secret
      // Same snapshot, different projected config: the cached readiness must
      // not short-circuit this second ensure call.
      await ensureOpenClawModelsJson(candidateWithBaseUrl("https://mirror.example/v1"));
      const second = await readGeneratedModelsJson<GeneratedModels>();
      expect(second.providers.openai?.baseUrl).toBe("https://mirror.example/v1");
      expect(second.providers.openai?.apiKey).toBe("OPENAI_API_KEY"); // pragma: allowlist secret
    } finally {
      clearRuntimeConfigSnapshot();
      clearConfigCache();
    }
  });
});
it("uses header markers from runtime source snapshot instead of resolved runtime values", async () => {
await withGeneratedModelsFromRuntimeSource(
{

View File

@@ -11,6 +11,53 @@ import { resolveOpenClawAgentDir } from "./agent-paths.js";
import { planOpenClawModelsJson } from "./models-config.plan.js";
// Serializes concurrent writers targeting the same models.json path.
const MODELS_JSON_WRITE_LOCKS = new Map<string, Promise<void>>();
// Per-target-path readiness cache. The settled promise carries both the
// fingerprint of the inputs it was computed from and the ensure result, so
// a later call whose inputs produce the same fingerprint can skip the
// rebuild entirely.
const MODELS_JSON_READY_CACHE = new Map<
string,
Promise<{ fingerprint: string; result: { agentDir: string; wrote: boolean } }>
>();
/**
 * Best-effort mtime probe used as a cache-fingerprint input.
 *
 * @param pathname - Absolute or relative path to stat.
 * @returns The file's mtime in milliseconds, or `null` when the file cannot
 *          be stat'ed (missing, permission error) or reports a non-finite
 *          mtime value.
 */
async function readFileMtimeMs(pathname: string): Promise<number | null> {
  try {
    const { mtimeMs } = await fs.stat(pathname);
    return Number.isFinite(mtimeMs) ? mtimeMs : null;
  } catch {
    // Any stat failure is treated as "no observable file state".
    return null;
  }
}
/**
 * Deterministically serialize a JSON-like value.
 *
 * Unlike plain `JSON.stringify`, object keys are emitted in locale-sorted
 * order at every depth, so structurally equal values always yield the same
 * string regardless of property insertion order — a requirement for using
 * the output as a cache fingerprint.
 */
function stableStringify(value: unknown): string {
  if (value === null || typeof value !== "object") {
    return JSON.stringify(value);
  }
  if (Array.isArray(value)) {
    const items = value.map((entry) => stableStringify(entry));
    return `[${items.join(",")}]`;
  }
  // Object.entries allocates a fresh array, so sorting it in place is safe.
  const sortedEntries = Object.entries(value as Record<string, unknown>).sort(
    ([left], [right]) => left.localeCompare(right),
  );
  const body = sortedEntries
    .map(([key, entry]) => `${JSON.stringify(key)}:${stableStringify(entry)}`)
    .join(",");
  return `{${body}}`;
}
/**
 * Compute the readiness-cache fingerprint for a models.json target.
 *
 * The fingerprint combines the projected config, the secret-bearing source
 * config, the derived runtime env shape, and the mtimes of
 * `auth-profiles.json` and `models.json` under the agent dir — so a change
 * to any of those inputs invalidates the cached readiness entry.
 */
async function buildModelsJsonFingerprint(params: {
  config: OpenClawConfig;
  sourceConfigForSecrets: OpenClawConfig;
  agentDir: string;
}): Promise<string> {
  const { config, sourceConfigForSecrets, agentDir } = params;
  const authProfilesMtimeMs = await readFileMtimeMs(path.join(agentDir, "auth-profiles.json"));
  const modelsFileMtimeMs = await readFileMtimeMs(path.join(agentDir, "models.json"));
  const envShape = createConfigRuntimeEnv(config, {});
  return stableStringify({
    config,
    sourceConfigForSecrets,
    envShape,
    authProfilesMtimeMs,
    modelsFileMtimeMs,
  });
}
async function readExistingModelsFile(pathname: string): Promise<{
raw: string;
@@ -96,8 +143,21 @@ export async function ensureOpenClawModelsJson(
const cfg = resolved.config;
const agentDir = agentDirOverride?.trim() ? agentDirOverride.trim() : resolveOpenClawAgentDir();
const targetPath = path.join(agentDir, "models.json");
const fingerprint = await buildModelsJsonFingerprint({
config: cfg,
sourceConfigForSecrets: resolved.sourceConfigForSecrets,
agentDir,
});
const cached = MODELS_JSON_READY_CACHE.get(targetPath);
if (cached) {
const settled = await cached;
if (settled.fingerprint === fingerprint) {
await ensureModelsFileMode(targetPath);
return settled.result;
}
}
return await withModelsJsonWriteLock(targetPath, async () => {
const pending = withModelsJsonWriteLock(targetPath, async () => {
// Ensure config env vars (e.g. AWS_PROFILE, AWS_ACCESS_KEY_ID) are
// available to provider discovery without mutating process.env.
const env = createConfigRuntimeEnv(cfg);
@@ -112,17 +172,31 @@ export async function ensureOpenClawModelsJson(
});
if (plan.action === "skip") {
return { agentDir, wrote: false };
return { fingerprint, result: { agentDir, wrote: false } };
}
if (plan.action === "noop") {
await ensureModelsFileMode(targetPath);
return { agentDir, wrote: false };
return { fingerprint, result: { agentDir, wrote: false } };
}
await fs.mkdir(agentDir, { recursive: true, mode: 0o700 });
await writeModelsFileAtomic(targetPath, plan.contents);
await ensureModelsFileMode(targetPath);
return { agentDir, wrote: true };
return { fingerprint, result: { agentDir, wrote: true } };
});
MODELS_JSON_READY_CACHE.set(targetPath, pending);
try {
const settled = await pending;
return settled.result;
} catch (error) {
if (MODELS_JSON_READY_CACHE.get(targetPath) === pending) {
MODELS_JSON_READY_CACHE.delete(targetPath);
}
throw error;
}
}
/**
 * Test-only hook: drop all cached models.json readiness entries so tests
 * that mutate config or on-disk state do not see stale cached results.
 */
export function resetModelsJsonReadyCacheForTest(): void {
MODELS_JSON_READY_CACHE.clear();
}