Mirror of https://github.com/moltbot/moltbot.git (synced 2026-04-26 16:06:16 +00:00).
Commit: fix(agents): allow configured ollama endpoints without dummy api keys
This commit is contained in:
@@ -226,6 +226,62 @@ describe("getApiKeyForModel", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("resolves synthetic local auth key for configured ollama provider without apiKey", async () => {
|
||||
await withEnvAsync({ OLLAMA_API_KEY: undefined }, async () => {
|
||||
const resolved = await resolveApiKeyForProvider({
|
||||
provider: "ollama",
|
||||
store: { version: 1, profiles: {} },
|
||||
cfg: {
|
||||
models: {
|
||||
providers: {
|
||||
ollama: {
|
||||
baseUrl: "http://gpu-node-server:11434",
|
||||
api: "openai-completions",
|
||||
models: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(resolved.apiKey).toBe("ollama-local");
|
||||
expect(resolved.mode).toBe("api-key");
|
||||
expect(resolved.source).toContain("synthetic local key");
|
||||
});
|
||||
});
|
||||
|
||||
it("prefers explicit OLLAMA_API_KEY over synthetic local key", async () => {
|
||||
await withEnvAsync({ OLLAMA_API_KEY: "env-ollama-key" }, async () => {
|
||||
const resolved = await resolveApiKeyForProvider({
|
||||
provider: "ollama",
|
||||
store: { version: 1, profiles: {} },
|
||||
cfg: {
|
||||
models: {
|
||||
providers: {
|
||||
ollama: {
|
||||
baseUrl: "http://gpu-node-server:11434",
|
||||
api: "openai-completions",
|
||||
models: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(resolved.apiKey).toBe("env-ollama-key");
|
||||
expect(resolved.source).toContain("OLLAMA_API_KEY");
|
||||
});
|
||||
});
|
||||
|
||||
it("still throws for ollama when no env/profile/config provider is available", async () => {
|
||||
await withEnvAsync({ OLLAMA_API_KEY: undefined }, async () => {
|
||||
await expect(
|
||||
resolveApiKeyForProvider({
|
||||
provider: "ollama",
|
||||
store: { version: 1, profiles: {} },
|
||||
}),
|
||||
).rejects.toThrow('No API key found for provider "ollama".');
|
||||
});
|
||||
});
|
||||
|
||||
it("resolves Vercel AI Gateway API key from env", async () => {
|
||||
await withEnvAsync({ AI_GATEWAY_API_KEY: "gateway-test-key" }, async () => {
|
||||
const resolved = await resolveApiKeyForProvider({
|
||||
|
||||
@@ -67,6 +67,35 @@ function resolveProviderAuthOverride(
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function resolveSyntheticLocalProviderAuth(params: {
|
||||
cfg: OpenClawConfig | undefined;
|
||||
provider: string;
|
||||
}): ResolvedProviderAuth | null {
|
||||
const normalizedProvider = normalizeProviderId(params.provider);
|
||||
if (normalizedProvider !== "ollama") {
|
||||
return null;
|
||||
}
|
||||
|
||||
const providerConfig = resolveProviderConfig(params.cfg, params.provider);
|
||||
if (!providerConfig) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const hasApiConfig =
|
||||
Boolean(providerConfig.api?.trim()) ||
|
||||
Boolean(providerConfig.baseUrl?.trim()) ||
|
||||
(Array.isArray(providerConfig.models) && providerConfig.models.length > 0);
|
||||
if (!hasApiConfig) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
apiKey: "ollama-local",
|
||||
source: "models.providers.ollama (synthetic local key)",
|
||||
mode: "api-key",
|
||||
};
|
||||
}
|
||||
|
||||
function resolveEnvSourceLabel(params: {
|
||||
applied: Set<string>;
|
||||
envVars: string[];
|
||||
@@ -207,6 +236,11 @@ export async function resolveApiKeyForProvider(params: {
|
||||
return { apiKey: customKey, source: "models.json", mode: "api-key" };
|
||||
}
|
||||
|
||||
const syntheticLocalAuth = resolveSyntheticLocalProviderAuth({ cfg, provider });
|
||||
if (syntheticLocalAuth) {
|
||||
return syntheticLocalAuth;
|
||||
}
|
||||
|
||||
const normalized = normalizeProviderId(provider);
|
||||
if (authOverride === undefined && normalized === "amazon-bedrock") {
|
||||
return resolveAwsSdkAuthInfo();
|
||||
|
||||
Reference in New Issue
Block a user