mirror of
https://github.com/moltbot/moltbot.git
synced 2026-05-06 15:18:58 +00:00
fix(litellm): honor noninteractive custom base url
This commit is contained in:
@@ -80,6 +80,7 @@ Docs: https://docs.openclaw.ai
|
||||
- Plugins/registry: suppress duplicate-plugin startup warnings when a tracked npm-installed plugin intentionally overrides the bundled plugin with the same id. Carries forward #48673. Thanks @abdushsk.
|
||||
- Plugins/startup: reuse canonical realpath lookups throughout each plugin discovery pass, including package and manifest boundary checks, so Windows npm-global startups no longer repeat expensive path resolution for the same plugin roots. Fixes #65733. Thanks @welfo-beo.
|
||||
- Gateway/proxy: pass `ALL_PROXY` / `all_proxy` into the global Undici env-proxy dispatcher and provider proxy-fetch helper while keeping SSRF trusted-proxy auto-upgrade on `HTTP_PROXY` / `HTTPS_PROXY` only, so gateway/provider calls honor all-proxy setups without weakening guarded fetches. Fixes #43821; carries forward #43919. Thanks @RickyTong1.
|
||||
- Providers/LiteLLM: honor `--custom-base-url` during non-interactive API-key onboarding without adding proxy discovery side effects, so scripted remote LiteLLM setup keeps the requested endpoint instead of falling back to localhost. Carries forward #66160. Thanks @dongs0104.
|
||||
- Reply/link understanding: keep media and link preprocessing on stable runtime entrypoints and continue with raw message content if optional enrichment fails, so URL-bearing messages are no longer dropped after stale runtime chunk upgrades. Fixes #68466. Thanks @songshikang0111.
|
||||
- Discord: persist routed model-picker overrides when the hidden `/model` dispatch succeeds but the bound thread session store is still stale, including LM Studio suffixed model ids. Carries forward #61473. Thanks @Nanako0129.
|
||||
- Nodes/CLI: add `openclaw nodes remove --node <id|name|ip>` and `node.pair.remove` so stale gateway-owned node pairing records can be cleaned without hand-editing state files.
|
||||
|
||||
@@ -30,6 +30,12 @@ read_when:
|
||||
```bash
|
||||
openclaw onboard --auth-choice litellm-api-key
|
||||
```
|
||||
|
||||
For non-interactive setup against a remote proxy, pass the proxy URL explicitly:
|
||||
|
||||
```bash
|
||||
openclaw onboard --non-interactive --auth-choice litellm-api-key --litellm-api-key "$LITELLM_API_KEY" --custom-base-url "https://litellm.example/v1"
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
|
||||
73
extensions/litellm/index.test.ts
Normal file
73
extensions/litellm/index.test.ts
Normal file
@@ -0,0 +1,73 @@
|
||||
import { mkdtempSync, rmSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { capturePluginRegistration } from "openclaw/plugin-sdk/testing";
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
import plugin from "./index.js";
|
||||
|
||||
function registerProvider() {
|
||||
const captured = capturePluginRegistration(plugin);
|
||||
const provider = captured.providers[0];
|
||||
expect(provider?.id).toBe("litellm");
|
||||
return provider;
|
||||
}
|
||||
|
||||
// Regression coverage for scripted onboarding: a `--custom-base-url` passed to
// the non-interactive API-key flow must be kept, rather than the config
// falling back to the provider's default base URL.
describe("litellm plugin", () => {
  it("honors --custom-base-url in non-interactive API-key setup", async () => {
    const provider = registerProvider();
    const auth = provider?.auth?.[0];
    // Throw-away agent dir so the auth flow cannot touch real user state.
    const agentDir = mkdtempSync(join(tmpdir(), "openclaw-litellm-auth-"));
    // Stub key resolution: behave as if the key arrived via the CLI flag.
    const resolveApiKey = vi.fn(async () => ({ key: "litellm-test-key", source: "flag" as const }));
    const toApiKeyCredential = vi.fn(({ provider: providerId, resolved }) => ({
      type: "api_key" as const,
      provider: providerId,
      key: resolved.key,
    }));

    try {
      const result = await auth?.runNonInteractive?.({
        authChoice: "litellm-api-key",
        config: {},
        baseConfig: {},
        opts: {
          litellmApiKey: "litellm-test-key",
          // Trailing slash on purpose: the assertion below expects it stripped.
          customBaseUrl: "https://litellm.example/v1/",
        },
        runtime: {
          error: vi.fn(),
          exit: vi.fn(),
          log: vi.fn(),
        } as never,
        agentDir,
        resolveApiKey,
        toApiKeyCredential,
      } as never);

      // The custom URL wins (normalized, no trailing slash) …
      expect(result?.models?.providers?.litellm?.baseUrl).toBe("https://litellm.example/v1");
      expect(result?.models?.providers?.litellm?.api).toBe("openai-completions");
      // … and the rest of the non-interactive setup output is unchanged.
      expect(result?.auth?.profiles?.["litellm:default"]).toEqual({
        provider: "litellm",
        mode: "api_key",
      });
      expect(result?.agents?.defaults?.model).toMatchObject({
        primary: "litellm/claude-opus-4-6",
      });
      // Key resolution must be driven by the litellm flag/env pair.
      expect(resolveApiKey).toHaveBeenCalledWith(
        expect.objectContaining({
          provider: "litellm",
          flagValue: "litellm-test-key",
          flagName: "--litellm-api-key",
          envVar: "LITELLM_API_KEY",
        }),
      );
      expect(toApiKeyCredential).toHaveBeenCalledWith(
        expect.objectContaining({
          provider: "litellm",
          resolved: { key: "litellm-test-key", source: "flag" },
        }),
      );
    } finally {
      rmSync(agentDir, { recursive: true, force: true });
    }
  });
});
|
||||
@@ -1,45 +1,108 @@
|
||||
import { defineSingleProviderPluginEntry } from "openclaw/plugin-sdk/provider-entry";
|
||||
import type { OpenClawConfig } from "openclaw/plugin-sdk/config-types";
|
||||
import {
|
||||
definePluginEntry,
|
||||
type OpenClawPluginApi,
|
||||
type ProviderAuthMethodNonInteractiveContext,
|
||||
} from "openclaw/plugin-sdk/plugin-entry";
|
||||
import {
|
||||
createProviderApiKeyAuthMethod,
|
||||
normalizeOptionalSecretInput,
|
||||
} from "openclaw/plugin-sdk/provider-auth";
|
||||
import { buildSingleProviderApiKeyCatalog } from "openclaw/plugin-sdk/provider-catalog-shared";
|
||||
import { buildLitellmImageGenerationProvider } from "./image-generation-provider.js";
|
||||
import { applyLitellmConfig, LITELLM_DEFAULT_MODEL_REF } from "./onboard.js";
|
||||
import { buildLitellmProvider } from "./provider-catalog.js";
|
||||
|
||||
const PROVIDER_ID = "litellm";
|
||||
|
||||
export default defineSingleProviderPluginEntry({
|
||||
function applyCustomBaseUrlForNonInteractiveSetup(
|
||||
cfg: OpenClawConfig,
|
||||
customBaseUrl: unknown,
|
||||
): OpenClawConfig {
|
||||
const baseUrl = normalizeOptionalSecretInput(customBaseUrl)?.replace(/\/+$/, "");
|
||||
if (!baseUrl) {
|
||||
return cfg;
|
||||
}
|
||||
const existingProvider = cfg.models?.providers?.[PROVIDER_ID];
|
||||
return {
|
||||
...cfg,
|
||||
models: {
|
||||
...cfg.models,
|
||||
providers: {
|
||||
...cfg.models?.providers,
|
||||
[PROVIDER_ID]: {
|
||||
...existingProvider,
|
||||
baseUrl,
|
||||
models: existingProvider?.models ?? [],
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export default definePluginEntry({
|
||||
id: PROVIDER_ID,
|
||||
name: "LiteLLM Provider",
|
||||
description: "Bundled LiteLLM provider plugin",
|
||||
provider: {
|
||||
label: "LiteLLM",
|
||||
docsPath: "/providers/litellm",
|
||||
auth: [
|
||||
{
|
||||
register(api: OpenClawPluginApi) {
|
||||
const apiKeyAuth = createProviderApiKeyAuthMethod({
|
||||
providerId: PROVIDER_ID,
|
||||
methodId: "api-key",
|
||||
label: "LiteLLM API key",
|
||||
hint: "Unified gateway for 100+ LLM providers",
|
||||
optionKey: "litellmApiKey",
|
||||
flagName: "--litellm-api-key",
|
||||
envVar: "LITELLM_API_KEY",
|
||||
promptMessage: "Enter LiteLLM API key",
|
||||
defaultModel: LITELLM_DEFAULT_MODEL_REF,
|
||||
applyConfig: (cfg) => applyLitellmConfig(cfg),
|
||||
noteTitle: "LiteLLM",
|
||||
noteMessage: [
|
||||
"LiteLLM provides a unified API to 100+ LLM providers.",
|
||||
"Get your API key from your LiteLLM proxy or https://litellm.ai",
|
||||
"Default proxy runs on http://localhost:4000",
|
||||
].join("\n"),
|
||||
wizard: {
|
||||
choiceId: `${PROVIDER_ID}-api-key`,
|
||||
choiceLabel: "LiteLLM API key",
|
||||
groupId: PROVIDER_ID,
|
||||
groupLabel: "LiteLLM",
|
||||
groupHint: "Unified LLM gateway (100+ providers)",
|
||||
methodId: "api-key",
|
||||
label: "LiteLLM API key",
|
||||
hint: "Unified gateway for 100+ LLM providers",
|
||||
optionKey: "litellmApiKey",
|
||||
flagName: "--litellm-api-key",
|
||||
envVar: "LITELLM_API_KEY",
|
||||
promptMessage: "Enter LiteLLM API key",
|
||||
defaultModel: LITELLM_DEFAULT_MODEL_REF,
|
||||
applyConfig: (cfg) => applyLitellmConfig(cfg),
|
||||
noteTitle: "LiteLLM",
|
||||
noteMessage: [
|
||||
"LiteLLM provides a unified API to 100+ LLM providers.",
|
||||
"Get your API key from your LiteLLM proxy or https://litellm.ai",
|
||||
"Default proxy runs on http://localhost:4000",
|
||||
].join("\n"),
|
||||
wizard: {
|
||||
groupHint: "Unified LLM gateway (100+ providers)",
|
||||
},
|
||||
},
|
||||
],
|
||||
catalog: {
|
||||
buildProvider: buildLitellmProvider,
|
||||
allowExplicitBaseUrl: true,
|
||||
},
|
||||
},
|
||||
register(api) {
|
||||
});
|
||||
|
||||
// Register the litellm provider. The auth method wraps the shared API-key
// non-interactive flow so that a `--custom-base-url` flag is folded into the
// config before delegation (per the commit message: scripted setup keeps the
// requested endpoint instead of falling back to the default localhost proxy).
api.registerProvider({
  id: PROVIDER_ID,
  label: "LiteLLM",
  docsPath: "/providers/litellm",
  envVars: ["LITELLM_API_KEY"],
  auth: [
    {
      ...apiKeyAuth,
      runNonInteractive: async (ctx: ProviderAuthMethodNonInteractiveContext) => {
        const runNonInteractive = apiKeyAuth.runNonInteractive;
        // The base method may not define a non-interactive path; mirror that.
        if (!runNonInteractive) {
          return null;
        }
        return await runNonInteractive({
          ...ctx,
          // Inject the custom base URL into the config seen by the base flow.
          config: applyCustomBaseUrlForNonInteractiveSetup(ctx.config, ctx.opts.customBaseUrl),
        });
      },
    },
  ],
  catalog: {
    order: "simple",
    run: (ctx) =>
      buildSingleProviderApiKeyCatalog({
        ctx,
        providerId: PROVIDER_ID,
        buildProvider: buildLitellmProvider,
        allowExplicitBaseUrl: true,
      }),
  },
});
|
||||
api.registerImageGenerationProvider(buildLitellmImageGenerationProvider());
|
||||
},
|
||||
});
|
||||
|
||||
Reference in New Issue
Block a user