From c08f2aa21a6e336b303355297b8bbce08c0fa4a3 Mon Sep 17 00:00:00 2001
From: Gustavo Madeira Santana
Date: Mon, 16 Mar 2026 12:04:41 +0000
Subject: [PATCH] Providers: centralize setup defaults and helper boundaries

---
 src/agents/auth-profiles/upsert-with-lock.ts  | 63 ++++++++++++++++
 .../models-config.providers.discovery.ts      | 34 ++++-----
 src/agents/ollama-models.ts                   |  3 +-
 src/agents/ollama-stream.ts                   |  3 +-
 src/agents/self-hosted-provider-defaults.ts   | 10 +++
 src/cli/config-cli.ts                         |  2 +-
 src/commands/auth-profile-config.ts           | 80 +++++++++++++++++++
 src/commands/ollama-setup.ts                  |  6 +-
 src/commands/onboard-auth.config-core.ts      | 73 +------------------
 src/commands/onboard-custom.test.ts           |  2 +-
 src/commands/onboard-custom.ts                |  2 +-
 src/commands/self-hosted-provider-setup.ts    | 22 +++---
 src/commands/vllm-setup.ts                    | 16 ++--
 13 files changed, 200 insertions(+), 116 deletions(-)
 create mode 100644 src/agents/auth-profiles/upsert-with-lock.ts
 create mode 100644 src/agents/self-hosted-provider-defaults.ts
 create mode 100644 src/commands/auth-profile-config.ts

diff --git a/src/agents/auth-profiles/upsert-with-lock.ts b/src/agents/auth-profiles/upsert-with-lock.ts
new file mode 100644
index 00000000000..965798da940
--- /dev/null
+++ b/src/agents/auth-profiles/upsert-with-lock.ts
@@ -0,0 +1,63 @@
+import { withFileLock } from "../../infra/file-lock.js";
+import { loadJsonFile, saveJsonFile } from "../../infra/json-file.js";
+import { AUTH_STORE_LOCK_OPTIONS, AUTH_STORE_VERSION } from "./constants.js";
+import { ensureAuthStoreFile, resolveAuthStorePath } from "./paths.js";
+import type { AuthProfileCredential, AuthProfileStore, ProfileUsageStats } from "./types.js";
+
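+// Normalize untrusted JSON from disk into a well-formed AuthProfileStore,
+// dropping malformed sections instead of throwing.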
+function coerceAuthProfileStore(raw: unknown): AuthProfileStore {
+  const record = raw && typeof raw === "object" ? (raw as Record<string, unknown>) : {};
+  const profiles =
+    record.profiles && typeof record.profiles === "object" && !Array.isArray(record.profiles)
+      ? { ...(record.profiles as Record<string, AuthProfileCredential>) }
+      : {};
+  const order =
+    record.order && typeof record.order === "object" && !Array.isArray(record.order)
+      ? (record.order as Record<string, string[]>)
+      : undefined;
+  const lastGood =
+    record.lastGood && typeof record.lastGood === "object" && !Array.isArray(record.lastGood)
+      ? (record.lastGood as Record<string, string>)
+      : undefined;
+  const usageStats =
+    record.usageStats && typeof record.usageStats === "object" && !Array.isArray(record.usageStats)
+      ? (record.usageStats as Record<string, ProfileUsageStats>)
+      : undefined;
+
+  return {
+    version:
+      typeof record.version === "number" && Number.isFinite(record.version)
+        ? record.version
+        : AUTH_STORE_VERSION,
+    profiles,
+    ...(order ? { order } : {}),
+    ...(lastGood ? { lastGood } : {}),
+    ...(usageStats ? { usageStats } : {}),
+  };
+}
+
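+/**
+ * Insert or replace a single auth profile while holding the store's file
+ * lock, so concurrent writers cannot clobber each other's updates. Returns
+ * the updated store, or null when locking or persisting fails.
+ */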
+export async function upsertAuthProfileWithLock(params: {
+  profileId: string;
+  credential: AuthProfileCredential;
+  agentDir?: string;
+}): Promise<AuthProfileStore | null> {
+  const authPath = resolveAuthStorePath(params.agentDir);
+  ensureAuthStoreFile(authPath);
+
+  try {
+    return await withFileLock(authPath, AUTH_STORE_LOCK_OPTIONS, async () => {
+      const store = coerceAuthProfileStore(loadJsonFile(authPath));
+      store.profiles[params.profileId] = params.credential;
+      saveJsonFile(authPath, store);
+      return store;
+    });
+  } catch {
+    return null;
+  }
+}
diff --git a/src/agents/models-config.providers.discovery.ts b/src/agents/models-config.providers.discovery.ts
index a6d99afa89f..01dfb28e469 100644
--- a/src/agents/models-config.providers.discovery.ts
+++ b/src/agents/models-config.providers.discovery.ts
@@ -18,8 +18,15 @@ import {
   resolveOllamaApiBase,
   type OllamaTagsResponse,
 } from "./ollama-models.js";
+import {
+  SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
+  SELF_HOSTED_DEFAULT_COST,
+  SELF_HOSTED_DEFAULT_MAX_TOKENS,
+} from "./self-hosted-provider-defaults.js";
+import { SGLANG_DEFAULT_BASE_URL, SGLANG_PROVIDER_LABEL } from "./sglang-defaults.js";
 import { discoverVeniceModels, VENICE_BASE_URL } from "./venice-models.js";
 import { discoverVercelAiGatewayModels, VERCEL_AI_GATEWAY_BASE_URL } from "./vercel-ai-gateway.js";
+import { VLLM_DEFAULT_BASE_URL, VLLM_PROVIDER_LABEL } from "./vllm-defaults.js";
 
 export { resolveOllamaApiBase } from "./ollama-models.js";
 
@@ -31,19 +38,6 @@ const log = createSubsystemLogger("agents/model-providers");
 const OLLAMA_SHOW_CONCURRENCY = 8;
 const OLLAMA_SHOW_MAX_MODELS = 200;
 
-const OPENAI_COMPAT_LOCAL_DEFAULT_CONTEXT_WINDOW = 128000;
-const OPENAI_COMPAT_LOCAL_DEFAULT_MAX_TOKENS = 8192;
-const OPENAI_COMPAT_LOCAL_DEFAULT_COST = {
-  input: 0,
-  output: 0,
-  cacheRead: 0,
-  cacheWrite: 0,
-};
-
-const SGLANG_BASE_URL = "http://127.0.0.1:30000/v1";
-
-const VLLM_BASE_URL = "http://127.0.0.1:8000/v1";
-
 type OpenAICompatModelsResponse = {
   data?: Array<{
     id?: string;
@@ -140,9 +134,9 @@ async function discoverOpenAICompatibleLocalModels(params: {
       name: modelId,
       reasoning: isReasoningModelHeuristic(modelId),
       input: ["text"],
-      cost: OPENAI_COMPAT_LOCAL_DEFAULT_COST,
-      contextWindow: params.contextWindow ?? OPENAI_COMPAT_LOCAL_DEFAULT_CONTEXT_WINDOW,
-      maxTokens: params.maxTokens ?? OPENAI_COMPAT_LOCAL_DEFAULT_MAX_TOKENS,
+      cost: SELF_HOSTED_DEFAULT_COST,
+      contextWindow: params.contextWindow ?? SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
+      maxTokens: params.maxTokens ?? SELF_HOSTED_DEFAULT_MAX_TOKENS,
     } satisfies ModelDefinitionConfig;
   });
 } catch (error) {
@@ -197,11 +191,11 @@ export async function buildVllmProvider(params?: {
   baseUrl?: string;
   apiKey?: string;
 }): Promise {
-  const baseUrl = (params?.baseUrl?.trim() || VLLM_BASE_URL).replace(/\/+$/, "");
+  const baseUrl = (params?.baseUrl?.trim() || VLLM_DEFAULT_BASE_URL).replace(/\/+$/, "");
   const models = await discoverOpenAICompatibleLocalModels({
     baseUrl,
     apiKey: params?.apiKey,
-    label: "vLLM",
+    label: VLLM_PROVIDER_LABEL,
   });
   return {
     baseUrl,
@@ -214,11 +208,11 @@ export async function buildSglangProvider(params?: {
   baseUrl?: string;
   apiKey?: string;
 }): Promise {
-  const baseUrl = (params?.baseUrl?.trim() || SGLANG_BASE_URL).replace(/\/+$/, "");
+  const baseUrl = (params?.baseUrl?.trim() || SGLANG_DEFAULT_BASE_URL).replace(/\/+$/, "");
   const models = await discoverOpenAICompatibleLocalModels({
     baseUrl,
     apiKey: params?.apiKey,
-    label: "SGLang",
+    label: SGLANG_PROVIDER_LABEL,
   });
   return {
     baseUrl,
diff --git a/src/agents/ollama-models.ts b/src/agents/ollama-models.ts
index 20406b3a80e..ee0fcfde447 100644
--- a/src/agents/ollama-models.ts
+++ b/src/agents/ollama-models.ts
@@ -1,7 +1,6 @@
 import type { ModelDefinitionConfig } from "../config/types.models.js";
-import { OLLAMA_NATIVE_BASE_URL } from "./ollama-stream.js";
+import { OLLAMA_DEFAULT_BASE_URL } from "./ollama-defaults.js";
 
-export const OLLAMA_DEFAULT_BASE_URL = OLLAMA_NATIVE_BASE_URL;
 export const OLLAMA_DEFAULT_CONTEXT_WINDOW = 128000;
 export const OLLAMA_DEFAULT_MAX_TOKENS = 8192;
 export const OLLAMA_DEFAULT_COST = {
diff --git a/src/agents/ollama-stream.ts b/src/agents/ollama-stream.ts
index 70a2ef33cf1..f332ad1fd83 100644
--- a/src/agents/ollama-stream.ts
+++ b/src/agents/ollama-stream.ts
@@ -10,6 +10,7 @@ import type {
 } from "@mariozechner/pi-ai";
 import { createAssistantMessageEventStream } from "@mariozechner/pi-ai";
 import { createSubsystemLogger } from "../logging/subsystem.js";
 import { isNonSecretApiKeyMarker } from "./model-auth-markers.js";
+import { OLLAMA_DEFAULT_BASE_URL } from "./ollama-defaults.js";
 import {
   buildAssistantMessage as buildStreamAssistantMessage,
   buildStreamErrorAssistantMessage,
@@ -18,7 +19,7 @@ import {
 
 const log = createSubsystemLogger("ollama-stream");
 
-export const OLLAMA_NATIVE_BASE_URL = "http://127.0.0.1:11434";
+export const OLLAMA_NATIVE_BASE_URL = OLLAMA_DEFAULT_BASE_URL;
 
 export function resolveOllamaBaseUrlForRun(params: {
   modelBaseUrl?: string;
diff --git a/src/agents/self-hosted-provider-defaults.ts b/src/agents/self-hosted-provider-defaults.ts
new file mode 100644
index 00000000000..da9dcc4b1d6
--- /dev/null
+++ b/src/agents/self-hosted-provider-defaults.ts
@@ -0,0 +1,10 @@
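+// Shared fallbacks for models discovered from OpenAI-compatible self-hosted
+// providers (vLLM, SGLang) when setup does not supply explicit limits.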
"providers", "ollama", "apiKey"]; const OLLAMA_PROVIDER_PATH: PathSegment[] = ["models", "providers", "ollama"]; -const OLLAMA_DEFAULT_BASE_URL = "http://127.0.0.1:11434"; function isIndexSegment(raw: string): boolean { return /^[0-9]+$/.test(raw); diff --git a/src/commands/auth-profile-config.ts b/src/commands/auth-profile-config.ts new file mode 100644 index 00000000000..797135b87b2 --- /dev/null +++ b/src/commands/auth-profile-config.ts @@ -0,0 +1,73 @@ +import type { OpenClawConfig } from "../config/config.js"; + +export function applyAuthProfileConfig( + cfg: OpenClawConfig, + params: { + profileId: string; + provider: string; + mode: "api_key" | "oauth" | "token"; + email?: string; + preferProfileFirst?: boolean; + }, +): OpenClawConfig { + const normalizedProvider = params.provider.toLowerCase(); + const profiles = { + ...cfg.auth?.profiles, + [params.profileId]: { + provider: params.provider, + mode: params.mode, + ...(params.email ? { email: params.email } : {}), + }, + }; + + const configuredProviderProfiles = Object.entries(cfg.auth?.profiles ?? {}) + .filter(([, profile]) => profile.provider.toLowerCase() === normalizedProvider) + .map(([profileId, profile]) => ({ profileId, mode: profile.mode })); + + // Maintain `auth.order` when it already exists. Additionally, if we detect + // mixed auth modes for the same provider (e.g. legacy oauth + newly selected + // api_key), create an explicit order to keep the newly selected profile first. + const existingProviderOrder = cfg.auth?.order?.[params.provider]; + const preferProfileFirst = params.preferProfileFirst ?? true; + const reorderedProviderOrder = + existingProviderOrder && preferProfileFirst + ? [ + params.profileId, + ...existingProviderOrder.filter((profileId) => profileId !== params.profileId), + ] + : existingProviderOrder; + const hasMixedConfiguredModes = configuredProviderProfiles.some( + ({ profileId, mode }) => profileId !== params.profileId && mode !== params.mode, + ); + const derivedProviderOrder = + existingProviderOrder === undefined && preferProfileFirst && hasMixedConfiguredModes + ? [ + params.profileId, + ...configuredProviderProfiles + .map(({ profileId }) => profileId) + .filter((profileId) => profileId !== params.profileId), + ] + : undefined; + const order = + existingProviderOrder !== undefined + ? { + ...cfg.auth?.order, + [params.provider]: reorderedProviderOrder?.includes(params.profileId) + ? reorderedProviderOrder + : [...(reorderedProviderOrder ?? []), params.profileId], + } + : derivedProviderOrder + ? { + ...cfg.auth?.order, + [params.provider]: derivedProviderOrder, + } + : cfg.auth?.order; + return { + ...cfg, + auth: { + ...cfg.auth, + profiles, + ...(order ? 
+export function applyAuthProfileConfig(
+  cfg: OpenClawConfig,
+  params: {
+    profileId: string;
+    provider: string;
+    mode: "api_key" | "oauth" | "token";
+    email?: string;
+    preferProfileFirst?: boolean;
+  },
+): OpenClawConfig {
+  const normalizedProvider = params.provider.toLowerCase();
+  const profiles = {
+    ...cfg.auth?.profiles,
+    [params.profileId]: {
+      provider: params.provider,
+      mode: params.mode,
+      ...(params.email ? { email: params.email } : {}),
+    },
+  };
+
+  const configuredProviderProfiles = Object.entries(cfg.auth?.profiles ?? {})
+    .filter(([, profile]) => profile.provider.toLowerCase() === normalizedProvider)
+    .map(([profileId, profile]) => ({ profileId, mode: profile.mode }));
+
+  // Maintain `auth.order` when it already exists. Additionally, if we detect
+  // mixed auth modes for the same provider (e.g. legacy oauth + newly selected
+  // api_key), create an explicit order to keep the newly selected profile first.
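+  // Example: profiles { work: oauth } plus a new api_key profile "personal"
+  // with no explicit order derives auth.order = ["personal", "work"].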
+  const existingProviderOrder = cfg.auth?.order?.[params.provider];
+  const preferProfileFirst = params.preferProfileFirst ?? true;
+  const reorderedProviderOrder =
+    existingProviderOrder && preferProfileFirst
+      ? [
+          params.profileId,
+          ...existingProviderOrder.filter((profileId) => profileId !== params.profileId),
+        ]
+      : existingProviderOrder;
+  const hasMixedConfiguredModes = configuredProviderProfiles.some(
+    ({ profileId, mode }) => profileId !== params.profileId && mode !== params.mode,
+  );
+  const derivedProviderOrder =
+    existingProviderOrder === undefined && preferProfileFirst && hasMixedConfiguredModes
+      ? [
+          params.profileId,
+          ...configuredProviderProfiles
+            .map(({ profileId }) => profileId)
+            .filter((profileId) => profileId !== params.profileId),
+        ]
+      : undefined;
+  const order =
+    existingProviderOrder !== undefined
+      ? {
+          ...cfg.auth?.order,
+          [params.provider]: reorderedProviderOrder?.includes(params.profileId)
+            ? reorderedProviderOrder
+            : [...(reorderedProviderOrder ?? []), params.profileId],
+        }
+      : derivedProviderOrder
+        ? {
+            ...cfg.auth?.order,
+            [params.provider]: derivedProviderOrder,
+          }
+        : cfg.auth?.order;
+  return {
+    ...cfg,
+    auth: {
+      ...cfg.auth,
+      profiles,
+      ...(order ? { order } : {}),
+    },
+  };
+}
diff --git a/src/commands/ollama-setup.ts b/src/commands/ollama-setup.ts
index 060724061bd..4557f606bb6 100644
--- a/src/commands/ollama-setup.ts
+++ b/src/commands/ollama-setup.ts
@@ -1,6 +1,6 @@
-import { upsertAuthProfileWithLock } from "../agents/auth-profiles.js";
+import { upsertAuthProfileWithLock } from "../agents/auth-profiles/upsert-with-lock.js";
+import { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-defaults.js";
 import {
-  OLLAMA_DEFAULT_BASE_URL,
   buildOllamaModelDefinition,
   enrichOllamaModelsWithContext,
   fetchOllamaModels,
@@ -15,7 +15,7 @@ import { applyAgentDefaultModelPrimary } from "./onboard-auth.config-shared.js";
 import { openUrl } from "./onboard-helpers.js";
 import type { OnboardMode, OnboardOptions } from "./onboard-types.js";
 
-export { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-models.js";
+export { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-defaults.js";
 export const OLLAMA_DEFAULT_MODEL = "glm-4.7-flash";
 
 const OLLAMA_SUGGESTED_MODELS_LOCAL = ["glm-4.7-flash"];
diff --git a/src/commands/onboard-auth.config-core.ts b/src/commands/onboard-auth.config-core.ts
index 0afd59c3910..c939a2cb99d 100644
--- a/src/commands/onboard-auth.config-core.ts
+++ b/src/commands/onboard-auth.config-core.ts
@@ -84,6 +84,7 @@ import {
   MODELSTUDIO_GLOBAL_BASE_URL,
   MODELSTUDIO_DEFAULT_MODEL_REF,
 } from "./onboard-auth.models.js";
+export { applyAuthProfileConfig } from "./auth-profile-config.js";
 
 function mergeProviderModels(
   existingProvider: Record | undefined,
@@ -484,78 +485,6 @@ export function applyKilocodeConfig(cfg: OpenClawConfig): OpenClawConfig {
   return applyAgentDefaultModelPrimary(next, KILOCODE_DEFAULT_MODEL_REF);
 }
 
-export function applyAuthProfileConfig(
-  cfg: OpenClawConfig,
-  params: {
-    profileId: string;
-    provider: string;
-    mode: "api_key" | "oauth" | "token";
-    email?: string;
-    preferProfileFirst?: boolean;
-  },
-): OpenClawConfig {
-  const normalizedProvider = params.provider.toLowerCase();
-  const profiles = {
-    ...cfg.auth?.profiles,
-    [params.profileId]: {
-      provider: params.provider,
-      mode: params.mode,
-      ...(params.email ? { email: params.email } : {}),
-    },
-  };
-
-  const configuredProviderProfiles = Object.entries(cfg.auth?.profiles ?? {})
-    .filter(([, profile]) => profile.provider.toLowerCase() === normalizedProvider)
-    .map(([profileId, profile]) => ({ profileId, mode: profile.mode }));
-
-  // Maintain `auth.order` when it already exists. Additionally, if we detect
-  // mixed auth modes for the same provider (e.g. legacy oauth + newly selected
-  // api_key), create an explicit order to keep the newly selected profile first.
-  const existingProviderOrder = cfg.auth?.order?.[params.provider];
-  const preferProfileFirst = params.preferProfileFirst ?? true;
-  const reorderedProviderOrder =
-    existingProviderOrder && preferProfileFirst
-      ? [
-          params.profileId,
-          ...existingProviderOrder.filter((profileId) => profileId !== params.profileId),
-        ]
-      : existingProviderOrder;
-  const hasMixedConfiguredModes = configuredProviderProfiles.some(
-    ({ profileId, mode }) => profileId !== params.profileId && mode !== params.mode,
-  );
-  const derivedProviderOrder =
-    existingProviderOrder === undefined && preferProfileFirst && hasMixedConfiguredModes
-      ? [
-          params.profileId,
-          ...configuredProviderProfiles
-            .map(({ profileId }) => profileId)
-            .filter((profileId) => profileId !== params.profileId),
-        ]
-      : undefined;
-  const order =
-    existingProviderOrder !== undefined
-      ? {
-          ...cfg.auth?.order,
-          [params.provider]: reorderedProviderOrder?.includes(params.profileId)
-            ? reorderedProviderOrder
-            : [...(reorderedProviderOrder ?? []), params.profileId],
-        }
-      : derivedProviderOrder
-        ? {
-            ...cfg.auth?.order,
-            [params.provider]: derivedProviderOrder,
-          }
-        : cfg.auth?.order;
-  return {
-    ...cfg,
-    auth: {
-      ...cfg.auth,
-      profiles,
-      ...(order ? { order } : {}),
-    },
-  };
-}
-
 export function applyQianfanProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
   const models = { ...cfg.agents?.defaults?.models };
   models[QIANFAN_DEFAULT_MODEL_REF] = {
diff --git a/src/commands/onboard-custom.test.ts b/src/commands/onboard-custom.test.ts
index 6d78766853a..cf86da64211 100644
--- a/src/commands/onboard-custom.test.ts
+++ b/src/commands/onboard-custom.test.ts
@@ -1,6 +1,6 @@
 import { afterEach, describe, expect, it, vi } from "vitest";
 import { CONTEXT_WINDOW_HARD_MIN_TOKENS } from "../agents/context-window-guard.js";
-import { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-models.js";
+import { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-defaults.js";
 import type { OpenClawConfig } from "../config/config.js";
 import { defaultRuntime } from "../runtime.js";
 import {
diff --git a/src/commands/onboard-custom.ts b/src/commands/onboard-custom.ts
index 874018a74ea..9de8e3f85cf 100644
--- a/src/commands/onboard-custom.ts
+++ b/src/commands/onboard-custom.ts
@@ -1,7 +1,7 @@
 import { CONTEXT_WINDOW_HARD_MIN_TOKENS } from "../agents/context-window-guard.js";
 import { DEFAULT_PROVIDER } from "../agents/defaults.js";
 import { buildModelAliasIndex, modelKey } from "../agents/model-selection.js";
-import { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-models.js";
+import { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-defaults.js";
 import type { OpenClawConfig } from "../config/config.js";
 import type { ModelProviderConfig } from "../config/types.models.js";
 import { isSecretRef, type SecretInput } from "../config/types.secrets.js";
diff --git a/src/commands/self-hosted-provider-setup.ts b/src/commands/self-hosted-provider-setup.ts
index c067d797f15..ec2d8c683e3 100644
--- a/src/commands/self-hosted-provider-setup.ts
+++ b/src/commands/self-hosted-provider-setup.ts
@@ -1,5 +1,10 @@
-import { upsertAuthProfileWithLock } from "../agents/auth-profiles.js";
 import type { ApiKeyCredential, AuthProfileCredential } from "../agents/auth-profiles/types.js";
+import { upsertAuthProfileWithLock } from "../agents/auth-profiles/upsert-with-lock.js";
+import {
+  SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
+  SELF_HOSTED_DEFAULT_COST,
+  SELF_HOSTED_DEFAULT_MAX_TOKENS,
+} from "../agents/self-hosted-provider-defaults.js";
 import type { OpenClawConfig } from "../config/config.js";
 import type {
   ProviderDiscoveryContext,
@@ -8,16 +13,13 @@ import type {
   ProviderNonInteractiveApiKeyResult,
 } from "../plugins/types.js";
 import type { WizardPrompter } from "../wizard/prompts.js";
-import { applyAuthProfileConfig } from "./onboard-auth.js";
+import { applyAuthProfileConfig } from "./auth-profile-config.js";
 
-export const SELF_HOSTED_DEFAULT_CONTEXT_WINDOW = 128000;
-export const SELF_HOSTED_DEFAULT_MAX_TOKENS = 8192;
-export const SELF_HOSTED_DEFAULT_COST = {
-  input: 0,
-  output: 0,
-  cacheRead: 0,
-  cacheWrite: 0,
-};
+export {
+  SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
+  SELF_HOSTED_DEFAULT_COST,
+  SELF_HOSTED_DEFAULT_MAX_TOKENS,
+} from "../agents/self-hosted-provider-defaults.js";
 
 export function applyProviderDefaultModel(cfg: OpenClawConfig, modelRef: string): OpenClawConfig {
   const existingModel = cfg.agents?.defaults?.model;
diff --git a/src/commands/vllm-setup.ts b/src/commands/vllm-setup.ts
index 4d8657306e6..4c44587c06e 100644
--- a/src/commands/vllm-setup.ts
+++ b/src/commands/vllm-setup.ts
@@ -1,14 +1,20 @@
+import {
+  VLLM_DEFAULT_API_KEY_ENV_VAR,
+  VLLM_DEFAULT_BASE_URL,
+  VLLM_MODEL_PLACEHOLDER,
+  VLLM_PROVIDER_LABEL,
+} from "../agents/vllm-defaults.js";
 import type { OpenClawConfig } from "../config/config.js";
 import type { WizardPrompter } from "../wizard/prompts.js";
 import {
   applyProviderDefaultModel,
-  promptAndConfigureOpenAICompatibleSelfHostedProvider,
   SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
   SELF_HOSTED_DEFAULT_COST,
   SELF_HOSTED_DEFAULT_MAX_TOKENS,
+  promptAndConfigureOpenAICompatibleSelfHostedProvider,
 } from "./self-hosted-provider-setup.js";
 
-export const VLLM_DEFAULT_BASE_URL = "http://127.0.0.1:8000/v1";
+export { VLLM_DEFAULT_BASE_URL } from "../agents/vllm-defaults.js";
 export const VLLM_DEFAULT_CONTEXT_WINDOW = SELF_HOSTED_DEFAULT_CONTEXT_WINDOW;
 export const VLLM_DEFAULT_MAX_TOKENS = SELF_HOSTED_DEFAULT_MAX_TOKENS;
 export const VLLM_DEFAULT_COST = SELF_HOSTED_DEFAULT_COST;
@@ -21,10 +27,10 @@ export async function promptAndConfigureVllm(params: {
     cfg: params.cfg,
     prompter: params.prompter,
     providerId: "vllm",
-    providerLabel: "vLLM",
+    providerLabel: VLLM_PROVIDER_LABEL,
     defaultBaseUrl: VLLM_DEFAULT_BASE_URL,
-    defaultApiKeyEnvVar: "VLLM_API_KEY",
-    modelPlaceholder: "meta-llama/Meta-Llama-3-8B-Instruct",
+    defaultApiKeyEnvVar: VLLM_DEFAULT_API_KEY_ENV_VAR,
+    modelPlaceholder: VLLM_MODEL_PLACEHOLDER,
   });
   return {
     config: result.config,