Providers: centralize setup defaults and helper boundaries

Gustavo Madeira Santana
2026-03-16 12:04:41 +00:00
parent 9fc6c1929a
commit c08f2aa21a
13 changed files with 184 additions and 116 deletions

View File

@@ -0,0 +1,56 @@
import { withFileLock } from "../../infra/file-lock.js";
import { loadJsonFile, saveJsonFile } from "../../infra/json-file.js";
import { AUTH_STORE_LOCK_OPTIONS, AUTH_STORE_VERSION } from "./constants.js";
import { ensureAuthStoreFile, resolveAuthStorePath } from "./paths.js";
import type { AuthProfileCredential, AuthProfileStore, ProfileUsageStats } from "./types.js";
function coerceAuthProfileStore(raw: unknown): AuthProfileStore {
const record = raw && typeof raw === "object" ? (raw as Record<string, unknown>) : {};
const profiles =
record.profiles && typeof record.profiles === "object" && !Array.isArray(record.profiles)
? { ...(record.profiles as Record<string, AuthProfileCredential>) }
: {};
const order =
record.order && typeof record.order === "object" && !Array.isArray(record.order)
? (record.order as Record<string, string[]>)
: undefined;
const lastGood =
record.lastGood && typeof record.lastGood === "object" && !Array.isArray(record.lastGood)
? (record.lastGood as Record<string, string>)
: undefined;
const usageStats =
record.usageStats && typeof record.usageStats === "object" && !Array.isArray(record.usageStats)
? (record.usageStats as Record<string, ProfileUsageStats>)
: undefined;
return {
version:
typeof record.version === "number" && Number.isFinite(record.version)
? record.version
: AUTH_STORE_VERSION,
profiles,
...(order ? { order } : {}),
...(lastGood ? { lastGood } : {}),
...(usageStats ? { usageStats } : {}),
};
}
export async function upsertAuthProfileWithLock(params: {
profileId: string;
credential: AuthProfileCredential;
agentDir?: string;
}): Promise<AuthProfileStore | null> {
const authPath = resolveAuthStorePath(params.agentDir);
ensureAuthStoreFile(authPath);
try {
return await withFileLock(authPath, AUTH_STORE_LOCK_OPTIONS, async () => {
const store = coerceAuthProfileStore(loadJsonFile(authPath));
store.profiles[params.profileId] = params.credential;
saveJsonFile(authPath, store);
return store;
});
} catch {
return null;
}
}
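
For orientation, a minimal sketch of how a caller would use the new lock-guarded helper. The wrapper function and its name are illustrative and not part of this commit; the import paths assume a caller in a sibling directory (as in the onboarding commands below).

// Sketch only: persist a credential for a profile id under the auth-store file lock.
import type { AuthProfileCredential } from "../agents/auth-profiles/types.js";
import { upsertAuthProfileWithLock } from "../agents/auth-profiles/upsert-with-lock.js";

async function persistCredential(profileId: string, credential: AuthProfileCredential): Promise<boolean> {
  // Returns the updated store on success, or null when the lock could not be taken
  // or the write failed (the helper swallows those errors instead of throwing).
  const store = await upsertAuthProfileWithLock({ profileId, credential });
  return store !== null;
}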

View File

@@ -18,8 +18,15 @@ import {
resolveOllamaApiBase,
type OllamaTagsResponse,
} from "./ollama-models.js";
import {
SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
SELF_HOSTED_DEFAULT_COST,
SELF_HOSTED_DEFAULT_MAX_TOKENS,
} from "./self-hosted-provider-defaults.js";
import { SGLANG_DEFAULT_BASE_URL, SGLANG_PROVIDER_LABEL } from "./sglang-defaults.js";
import { discoverVeniceModels, VENICE_BASE_URL } from "./venice-models.js";
import { discoverVercelAiGatewayModels, VERCEL_AI_GATEWAY_BASE_URL } from "./vercel-ai-gateway.js";
import { VLLM_DEFAULT_BASE_URL, VLLM_PROVIDER_LABEL } from "./vllm-defaults.js";
export { resolveOllamaApiBase } from "./ollama-models.js";
@@ -31,19 +38,6 @@ const log = createSubsystemLogger("agents/model-providers");
const OLLAMA_SHOW_CONCURRENCY = 8;
const OLLAMA_SHOW_MAX_MODELS = 200;
const OPENAI_COMPAT_LOCAL_DEFAULT_CONTEXT_WINDOW = 128000;
const OPENAI_COMPAT_LOCAL_DEFAULT_MAX_TOKENS = 8192;
const OPENAI_COMPAT_LOCAL_DEFAULT_COST = {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
};
const SGLANG_BASE_URL = "http://127.0.0.1:30000/v1";
const VLLM_BASE_URL = "http://127.0.0.1:8000/v1";
type OpenAICompatModelsResponse = {
data?: Array<{
id?: string;
@@ -140,9 +134,9 @@ async function discoverOpenAICompatibleLocalModels(params: {
name: modelId,
reasoning: isReasoningModelHeuristic(modelId),
input: ["text"],
cost: OPENAI_COMPAT_LOCAL_DEFAULT_COST,
contextWindow: params.contextWindow ?? OPENAI_COMPAT_LOCAL_DEFAULT_CONTEXT_WINDOW,
maxTokens: params.maxTokens ?? OPENAI_COMPAT_LOCAL_DEFAULT_MAX_TOKENS,
cost: SELF_HOSTED_DEFAULT_COST,
contextWindow: params.contextWindow ?? SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
maxTokens: params.maxTokens ?? SELF_HOSTED_DEFAULT_MAX_TOKENS,
} satisfies ModelDefinitionConfig;
});
} catch (error) {
@@ -197,11 +191,11 @@ export async function buildVllmProvider(params?: {
baseUrl?: string;
apiKey?: string;
}): Promise<ProviderConfig> {
const baseUrl = (params?.baseUrl?.trim() || VLLM_BASE_URL).replace(/\/+$/, "");
const baseUrl = (params?.baseUrl?.trim() || VLLM_DEFAULT_BASE_URL).replace(/\/+$/, "");
const models = await discoverOpenAICompatibleLocalModels({
baseUrl,
apiKey: params?.apiKey,
label: "vLLM",
label: VLLM_PROVIDER_LABEL,
});
return {
baseUrl,
@@ -214,11 +208,11 @@ export async function buildSglangProvider(params?: {
baseUrl?: string;
apiKey?: string;
}): Promise<ProviderConfig> {
const baseUrl = (params?.baseUrl?.trim() || SGLANG_BASE_URL).replace(/\/+$/, "");
const baseUrl = (params?.baseUrl?.trim() || SGLANG_DEFAULT_BASE_URL).replace(/\/+$/, "");
const models = await discoverOpenAICompatibleLocalModels({
baseUrl,
apiKey: params?.apiKey,
label: "SGLang",
label: SGLANG_PROVIDER_LABEL,
});
return {
baseUrl,

View File

@@ -1,7 +1,6 @@
import type { ModelDefinitionConfig } from "../config/types.models.js";
import { OLLAMA_NATIVE_BASE_URL } from "./ollama-stream.js";
import { OLLAMA_DEFAULT_BASE_URL } from "./ollama-defaults.js";
export const OLLAMA_DEFAULT_BASE_URL = OLLAMA_NATIVE_BASE_URL;
export const OLLAMA_DEFAULT_CONTEXT_WINDOW = 128000;
export const OLLAMA_DEFAULT_MAX_TOKENS = 8192;
export const OLLAMA_DEFAULT_COST = {

View File

@@ -10,6 +10,7 @@ import type {
import { createAssistantMessageEventStream } from "@mariozechner/pi-ai";
import { createSubsystemLogger } from "../logging/subsystem.js";
import { isNonSecretApiKeyMarker } from "./model-auth-markers.js";
import { OLLAMA_DEFAULT_BASE_URL } from "./ollama-defaults.js";
import {
buildAssistantMessage as buildStreamAssistantMessage,
buildStreamErrorAssistantMessage,
@@ -18,7 +19,7 @@ import {
const log = createSubsystemLogger("ollama-stream");
export const OLLAMA_NATIVE_BASE_URL = "http://127.0.0.1:11434";
export const OLLAMA_NATIVE_BASE_URL = OLLAMA_DEFAULT_BASE_URL;
export function resolveOllamaBaseUrlForRun(params: {
modelBaseUrl?: string;

View File

@@ -0,0 +1,8 @@
export const SELF_HOSTED_DEFAULT_CONTEXT_WINDOW = 128000;
export const SELF_HOSTED_DEFAULT_MAX_TOKENS = 8192;
export const SELF_HOSTED_DEFAULT_COST = {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
};
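
The shared defaults modules this commit switches to (ollama-defaults, sglang-defaults, vllm-defaults) are not among the files rendered here. Judging from the literals they replace elsewhere in the diff, they presumably export at least constants along these lines; treat this as an assumption about their shape, not their actual contents.

// Presumed exports of agents/ollama-defaults.ts, agents/sglang-defaults.ts and
// agents/vllm-defaults.ts (combined here for illustration); values are taken from
// the hard-coded literals removed in this commit.
export const OLLAMA_DEFAULT_BASE_URL = "http://127.0.0.1:11434";

export const SGLANG_DEFAULT_BASE_URL = "http://127.0.0.1:30000/v1";
export const SGLANG_PROVIDER_LABEL = "SGLang";

export const VLLM_DEFAULT_BASE_URL = "http://127.0.0.1:8000/v1";
export const VLLM_PROVIDER_LABEL = "vLLM";
export const VLLM_DEFAULT_API_KEY_ENV_VAR = "VLLM_API_KEY";
export const VLLM_MODEL_PLACEHOLDER = "meta-llama/Meta-Llama-3-8B-Instruct";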

View File

@@ -1,5 +1,6 @@
import type { Command } from "commander";
import JSON5 from "json5";
import { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-defaults.js";
import { readConfigFileSnapshot, writeConfigFile } from "../config/config.js";
import { formatConfigIssueLines, normalizeConfigIssues } from "../config/issue-format.js";
import { CONFIG_PATH } from "../config/paths.js";
@@ -20,7 +21,6 @@ type ConfigSetParseOpts = {
const OLLAMA_API_KEY_PATH: PathSegment[] = ["models", "providers", "ollama", "apiKey"];
const OLLAMA_PROVIDER_PATH: PathSegment[] = ["models", "providers", "ollama"];
const OLLAMA_DEFAULT_BASE_URL = "http://127.0.0.1:11434";
function isIndexSegment(raw: string): boolean {
return /^[0-9]+$/.test(raw);

View File

@@ -0,0 +1,73 @@
import type { OpenClawConfig } from "../config/config.js";
export function applyAuthProfileConfig(
cfg: OpenClawConfig,
params: {
profileId: string;
provider: string;
mode: "api_key" | "oauth" | "token";
email?: string;
preferProfileFirst?: boolean;
},
): OpenClawConfig {
const normalizedProvider = params.provider.toLowerCase();
const profiles = {
...cfg.auth?.profiles,
[params.profileId]: {
provider: params.provider,
mode: params.mode,
...(params.email ? { email: params.email } : {}),
},
};
const configuredProviderProfiles = Object.entries(cfg.auth?.profiles ?? {})
.filter(([, profile]) => profile.provider.toLowerCase() === normalizedProvider)
.map(([profileId, profile]) => ({ profileId, mode: profile.mode }));
// Maintain `auth.order` when it already exists. Additionally, if we detect
// mixed auth modes for the same provider (e.g. legacy oauth + newly selected
// api_key), create an explicit order to keep the newly selected profile first.
const existingProviderOrder = cfg.auth?.order?.[params.provider];
const preferProfileFirst = params.preferProfileFirst ?? true;
const reorderedProviderOrder =
existingProviderOrder && preferProfileFirst
? [
params.profileId,
...existingProviderOrder.filter((profileId) => profileId !== params.profileId),
]
: existingProviderOrder;
const hasMixedConfiguredModes = configuredProviderProfiles.some(
({ profileId, mode }) => profileId !== params.profileId && mode !== params.mode,
);
const derivedProviderOrder =
existingProviderOrder === undefined && preferProfileFirst && hasMixedConfiguredModes
? [
params.profileId,
...configuredProviderProfiles
.map(({ profileId }) => profileId)
.filter((profileId) => profileId !== params.profileId),
]
: undefined;
const order =
existingProviderOrder !== undefined
? {
...cfg.auth?.order,
[params.provider]: reorderedProviderOrder?.includes(params.profileId)
? reorderedProviderOrder
: [...(reorderedProviderOrder ?? []), params.profileId],
}
: derivedProviderOrder
? {
...cfg.auth?.order,
[params.provider]: derivedProviderOrder,
}
: cfg.auth?.order;
return {
...cfg,
auth: {
...cfg.auth,
profiles,
...(order ? { order } : {}),
},
};
}
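
To make the ordering rule in the comment above concrete, here is an illustrative call. The provider name, profile ids, and the partial config literal are assumptions for the example (the real OpenClawConfig has more fields, hence the cast), and the import paths assume a caller in the same commands directory.

import type { OpenClawConfig } from "../config/config.js";
import { applyAuthProfileConfig } from "./auth-profile-config.js";

// A config with one legacy oauth profile for the provider and no explicit auth.order.
const cfg = {
  auth: { profiles: { "anthropic-oauth": { provider: "anthropic", mode: "oauth" } } },
} as unknown as OpenClawConfig;

const next = applyAuthProfileConfig(cfg, {
  profileId: "anthropic-api",
  provider: "anthropic",
  mode: "api_key",
});
// Mixed modes for the provider and no pre-existing order, so an explicit order is
// derived with the newly selected profile first:
//   next.auth.order.anthropic -> ["anthropic-api", "anthropic-oauth"]
// With no mode mismatch (or preferProfileFirst: false), auth.order would be left untouched.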

View File

@@ -1,6 +1,6 @@
import { upsertAuthProfileWithLock } from "../agents/auth-profiles.js";
import { upsertAuthProfileWithLock } from "../agents/auth-profiles/upsert-with-lock.js";
import { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-defaults.js";
import {
OLLAMA_DEFAULT_BASE_URL,
buildOllamaModelDefinition,
enrichOllamaModelsWithContext,
fetchOllamaModels,
@@ -15,7 +15,7 @@ import { applyAgentDefaultModelPrimary } from "./onboard-auth.config-shared.js";
import { openUrl } from "./onboard-helpers.js";
import type { OnboardMode, OnboardOptions } from "./onboard-types.js";
export { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-models.js";
export { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-defaults.js";
export const OLLAMA_DEFAULT_MODEL = "glm-4.7-flash";
const OLLAMA_SUGGESTED_MODELS_LOCAL = ["glm-4.7-flash"];

View File

@@ -84,6 +84,7 @@ import {
MODELSTUDIO_GLOBAL_BASE_URL,
MODELSTUDIO_DEFAULT_MODEL_REF,
} from "./onboard-auth.models.js";
export { applyAuthProfileConfig } from "./auth-profile-config.js";
function mergeProviderModels<T extends { id: string }>(
existingProvider: Record<string, unknown> | undefined,
@@ -484,78 +485,6 @@ export function applyKilocodeConfig(cfg: OpenClawConfig): OpenClawConfig {
return applyAgentDefaultModelPrimary(next, KILOCODE_DEFAULT_MODEL_REF);
}
export function applyAuthProfileConfig(
cfg: OpenClawConfig,
params: {
profileId: string;
provider: string;
mode: "api_key" | "oauth" | "token";
email?: string;
preferProfileFirst?: boolean;
},
): OpenClawConfig {
const normalizedProvider = params.provider.toLowerCase();
const profiles = {
...cfg.auth?.profiles,
[params.profileId]: {
provider: params.provider,
mode: params.mode,
...(params.email ? { email: params.email } : {}),
},
};
const configuredProviderProfiles = Object.entries(cfg.auth?.profiles ?? {})
.filter(([, profile]) => profile.provider.toLowerCase() === normalizedProvider)
.map(([profileId, profile]) => ({ profileId, mode: profile.mode }));
// Maintain `auth.order` when it already exists. Additionally, if we detect
// mixed auth modes for the same provider (e.g. legacy oauth + newly selected
// api_key), create an explicit order to keep the newly selected profile first.
const existingProviderOrder = cfg.auth?.order?.[params.provider];
const preferProfileFirst = params.preferProfileFirst ?? true;
const reorderedProviderOrder =
existingProviderOrder && preferProfileFirst
? [
params.profileId,
...existingProviderOrder.filter((profileId) => profileId !== params.profileId),
]
: existingProviderOrder;
const hasMixedConfiguredModes = configuredProviderProfiles.some(
({ profileId, mode }) => profileId !== params.profileId && mode !== params.mode,
);
const derivedProviderOrder =
existingProviderOrder === undefined && preferProfileFirst && hasMixedConfiguredModes
? [
params.profileId,
...configuredProviderProfiles
.map(({ profileId }) => profileId)
.filter((profileId) => profileId !== params.profileId),
]
: undefined;
const order =
existingProviderOrder !== undefined
? {
...cfg.auth?.order,
[params.provider]: reorderedProviderOrder?.includes(params.profileId)
? reorderedProviderOrder
: [...(reorderedProviderOrder ?? []), params.profileId],
}
: derivedProviderOrder
? {
...cfg.auth?.order,
[params.provider]: derivedProviderOrder,
}
: cfg.auth?.order;
return {
...cfg,
auth: {
...cfg.auth,
profiles,
...(order ? { order } : {}),
},
};
}
export function applyQianfanProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
const models = { ...cfg.agents?.defaults?.models };
models[QIANFAN_DEFAULT_MODEL_REF] = {

View File

@@ -1,6 +1,6 @@
import { afterEach, describe, expect, it, vi } from "vitest";
import { CONTEXT_WINDOW_HARD_MIN_TOKENS } from "../agents/context-window-guard.js";
import { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-models.js";
import { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-defaults.js";
import type { OpenClawConfig } from "../config/config.js";
import { defaultRuntime } from "../runtime.js";
import {

View File

@@ -1,7 +1,7 @@
import { CONTEXT_WINDOW_HARD_MIN_TOKENS } from "../agents/context-window-guard.js";
import { DEFAULT_PROVIDER } from "../agents/defaults.js";
import { buildModelAliasIndex, modelKey } from "../agents/model-selection.js";
import { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-models.js";
import { OLLAMA_DEFAULT_BASE_URL } from "../agents/ollama-defaults.js";
import type { OpenClawConfig } from "../config/config.js";
import type { ModelProviderConfig } from "../config/types.models.js";
import { isSecretRef, type SecretInput } from "../config/types.secrets.js";

View File

@@ -1,5 +1,10 @@
import { upsertAuthProfileWithLock } from "../agents/auth-profiles.js";
import type { ApiKeyCredential, AuthProfileCredential } from "../agents/auth-profiles/types.js";
import { upsertAuthProfileWithLock } from "../agents/auth-profiles/upsert-with-lock.js";
import {
SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
SELF_HOSTED_DEFAULT_COST,
SELF_HOSTED_DEFAULT_MAX_TOKENS,
} from "../agents/self-hosted-provider-defaults.js";
import type { OpenClawConfig } from "../config/config.js";
import type {
ProviderDiscoveryContext,
@@ -8,16 +13,13 @@ import type {
ProviderNonInteractiveApiKeyResult,
} from "../plugins/types.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import { applyAuthProfileConfig } from "./onboard-auth.js";
import { applyAuthProfileConfig } from "./auth-profile-config.js";
export const SELF_HOSTED_DEFAULT_CONTEXT_WINDOW = 128000;
export const SELF_HOSTED_DEFAULT_MAX_TOKENS = 8192;
export const SELF_HOSTED_DEFAULT_COST = {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
};
export {
SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
SELF_HOSTED_DEFAULT_COST,
SELF_HOSTED_DEFAULT_MAX_TOKENS,
} from "../agents/self-hosted-provider-defaults.js";
export function applyProviderDefaultModel(cfg: OpenClawConfig, modelRef: string): OpenClawConfig {
const existingModel = cfg.agents?.defaults?.model;

View File

@@ -1,14 +1,20 @@
import {
VLLM_DEFAULT_API_KEY_ENV_VAR,
VLLM_DEFAULT_BASE_URL,
VLLM_MODEL_PLACEHOLDER,
VLLM_PROVIDER_LABEL,
} from "../agents/vllm-defaults.js";
import type { OpenClawConfig } from "../config/config.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import {
applyProviderDefaultModel,
promptAndConfigureOpenAICompatibleSelfHostedProvider,
SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
SELF_HOSTED_DEFAULT_COST,
SELF_HOSTED_DEFAULT_MAX_TOKENS,
promptAndConfigureOpenAICompatibleSelfHostedProvider,
} from "./self-hosted-provider-setup.js";
export const VLLM_DEFAULT_BASE_URL = "http://127.0.0.1:8000/v1";
export { VLLM_DEFAULT_BASE_URL } from "../agents/vllm-defaults.js";
export const VLLM_DEFAULT_CONTEXT_WINDOW = SELF_HOSTED_DEFAULT_CONTEXT_WINDOW;
export const VLLM_DEFAULT_MAX_TOKENS = SELF_HOSTED_DEFAULT_MAX_TOKENS;
export const VLLM_DEFAULT_COST = SELF_HOSTED_DEFAULT_COST;
@@ -21,10 +27,10 @@ export async function promptAndConfigureVllm(params: {
cfg: params.cfg,
prompter: params.prompter,
providerId: "vllm",
providerLabel: "vLLM",
providerLabel: VLLM_PROVIDER_LABEL,
defaultBaseUrl: VLLM_DEFAULT_BASE_URL,
defaultApiKeyEnvVar: "VLLM_API_KEY",
modelPlaceholder: "meta-llama/Meta-Llama-3-8B-Instruct",
defaultApiKeyEnvVar: VLLM_DEFAULT_API_KEY_ENV_VAR,
modelPlaceholder: VLLM_MODEL_PLACEHOLDER,
});
return {
config: result.config,