mirror of
https://github.com/moltbot/moltbot.git
synced 2026-03-08 06:54:24 +00:00
feat(secrets): replace migrate flow with audit/configure/apply
This commit is contained in:
committed by
Peter Steinberger
parent
8944b75e16
commit
f413e314b9
@@ -270,8 +270,9 @@ Note: plugins can add additional top-level commands (for example `openclaw voice
|
||||
## Secrets
|
||||
|
||||
- `openclaw secrets reload` — re-resolve refs and atomically swap the runtime snapshot.
|
||||
- `openclaw secrets migrate` — migrate plaintext static secrets to file-backed refs (`--write` to apply; dry-run by default).
|
||||
- `openclaw secrets migrate --rollback <backup-id>` — restore from a migration backup.
|
||||
- `openclaw secrets audit` — scan for plaintext residues, unresolved refs, and precedence drift.
|
||||
- `openclaw secrets configure` — interactive helper to build SecretRef plan and preflight/apply safely.
|
||||
- `openclaw secrets apply --from <plan.json>` — apply a previously generated plan (`--dry-run` supported).
|
||||
|
||||
## Plugins
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
---
|
||||
summary: "CLI reference for `openclaw secrets` (reload and migration operations)"
|
||||
summary: "CLI reference for `openclaw secrets` (reload, audit, configure, apply)"
|
||||
read_when:
|
||||
- Re-resolving secret refs at runtime
|
||||
- Migrating plaintext secrets into file-backed refs
|
||||
- Rolling back secrets migration backups
|
||||
- Auditing plaintext residues and unresolved refs
|
||||
- Configuring SecretRefs and applying one-way scrub changes
|
||||
title: "secrets"
|
||||
---
|
||||
|
||||
@@ -31,68 +31,64 @@ Notes:
|
||||
- If resolution fails, gateway keeps last-known-good snapshot.
|
||||
- JSON response includes `warningCount`.
|
||||
|
||||
## Migrate plaintext secrets
|
||||
## Audit
|
||||
|
||||
Dry-run by default:
|
||||
Scan OpenClaw state for:
|
||||
|
||||
- plaintext secret storage
|
||||
- unresolved refs
|
||||
- precedence drift (`auth-profiles` shadowing config refs)
|
||||
- legacy residues (`auth.json`, OAuth out-of-scope notes)
|
||||
|
||||
```bash
|
||||
openclaw secrets migrate
|
||||
openclaw secrets migrate --json
|
||||
openclaw secrets audit
|
||||
openclaw secrets audit --check
|
||||
openclaw secrets audit --json
|
||||
```
|
||||
|
||||
Apply changes:
|
||||
Exit behavior:
|
||||
|
||||
- `--check` exits non-zero on findings.
|
||||
- unresolved refs exit with a higher-priority non-zero code.
|
||||
|
||||
## Configure (interactive helper)
|
||||
|
||||
Build SecretRef changes interactively, run preflight, and optionally apply:
|
||||
|
||||
```bash
|
||||
openclaw secrets migrate --write
|
||||
openclaw secrets configure
|
||||
openclaw secrets configure --plan-out /tmp/openclaw-secrets-plan.json
|
||||
openclaw secrets configure --apply --yes
|
||||
openclaw secrets configure --json
|
||||
```
|
||||
|
||||
Skip `.env` scrubbing:
|
||||
Notes:
|
||||
|
||||
- `configure` targets secret-bearing fields in `openclaw.json`.
|
||||
- It performs preflight resolution before apply.
|
||||
- Apply path is one-way for migrated plaintext values.
|
||||
|
||||
## Apply a saved plan
|
||||
|
||||
Apply or preflight a plan generated previously:
|
||||
|
||||
```bash
|
||||
openclaw secrets migrate --write --no-scrub-env
|
||||
openclaw secrets apply --from /tmp/openclaw-secrets-plan.json
|
||||
openclaw secrets apply --from /tmp/openclaw-secrets-plan.json --dry-run
|
||||
openclaw secrets apply --from /tmp/openclaw-secrets-plan.json --json
|
||||
```
|
||||
|
||||
`.env` scrub details (default behavior):
|
||||
## Why no rollback backups
|
||||
|
||||
- Scrub target is `<config-dir>/.env`.
|
||||
- Only known secret env keys are considered.
|
||||
- Entries are removed only when the value exactly matches a migrated plaintext secret.
|
||||
- Migration writes to the configured default `file` provider path when present; otherwise `<state-dir>/secrets.json`.
|
||||
`secrets apply` intentionally does not write rollback backups containing old plaintext values.
|
||||
|
||||
Rollback a previous migration:
|
||||
Safety comes from strict preflight + atomic-ish apply with best-effort in-memory restore on failure.
|
||||
|
||||
## Example
|
||||
|
||||
```bash
|
||||
openclaw secrets migrate --rollback <backup-id>
|
||||
```
|
||||
|
||||
## Migration outputs
|
||||
|
||||
- Dry-run: prints what would change.
|
||||
- Write mode: prints backup id and moved secret count.
|
||||
- Rollback: restores files from the selected backup manifest.
|
||||
|
||||
Backups live under:
|
||||
|
||||
- `~/.openclaw/backups/secrets-migrate/<backupId>/manifest.json`
|
||||
|
||||
## Examples
|
||||
|
||||
### Preview migration impact
|
||||
|
||||
```bash
|
||||
openclaw secrets migrate --json | jq '{mode, changed, counters, changedFiles}'
|
||||
```
|
||||
|
||||
### Apply migration and keep a machine-readable record
|
||||
|
||||
```bash
|
||||
openclaw secrets migrate --write --json > /tmp/openclaw-secrets-migrate.json
|
||||
```
|
||||
|
||||
### Force a reload after updating gateway env visibility
|
||||
|
||||
```bash
|
||||
# Ensure OPENAI_API_KEY is visible to the running gateway process first,
|
||||
# then re-resolve refs:
|
||||
openclaw secrets reload
|
||||
# Audit first, then configure, then confirm clean:
|
||||
openclaw secrets audit --check
|
||||
openclaw secrets configure
|
||||
openclaw secrets audit --check
|
||||
```
|
||||
|
||||
@@ -2470,7 +2470,7 @@ Notes:
|
||||
- Static runtime credentials come from in-memory resolved snapshots; legacy static `auth.json` entries are scrubbed when discovered.
|
||||
- Legacy OAuth imports from `~/.openclaw/credentials/oauth.json`.
|
||||
- See [OAuth](/concepts/oauth).
|
||||
- Secrets runtime behavior and migration tooling: [Secrets Management](/gateway/secrets).
|
||||
- Secrets runtime behavior and `audit/configure/apply` tooling: [Secrets Management](/gateway/secrets).
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
---
|
||||
summary: "Secrets management: SecretRef contract, runtime snapshot behavior, and migration"
|
||||
summary: "Secrets management: SecretRef contract, runtime snapshot behavior, and safe one-way scrubbing"
|
||||
read_when:
|
||||
- Configuring SecretRefs for providers, auth profiles, skills, or Google Chat
|
||||
- Operating secrets reload/migrate safely in production
|
||||
- Operating secrets reload/audit/configure/apply safely in production
|
||||
- Understanding fail-fast and last-known-good behavior
|
||||
title: "Secrets Management"
|
||||
---
|
||||
@@ -208,51 +208,58 @@ Behavior:
|
||||
- Repeated failures while already degraded log warnings but do not spam events.
|
||||
- Startup fail-fast does not emit degraded events because no runtime snapshot exists yet.
|
||||
|
||||
## Migration command
|
||||
## Audit and configure workflow
|
||||
|
||||
Use `openclaw secrets migrate` to move plaintext static secrets into file-backed refs.
|
||||
|
||||
Dry-run (default):
|
||||
Use this default operator flow:
|
||||
|
||||
```bash
|
||||
openclaw secrets migrate
|
||||
openclaw secrets audit --check
|
||||
openclaw secrets configure
|
||||
openclaw secrets audit --check
|
||||
```
|
||||
|
||||
Apply:
|
||||
### `secrets audit`
|
||||
|
||||
Findings include:
|
||||
|
||||
- plaintext values at rest (`openclaw.json`, `auth-profiles.json`, `.env`)
|
||||
- unresolved refs
|
||||
- precedence shadowing (`auth-profiles` taking priority over config refs)
|
||||
- legacy residues (`auth.json`, OAuth out-of-scope reminders)
|
||||
|
||||
### `secrets configure`
|
||||
|
||||
Interactive helper that:
|
||||
|
||||
- lets you select secret-bearing fields in `openclaw.json`
|
||||
- captures SecretRef details (`source`, `provider`, `id`)
|
||||
- runs preflight resolution
|
||||
- can apply immediately
|
||||
|
||||
`configure` apply defaults to:
|
||||
|
||||
- scrub matching static creds from `auth-profiles.json` for targeted providers
|
||||
- scrub legacy static `api_key` entries from `auth.json`
|
||||
- scrub matching known secret lines from `<config-dir>/.env`
|
||||
|
||||
### `secrets apply`
|
||||
|
||||
Apply a saved plan:
|
||||
|
||||
```bash
|
||||
openclaw secrets migrate --write
|
||||
openclaw secrets apply --from /tmp/openclaw-secrets-plan.json
|
||||
openclaw secrets apply --from /tmp/openclaw-secrets-plan.json --dry-run
|
||||
```
|
||||
|
||||
Rollback by backup id:
|
||||
## One-way safety policy
|
||||
|
||||
```bash
|
||||
openclaw secrets migrate --rollback 20260224T193000Z
|
||||
```
|
||||
OpenClaw intentionally does **not** write rollback backups that contain pre-migration plaintext secret values.
|
||||
|
||||
What migration covers:
|
||||
Safety model:
|
||||
|
||||
- `openclaw.json` fields listed above
|
||||
- `auth-profiles.json` plaintext API key/token fields
|
||||
- optional scrub of matching plaintext values from `<config-dir>/.env` (default on)
|
||||
|
||||
Migration writes secrets to:
|
||||
|
||||
- configured default `file` provider path when present
|
||||
- otherwise `<state-dir>/secrets.json`
|
||||
|
||||
`.env` scrub semantics:
|
||||
|
||||
- target path is `<config-dir>/.env`
|
||||
- only known secret env keys are eligible
|
||||
- a line is removed only when value exactly matches a migrated plaintext value
|
||||
- comments/non-secret keys/unmatched values are preserved
|
||||
|
||||
Backups:
|
||||
|
||||
- path: `~/.openclaw/backups/secrets-migrate/<backupId>/`
|
||||
- manifest: `manifest.json`
|
||||
- retention: 20 backups
|
||||
- preflight must succeed before write mode
|
||||
- runtime activation is validated before commit
|
||||
- apply updates files using atomic file replacement and best-effort in-memory restore on failure
|
||||
|
||||
## `auth.json` compatibility notes
|
||||
|
||||
|
||||
@@ -207,7 +207,6 @@ Use this when auditing access or deciding what to back up:
|
||||
- `~/.openclaw/credentials/<channel>-<accountId>-allowFrom.json` (non-default accounts)
|
||||
- **Model auth profiles**: `~/.openclaw/agents/<agentId>/agent/auth-profiles.json`
|
||||
- **File-backed secrets payload (optional)**: `~/.openclaw/secrets.json`
|
||||
- **Secrets migration backups (optional)**: `~/.openclaw/backups/secrets-migrate/<backupId>/`
|
||||
- **Legacy OAuth import**: `~/.openclaw/credentials/oauth.json`
|
||||
|
||||
## Security Audit Checklist
|
||||
@@ -764,7 +763,6 @@ Assume anything under `~/.openclaw/` (or `$OPENCLAW_STATE_DIR/`) may contain sec
|
||||
- `credentials/**`: channel credentials (example: WhatsApp creds), pairing allowlists, legacy OAuth imports.
|
||||
- `agents/<agentId>/agent/auth-profiles.json`: API keys, token profiles, OAuth tokens, and optional `keyRef`/`tokenRef`.
|
||||
- `secrets.json` (optional): file-backed secret payload used by `file` SecretRef providers (`secrets.providers`).
|
||||
- `backups/secrets-migrate/**` (optional): migration rollback backups + manifests.
|
||||
- `agents/<agentId>/agent/auth.json`: legacy compatibility file. Static `api_key` entries are scrubbed when discovered.
|
||||
- `agents/<agentId>/sessions/**`: session transcripts (`*.jsonl`) + routing metadata (`sessions.json`) that can contain private messages and tool output.
|
||||
- `extensions/**`: installed plugins (plus their `node_modules/`).
|
||||
|
||||
@@ -1297,7 +1297,6 @@ Everything lives under `$OPENCLAW_STATE_DIR` (default: `~/.openclaw`):
|
||||
| `$OPENCLAW_STATE_DIR/credentials/oauth.json` | Legacy OAuth import (copied into auth profiles on first use) |
|
||||
| `$OPENCLAW_STATE_DIR/agents/<agentId>/agent/auth-profiles.json` | Auth profiles (OAuth, API keys, and optional `keyRef`/`tokenRef`) |
|
||||
| `$OPENCLAW_STATE_DIR/secrets.json` | Optional file-backed secret payload for `file` SecretRef providers |
|
||||
| `$OPENCLAW_STATE_DIR/backups/secrets-migrate/` | Optional migration rollback backups + manifests |
|
||||
| `$OPENCLAW_STATE_DIR/agents/<agentId>/agent/auth.json` | Legacy compatibility file (static `api_key` entries scrubbed) |
|
||||
| `$OPENCLAW_STATE_DIR/credentials/` | Provider state (e.g. `whatsapp/<accountId>/creds.json`) |
|
||||
| `$OPENCLAW_STATE_DIR/agents/` | Per-agent state (agentDir + sessions) |
|
||||
@@ -1340,7 +1339,7 @@ Put your **agent workspace** in a **private** git repo and back it up somewhere
|
||||
private (for example GitHub private). This captures memory + AGENTS/SOUL/USER
|
||||
files, and lets you restore the assistant's "mind" later.
|
||||
|
||||
Do **not** commit anything under `~/.openclaw` (credentials, sessions, tokens, encrypted secrets payloads, or migration backups).
|
||||
Do **not** commit anything under `~/.openclaw` (credentials, sessions, tokens, or encrypted secrets payloads).
|
||||
If you need a full restore, back up both the workspace and the state directory
|
||||
separately (see the migration question above).
|
||||
|
||||
|
||||
@@ -135,7 +135,6 @@ Use this when debugging auth or deciding what to back up:
|
||||
- `~/.openclaw/credentials/<channel>-<accountId>-allowFrom.json` (non-default accounts)
|
||||
- **Model auth profiles**: `~/.openclaw/agents/<agentId>/agent/auth-profiles.json`
|
||||
- **File-backed secrets payload (optional)**: `~/.openclaw/secrets.json`
|
||||
- **Secrets migration backups (optional)**: `~/.openclaw/backups/secrets-migrate/<backupId>/`
|
||||
- **Legacy OAuth import**: `~/.openclaw/credentials/oauth.json`
|
||||
More detail: [Security](/gateway/security#credential-storage-map).
|
||||
|
||||
|
||||
@@ -3,8 +3,11 @@ import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { createCliRuntimeCapture } from "./test-runtime-capture.js";
|
||||
|
||||
const callGatewayFromCli = vi.fn();
|
||||
const runSecretsMigration = vi.fn();
|
||||
const rollbackSecretsMigration = vi.fn();
|
||||
const runSecretsAudit = vi.fn();
|
||||
const resolveSecretsAuditExitCode = vi.fn();
|
||||
const runSecretsConfigureInteractive = vi.fn();
|
||||
const runSecretsApply = vi.fn();
|
||||
const confirm = vi.fn();
|
||||
|
||||
const { defaultRuntime, runtimeLogs, runtimeErrors, resetRuntimeCapture } =
|
||||
createCliRuntimeCapture();
|
||||
@@ -19,9 +22,22 @@ vi.mock("../runtime.js", () => ({
|
||||
defaultRuntime,
|
||||
}));
|
||||
|
||||
vi.mock("../secrets/migrate.js", () => ({
|
||||
runSecretsMigration: (options: unknown) => runSecretsMigration(options),
|
||||
rollbackSecretsMigration: (options: unknown) => rollbackSecretsMigration(options),
|
||||
vi.mock("../secrets/audit.js", () => ({
|
||||
runSecretsAudit: () => runSecretsAudit(),
|
||||
resolveSecretsAuditExitCode: (report: unknown, check: boolean) =>
|
||||
resolveSecretsAuditExitCode(report, check),
|
||||
}));
|
||||
|
||||
vi.mock("../secrets/configure.js", () => ({
|
||||
runSecretsConfigureInteractive: () => runSecretsConfigureInteractive(),
|
||||
}));
|
||||
|
||||
vi.mock("../secrets/apply.js", () => ({
|
||||
runSecretsApply: (options: unknown) => runSecretsApply(options),
|
||||
}));
|
||||
|
||||
vi.mock("@clack/prompts", () => ({
|
||||
confirm: (options: unknown) => confirm(options),
|
||||
}));
|
||||
|
||||
const { registerSecretsCli } = await import("./secrets-cli.js");
|
||||
@@ -37,8 +53,11 @@ describe("secrets CLI", () => {
|
||||
beforeEach(() => {
|
||||
resetRuntimeCapture();
|
||||
callGatewayFromCli.mockReset();
|
||||
runSecretsMigration.mockReset();
|
||||
rollbackSecretsMigration.mockReset();
|
||||
runSecretsAudit.mockReset();
|
||||
resolveSecretsAuditExitCode.mockReset();
|
||||
runSecretsConfigureInteractive.mockReset();
|
||||
runSecretsApply.mockReset();
|
||||
confirm.mockReset();
|
||||
});
|
||||
|
||||
it("calls secrets.reload and prints human output", async () => {
|
||||
@@ -60,37 +79,57 @@ describe("secrets CLI", () => {
|
||||
expect(runtimeLogs.at(-1)).toContain('"ok": true');
|
||||
});
|
||||
|
||||
it("runs secrets migrate as dry-run by default", async () => {
|
||||
runSecretsMigration.mockResolvedValue({
|
||||
mode: "dry-run",
|
||||
changed: true,
|
||||
secretsFilePath: "/tmp/secrets.enc.json",
|
||||
counters: { secretsWritten: 3 },
|
||||
changedFiles: ["/tmp/openclaw.json"],
|
||||
it("runs secrets audit and exits via check code", async () => {
|
||||
runSecretsAudit.mockResolvedValue({
|
||||
version: 1,
|
||||
status: "findings",
|
||||
filesScanned: [],
|
||||
summary: {
|
||||
plaintextCount: 1,
|
||||
unresolvedRefCount: 0,
|
||||
shadowedRefCount: 0,
|
||||
legacyResidueCount: 0,
|
||||
},
|
||||
findings: [],
|
||||
});
|
||||
resolveSecretsAuditExitCode.mockReturnValue(1);
|
||||
|
||||
await createProgram().parseAsync(["secrets", "migrate"], { from: "user" });
|
||||
|
||||
expect(runSecretsMigration).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ write: false, scrubEnv: true }),
|
||||
);
|
||||
expect(runtimeLogs.at(-1)).toContain("dry run");
|
||||
await expect(
|
||||
createProgram().parseAsync(["secrets", "audit", "--check"], { from: "user" }),
|
||||
).rejects.toBeTruthy();
|
||||
expect(runSecretsAudit).toHaveBeenCalled();
|
||||
expect(resolveSecretsAuditExitCode).toHaveBeenCalledWith(expect.anything(), true);
|
||||
});
|
||||
|
||||
it("runs rollback when --rollback is provided", async () => {
|
||||
rollbackSecretsMigration.mockResolvedValue({
|
||||
backupId: "20260221T010203Z",
|
||||
restoredFiles: 2,
|
||||
deletedFiles: 1,
|
||||
it("runs secrets configure then apply when confirmed", async () => {
|
||||
runSecretsConfigureInteractive.mockResolvedValue({
|
||||
plan: {
|
||||
version: 1,
|
||||
protocolVersion: 1,
|
||||
generatedAt: "2026-02-26T00:00:00.000Z",
|
||||
generatedBy: "openclaw secrets configure",
|
||||
targets: [],
|
||||
},
|
||||
preflight: {
|
||||
mode: "dry-run",
|
||||
changed: true,
|
||||
changedFiles: ["/tmp/openclaw.json"],
|
||||
warningCount: 0,
|
||||
warnings: [],
|
||||
},
|
||||
});
|
||||
confirm.mockResolvedValue(true);
|
||||
runSecretsApply.mockResolvedValue({
|
||||
mode: "write",
|
||||
changed: true,
|
||||
changedFiles: ["/tmp/openclaw.json"],
|
||||
warningCount: 0,
|
||||
warnings: [],
|
||||
});
|
||||
|
||||
await createProgram().parseAsync(["secrets", "migrate", "--rollback", "20260221T010203Z"], {
|
||||
from: "user",
|
||||
});
|
||||
|
||||
expect(rollbackSecretsMigration).toHaveBeenCalledWith({
|
||||
backupId: "20260221T010203Z",
|
||||
});
|
||||
expect(runtimeLogs.at(-1)).toContain("rollback complete");
|
||||
await createProgram().parseAsync(["secrets", "configure"], { from: "user" });
|
||||
expect(runSecretsConfigureInteractive).toHaveBeenCalled();
|
||||
expect(runSecretsApply).toHaveBeenCalledWith(expect.objectContaining({ write: true }));
|
||||
expect(runtimeLogs.at(-1)).toContain("Secrets applied");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,58 +1,40 @@
|
||||
import fs from "node:fs";
|
||||
import { confirm } from "@clack/prompts";
|
||||
import type { Command } from "commander";
|
||||
import { danger } from "../globals.js";
|
||||
import { defaultRuntime } from "../runtime.js";
|
||||
import {
|
||||
rollbackSecretsMigration,
|
||||
runSecretsMigration,
|
||||
type SecretsMigrationRollbackResult,
|
||||
type SecretsMigrationRunResult,
|
||||
} from "../secrets/migrate.js";
|
||||
import { runSecretsApply } from "../secrets/apply.js";
|
||||
import { resolveSecretsAuditExitCode, runSecretsAudit } from "../secrets/audit.js";
|
||||
import { runSecretsConfigureInteractive } from "../secrets/configure.js";
|
||||
import { isSecretsApplyPlan, type SecretsApplyPlan } from "../secrets/plan.js";
|
||||
import { formatDocsLink } from "../terminal/links.js";
|
||||
import { theme } from "../terminal/theme.js";
|
||||
import { addGatewayClientOptions, callGatewayFromCli, type GatewayRpcOpts } from "./gateway-rpc.js";
|
||||
|
||||
type SecretsReloadOptions = GatewayRpcOpts & { json?: boolean };
|
||||
type SecretsMigrateOptions = {
|
||||
write?: boolean;
|
||||
rollback?: string;
|
||||
scrubEnv?: boolean;
|
||||
type SecretsAuditOptions = {
|
||||
check?: boolean;
|
||||
json?: boolean;
|
||||
};
|
||||
type SecretsConfigureOptions = {
|
||||
apply?: boolean;
|
||||
yes?: boolean;
|
||||
planOut?: string;
|
||||
json?: boolean;
|
||||
};
|
||||
type SecretsApplyOptions = {
|
||||
from: string;
|
||||
dryRun?: boolean;
|
||||
json?: boolean;
|
||||
};
|
||||
|
||||
function printMigrationResult(
|
||||
result: SecretsMigrationRunResult | SecretsMigrationRollbackResult,
|
||||
json: boolean,
|
||||
): void {
|
||||
if (json) {
|
||||
defaultRuntime.log(JSON.stringify(result, null, 2));
|
||||
return;
|
||||
function readPlanFile(pathname: string): SecretsApplyPlan {
|
||||
const raw = fs.readFileSync(pathname, "utf8");
|
||||
const parsed = JSON.parse(raw) as unknown;
|
||||
if (!isSecretsApplyPlan(parsed)) {
|
||||
throw new Error(`Invalid secrets plan file: ${pathname}`);
|
||||
}
|
||||
|
||||
if ("restoredFiles" in result) {
|
||||
defaultRuntime.log(
|
||||
`Secrets rollback complete for backup ${result.backupId}. Restored ${result.restoredFiles} file(s), deleted ${result.deletedFiles} file(s).`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (result.mode === "dry-run") {
|
||||
if (!result.changed) {
|
||||
defaultRuntime.log("Secrets migrate dry run: no changes needed.");
|
||||
return;
|
||||
}
|
||||
defaultRuntime.log(
|
||||
`Secrets migrate dry run: ${result.changedFiles.length} file(s) would change, ${result.counters.secretsWritten} secret value(s) would move to ${result.secretsFilePath}.`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!result.changed) {
|
||||
defaultRuntime.log("Secrets migrate: no changes applied.");
|
||||
return;
|
||||
}
|
||||
defaultRuntime.log(
|
||||
`Secrets migrated. Backup: ${result.backupId}. Moved ${result.counters.secretsWritten} secret value(s) into ${result.secretsFilePath}.`,
|
||||
);
|
||||
return parsed;
|
||||
}
|
||||
|
||||
export function registerSecretsCli(program: Command) {
|
||||
@@ -94,25 +76,152 @@ export function registerSecretsCli(program: Command) {
|
||||
});
|
||||
|
||||
secrets
|
||||
.command("migrate")
|
||||
.description("Migrate plaintext secrets to file-backed SecretRefs")
|
||||
.option("--write", "Apply migration changes (default is dry-run)", false)
|
||||
.option("--rollback <backup-id>", "Rollback a previous migration backup id")
|
||||
.option("--no-scrub-env", "Keep matching plaintext values in ~/.openclaw/.env")
|
||||
.command("audit")
|
||||
.description("Audit plaintext secrets, unresolved refs, and precedence drift")
|
||||
.option("--check", "Exit non-zero when findings are present", false)
|
||||
.option("--json", "Output JSON", false)
|
||||
.action(async (opts: SecretsMigrateOptions) => {
|
||||
.action(async (opts: SecretsAuditOptions) => {
|
||||
try {
|
||||
if (typeof opts.rollback === "string" && opts.rollback.trim()) {
|
||||
const result = await rollbackSecretsMigration({ backupId: opts.rollback.trim() });
|
||||
printMigrationResult(result, Boolean(opts.json));
|
||||
return;
|
||||
const report = await runSecretsAudit();
|
||||
if (opts.json) {
|
||||
defaultRuntime.log(JSON.stringify(report, null, 2));
|
||||
} else {
|
||||
defaultRuntime.log(
|
||||
`Secrets audit: ${report.status}. plaintext=${report.summary.plaintextCount}, unresolved=${report.summary.unresolvedRefCount}, shadowed=${report.summary.shadowedRefCount}, legacy=${report.summary.legacyResidueCount}.`,
|
||||
);
|
||||
if (report.findings.length > 0) {
|
||||
for (const finding of report.findings.slice(0, 20)) {
|
||||
defaultRuntime.log(
|
||||
`- [${finding.code}] ${finding.file}:${finding.jsonPath} ${finding.message}`,
|
||||
);
|
||||
}
|
||||
if (report.findings.length > 20) {
|
||||
defaultRuntime.log(`... ${report.findings.length - 20} more finding(s).`);
|
||||
}
|
||||
}
|
||||
}
|
||||
const exitCode = resolveSecretsAuditExitCode(report, Boolean(opts.check));
|
||||
if (exitCode !== 0) {
|
||||
defaultRuntime.exit(exitCode);
|
||||
}
|
||||
} catch (err) {
|
||||
defaultRuntime.error(danger(String(err)));
|
||||
defaultRuntime.exit(2);
|
||||
}
|
||||
});
|
||||
|
||||
secrets
|
||||
.command("configure")
|
||||
.description("Interactive SecretRef helper with preflight validation")
|
||||
.option("--apply", "Apply changes immediately after preflight", false)
|
||||
.option("--yes", "Skip apply confirmation prompt", false)
|
||||
.option("--plan-out <path>", "Write generated plan JSON to a file")
|
||||
.option("--json", "Output JSON", false)
|
||||
.action(async (opts: SecretsConfigureOptions) => {
|
||||
try {
|
||||
const configured = await runSecretsConfigureInteractive();
|
||||
if (opts.planOut) {
|
||||
fs.writeFileSync(opts.planOut, `${JSON.stringify(configured.plan, null, 2)}\n`, "utf8");
|
||||
}
|
||||
if (opts.json) {
|
||||
defaultRuntime.log(
|
||||
JSON.stringify(
|
||||
{
|
||||
plan: configured.plan,
|
||||
preflight: configured.preflight,
|
||||
},
|
||||
null,
|
||||
2,
|
||||
),
|
||||
);
|
||||
} else {
|
||||
defaultRuntime.log(
|
||||
`Preflight: changed=${configured.preflight.changed}, files=${configured.preflight.changedFiles.length}, warnings=${configured.preflight.warningCount}.`,
|
||||
);
|
||||
if (configured.preflight.warningCount > 0) {
|
||||
for (const warning of configured.preflight.warnings) {
|
||||
defaultRuntime.log(`- warning: ${warning}`);
|
||||
}
|
||||
}
|
||||
defaultRuntime.log(`Plan targets: ${configured.plan.targets.length}`);
|
||||
if (opts.planOut) {
|
||||
defaultRuntime.log(`Plan written to ${opts.planOut}`);
|
||||
}
|
||||
}
|
||||
|
||||
const result = await runSecretsMigration({
|
||||
write: Boolean(opts.write),
|
||||
scrubEnv: opts.scrubEnv ?? true,
|
||||
let shouldApply = Boolean(opts.apply);
|
||||
if (!shouldApply && !opts.json) {
|
||||
const approved = await confirm({
|
||||
message: "Apply this plan now?",
|
||||
initialValue: true,
|
||||
});
|
||||
if (typeof approved === "boolean") {
|
||||
shouldApply = approved;
|
||||
}
|
||||
}
|
||||
if (shouldApply) {
|
||||
const needsIrreversiblePrompt = Boolean(opts.apply);
|
||||
if (needsIrreversiblePrompt && !opts.yes && !opts.json) {
|
||||
const confirmed = await confirm({
|
||||
message:
|
||||
"This migration is one-way for migrated plaintext values. Continue with apply?",
|
||||
initialValue: true,
|
||||
});
|
||||
if (confirmed !== true) {
|
||||
defaultRuntime.log("Apply cancelled.");
|
||||
return;
|
||||
}
|
||||
}
|
||||
const result = await runSecretsApply({
|
||||
plan: configured.plan,
|
||||
write: true,
|
||||
});
|
||||
if (opts.json) {
|
||||
defaultRuntime.log(JSON.stringify(result, null, 2));
|
||||
return;
|
||||
}
|
||||
defaultRuntime.log(
|
||||
result.changed
|
||||
? `Secrets applied. Updated ${result.changedFiles.length} file(s).`
|
||||
: "Secrets apply: no changes.",
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
defaultRuntime.error(danger(String(err)));
|
||||
defaultRuntime.exit(1);
|
||||
}
|
||||
});
|
||||
|
||||
secrets
|
||||
.command("apply")
|
||||
.description("Apply a previously generated secrets plan")
|
||||
.requiredOption("--from <path>", "Path to plan JSON")
|
||||
.option("--dry-run", "Validate/preflight only", false)
|
||||
.option("--json", "Output JSON", false)
|
||||
.action(async (opts: SecretsApplyOptions) => {
|
||||
try {
|
||||
const plan = readPlanFile(opts.from);
|
||||
const result = await runSecretsApply({
|
||||
plan,
|
||||
write: !opts.dryRun,
|
||||
});
|
||||
printMigrationResult(result, Boolean(opts.json));
|
||||
if (opts.json) {
|
||||
defaultRuntime.log(JSON.stringify(result, null, 2));
|
||||
return;
|
||||
}
|
||||
if (opts.dryRun) {
|
||||
defaultRuntime.log(
|
||||
result.changed
|
||||
? `Secrets apply dry run: ${result.changedFiles.length} file(s) would change.`
|
||||
: "Secrets apply dry run: no changes.",
|
||||
);
|
||||
return;
|
||||
}
|
||||
defaultRuntime.log(
|
||||
result.changed
|
||||
? `Secrets applied. Updated ${result.changedFiles.length} file(s).`
|
||||
: "Secrets apply: no changes.",
|
||||
);
|
||||
} catch (err) {
|
||||
defaultRuntime.error(danger(String(err)));
|
||||
defaultRuntime.exit(1);
|
||||
|
||||
149
src/secrets/apply.test.ts
Normal file
149
src/secrets/apply.test.ts
Normal file
@@ -0,0 +1,149 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
import { runSecretsApply } from "./apply.js";
|
||||
import type { SecretsApplyPlan } from "./plan.js";
|
||||
|
||||
describe("secrets apply", () => {
|
||||
let rootDir = "";
|
||||
let stateDir = "";
|
||||
let configPath = "";
|
||||
let authStorePath = "";
|
||||
let authJsonPath = "";
|
||||
let envPath = "";
|
||||
let env: NodeJS.ProcessEnv;
|
||||
|
||||
beforeEach(async () => {
|
||||
rootDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-secrets-apply-"));
|
||||
stateDir = path.join(rootDir, ".openclaw");
|
||||
configPath = path.join(stateDir, "openclaw.json");
|
||||
authStorePath = path.join(stateDir, "agents", "main", "agent", "auth-profiles.json");
|
||||
authJsonPath = path.join(stateDir, "agents", "main", "agent", "auth.json");
|
||||
envPath = path.join(stateDir, ".env");
|
||||
env = {
|
||||
...process.env,
|
||||
OPENCLAW_STATE_DIR: stateDir,
|
||||
OPENCLAW_CONFIG_PATH: configPath,
|
||||
OPENAI_API_KEY: "sk-live-env",
|
||||
};
|
||||
|
||||
await fs.mkdir(path.dirname(configPath), { recursive: true });
|
||||
await fs.mkdir(path.dirname(authStorePath), { recursive: true });
|
||||
|
||||
await fs.writeFile(
|
||||
configPath,
|
||||
`${JSON.stringify(
|
||||
{
|
||||
models: {
|
||||
providers: {
|
||||
openai: {
|
||||
baseUrl: "https://api.openai.com/v1",
|
||||
api: "openai-completions",
|
||||
apiKey: "sk-openai-plaintext",
|
||||
models: [{ id: "gpt-5", name: "gpt-5" }],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
|
||||
await fs.writeFile(
|
||||
authStorePath,
|
||||
`${JSON.stringify(
|
||||
{
|
||||
version: 1,
|
||||
profiles: {
|
||||
"openai:default": {
|
||||
type: "api_key",
|
||||
provider: "openai",
|
||||
key: "sk-openai-plaintext",
|
||||
},
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
|
||||
await fs.writeFile(
|
||||
authJsonPath,
|
||||
`${JSON.stringify(
|
||||
{
|
||||
openai: {
|
||||
type: "api_key",
|
||||
key: "sk-openai-plaintext",
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
await fs.writeFile(envPath, "OPENAI_API_KEY=sk-openai-plaintext\nUNRELATED=value\n", "utf8");
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.rm(rootDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it("preflights and applies one-way scrub without plaintext backups", async () => {
  // Plan: move the OpenAI provider key to an env-backed SecretRef and scrub
  // every plaintext copy (.env, auth-profiles.json, legacy auth.json).
  const plan: SecretsApplyPlan = {
    version: 1,
    protocolVersion: 1,
    generatedAt: new Date().toISOString(),
    generatedBy: "manual",
    targets: [
      {
        type: "models.providers.apiKey",
        path: "models.providers.openai.apiKey",
        providerId: "openai",
        ref: { source: "env", provider: "default", id: "OPENAI_API_KEY" },
      },
    ],
    options: {
      scrubEnv: true,
      scrubAuthProfilesForProviderTargets: true,
      scrubLegacyAuthJson: true,
    },
  };

  // Dry-run must report pending changes without writing anything.
  const dryRun = await runSecretsApply({ plan, env, write: false });
  expect(dryRun.mode).toBe("dry-run");
  expect(dryRun.changed).toBe(true);

  // Real apply.
  const applied = await runSecretsApply({ plan, env, write: true });
  expect(applied.mode).toBe("write");
  expect(applied.changed).toBe(true);

  // Config now holds the SecretRef object in place of the plaintext key.
  const nextConfig = JSON.parse(await fs.readFile(configPath, "utf8")) as {
    models: { providers: { openai: { apiKey: unknown } } };
  };
  expect(nextConfig.models.providers.openai.apiKey).toEqual({
    source: "env",
    provider: "default",
    id: "OPENAI_API_KEY",
  });

  // Auth profile keeps its entry but loses both key and keyRef.
  const nextAuthStore = JSON.parse(await fs.readFile(authStorePath, "utf8")) as {
    profiles: { "openai:default": { key?: string; keyRef?: unknown } };
  };
  expect(nextAuthStore.profiles["openai:default"].key).toBeUndefined();
  expect(nextAuthStore.profiles["openai:default"].keyRef).toBeUndefined();

  // The legacy auth.json entry for the provider is removed entirely.
  const nextAuthJson = JSON.parse(await fs.readFile(authJsonPath, "utf8")) as Record<
    string,
    unknown
  >;
  expect(nextAuthJson.openai).toBeUndefined();

  // .env is scrubbed of the plaintext secret but unrelated lines survive.
  const nextEnv = await fs.readFile(envPath, "utf8");
  expect(nextEnv).not.toContain("sk-openai-plaintext");
  expect(nextEnv).toContain("UNRELATED=value");
});
|
||||
});
|
||||
493
src/secrets/apply.ts
Normal file
493
src/secrets/apply.ts
Normal file
@@ -0,0 +1,493 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { listAgentIds, resolveAgentDir } from "../agents/agent-scope.js";
|
||||
import { loadAuthProfileStoreForSecretsRuntime } from "../agents/auth-profiles.js";
|
||||
import { resolveAuthStorePath } from "../agents/auth-profiles/paths.js";
|
||||
import { normalizeProviderId } from "../agents/model-selection.js";
|
||||
import { resolveStateDir, type OpenClawConfig } from "../config/config.js";
|
||||
import type { ConfigWriteOptions } from "../config/io.js";
|
||||
import { resolveConfigDir, resolveUserPath } from "../utils.js";
|
||||
import { createSecretsConfigIO } from "./config-io.js";
|
||||
import { type SecretsApplyPlan, normalizeSecretsPlanOptions } from "./plan.js";
|
||||
import { listKnownSecretEnvVarNames } from "./provider-env-vars.js";
|
||||
import { resolveSecretRefValue } from "./resolve.js";
|
||||
import { prepareSecretsRuntimeSnapshot } from "./runtime.js";
|
||||
import { isNonEmptyString, isRecord, writeTextFileAtomic } from "./shared.js";
|
||||
|
||||
// Pre-write state of one file so a failed apply can roll it back: whether
// it existed, its full text, and its permission bits (0o777 mask).
type FileSnapshot = {
  existed: boolean;
  content: string;
  mode: number;
};

// One queued atomic file write: destination path, full content, and mode.
type ApplyWrite = {
  path: string;
  content: string;
  mode: number;
};

// Result of projecting a plan onto current state without touching disk:
// the mutated config plus per-file replacement payloads and bookkeeping.
type ProjectedState = {
  nextConfig: OpenClawConfig;
  configPath: string;
  configWriteOptions: ConfigWriteOptions;
  authStoreByPath: Map<string, Record<string, unknown>>;
  authJsonByPath: Map<string, Record<string, unknown>>;
  envRawByPath: Map<string, string>;
  changedFiles: Set<string>;
  warnings: string[];
};

/** Outcome of `runSecretsApply` in either dry-run or write mode. */
export type SecretsApplyResult = {
  mode: "dry-run" | "write";
  changed: boolean;
  changedFiles: string[];
  warningCount: number;
  warnings: string[];
};
|
||||
|
||||
function parseDotPath(pathname: string): string[] {
|
||||
return pathname.split(".").filter(Boolean);
|
||||
}
|
||||
|
||||
function getByDotPath(root: unknown, pathLabel: string): unknown {
|
||||
const segments = parseDotPath(pathLabel);
|
||||
let cursor: unknown = root;
|
||||
for (const segment of segments) {
|
||||
if (!isRecord(cursor)) {
|
||||
return undefined;
|
||||
}
|
||||
cursor = cursor[segment];
|
||||
}
|
||||
return cursor;
|
||||
}
|
||||
|
||||
function setByDotPath(root: OpenClawConfig, pathLabel: string, value: unknown): void {
|
||||
const segments = parseDotPath(pathLabel);
|
||||
if (segments.length === 0) {
|
||||
throw new Error("Target path is empty.");
|
||||
}
|
||||
let cursor: Record<string, unknown> = root as unknown as Record<string, unknown>;
|
||||
for (const segment of segments.slice(0, -1)) {
|
||||
const existing = cursor[segment];
|
||||
if (!isRecord(existing)) {
|
||||
cursor[segment] = {};
|
||||
}
|
||||
cursor = cursor[segment] as Record<string, unknown>;
|
||||
}
|
||||
cursor[segments[segments.length - 1]] = value;
|
||||
}
|
||||
|
||||
function deleteByDotPath(root: OpenClawConfig, pathLabel: string): void {
|
||||
const segments = parseDotPath(pathLabel);
|
||||
if (segments.length === 0) {
|
||||
return;
|
||||
}
|
||||
let cursor: Record<string, unknown> = root as unknown as Record<string, unknown>;
|
||||
for (const segment of segments.slice(0, -1)) {
|
||||
const existing = cursor[segment];
|
||||
if (!isRecord(existing)) {
|
||||
return;
|
||||
}
|
||||
cursor = existing;
|
||||
}
|
||||
delete cursor[segments[segments.length - 1]];
|
||||
}
|
||||
|
||||
function parseEnvValue(raw: string): string {
|
||||
const trimmed = raw.trim();
|
||||
if (
|
||||
(trimmed.startsWith('"') && trimmed.endsWith('"')) ||
|
||||
(trimmed.startsWith("'") && trimmed.endsWith("'"))
|
||||
) {
|
||||
return trimmed.slice(1, -1);
|
||||
}
|
||||
return trimmed;
|
||||
}
|
||||
|
||||
function scrubEnvRaw(
|
||||
raw: string,
|
||||
migratedValues: Set<string>,
|
||||
allowedEnvKeys: Set<string>,
|
||||
): {
|
||||
nextRaw: string;
|
||||
removed: number;
|
||||
} {
|
||||
if (migratedValues.size === 0 || allowedEnvKeys.size === 0) {
|
||||
return { nextRaw: raw, removed: 0 };
|
||||
}
|
||||
const lines = raw.split(/\r?\n/);
|
||||
const nextLines: string[] = [];
|
||||
let removed = 0;
|
||||
for (const line of lines) {
|
||||
const match = line.match(/^\s*(?:export\s+)?([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.*)$/);
|
||||
if (!match) {
|
||||
nextLines.push(line);
|
||||
continue;
|
||||
}
|
||||
const envKey = match[1] ?? "";
|
||||
if (!allowedEnvKeys.has(envKey)) {
|
||||
nextLines.push(line);
|
||||
continue;
|
||||
}
|
||||
const parsedValue = parseEnvValue(match[2] ?? "");
|
||||
if (migratedValues.has(parsedValue)) {
|
||||
removed += 1;
|
||||
continue;
|
||||
}
|
||||
nextLines.push(line);
|
||||
}
|
||||
const hadTrailingNewline = raw.endsWith("\n");
|
||||
const joined = nextLines.join("\n");
|
||||
return {
|
||||
nextRaw:
|
||||
hadTrailingNewline || joined.length === 0
|
||||
? `${joined}${joined.endsWith("\n") ? "" : "\n"}`
|
||||
: joined,
|
||||
removed,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Enumerate every auth-profiles.json that may hold provider credentials:
 * the global store, every agent directory found on disk under
 * `<state>/agents/<id>/agent/`, and every agent declared in config (whose
 * directory may live outside the default agents root). De-duplicated.
 */
function collectAuthStorePaths(config: OpenClawConfig, stateDir: string): string[] {
  const paths = new Set<string>();
  paths.add(resolveUserPath(resolveAuthStorePath()));

  // On-disk scan: agents that exist even if not declared in config.
  const agentsRoot = path.join(resolveUserPath(stateDir), "agents");
  if (fs.existsSync(agentsRoot)) {
    for (const entry of fs.readdirSync(agentsRoot, { withFileTypes: true })) {
      if (!entry.isDirectory()) {
        continue;
      }
      paths.add(path.join(agentsRoot, entry.name, "agent", "auth-profiles.json"));
    }
  }

  // Config-declared agents, resolved through their configured directory.
  for (const agentId of listAgentIds(config)) {
    const agentDir = resolveAgentDir(config, agentId);
    paths.add(resolveUserPath(resolveAuthStorePath(agentDir)));
  }

  return [...paths];
}
|
||||
|
||||
/**
 * Find legacy per-agent auth.json files under `<state>/agents/<id>/agent/`.
 * Only files that currently exist are returned.
 */
function collectAuthJsonPaths(stateDir: string): string[] {
  const out: string[] = [];
  const agentsRoot = path.join(resolveUserPath(stateDir), "agents");
  if (!fs.existsSync(agentsRoot)) {
    return out;
  }
  for (const entry of fs.readdirSync(agentsRoot, { withFileTypes: true })) {
    if (!entry.isDirectory()) {
      continue;
    }
    const candidate = path.join(agentsRoot, entry.name, "agent", "auth.json");
    if (fs.existsSync(candidate)) {
      out.push(candidate);
    }
  }
  return out;
}
|
||||
|
||||
function resolveGoogleChatRefPath(pathLabel: string): string {
|
||||
if (pathLabel.endsWith(".serviceAccount")) {
|
||||
return `${pathLabel}Ref`;
|
||||
}
|
||||
throw new Error(`Google Chat target path must end with ".serviceAccount": ${pathLabel}`);
|
||||
}
|
||||
|
||||
/**
 * Project a secrets plan onto the current on-disk state WITHOUT writing:
 * compute the next config, the scrubbed auth stores / legacy auth.json
 * contents, and the scrubbed .env text, then preflight the result (every
 * ref must resolve, and the runtime snapshot must build). Both dry-run and
 * write mode go through this, so preflight failures abort before any write.
 *
 * @throws when the config is invalid or any target ref fails preflight.
 */
async function projectPlanState(params: {
  plan: SecretsApplyPlan;
  env: NodeJS.ProcessEnv;
}): Promise<ProjectedState> {
  const io = createSecretsConfigIO({ env: params.env });
  const { snapshot, writeOptions } = await io.readConfigFileSnapshotForWrite();
  if (!snapshot.valid) {
    throw new Error("Cannot apply secrets plan: config is invalid.");
  }
  const options = normalizeSecretsPlanOptions(params.plan.options);
  // Mutate a clone; the original snapshot stays untouched for comparison.
  const nextConfig = structuredClone(snapshot.config);
  const stateDir = resolveStateDir(params.env, os.homedir);
  const changedFiles = new Set<string>();
  const warnings: string[] = [];
  // Plaintext values displaced by refs — candidates for the .env scrub.
  const scrubbedValues = new Set<string>();
  const providerTargets = new Set<string>();

  // Pass 1: rewrite config targets to SecretRefs.
  for (const target of params.plan.targets) {
    if (target.type === "channels.googlechat.serviceAccount") {
      // Google Chat keeps the ref in a sibling `…Ref` field and drops the
      // inline serviceAccount value.
      const previous = getByDotPath(nextConfig, target.path);
      if (isNonEmptyString(previous)) {
        scrubbedValues.add(previous.trim());
      }
      const refPath = resolveGoogleChatRefPath(target.path);
      setByDotPath(nextConfig, refPath, target.ref);
      deleteByDotPath(nextConfig, target.path);
      changedFiles.add(resolveUserPath(snapshot.path));
      continue;
    }

    // Generic targets: replace the value in place with the ref object.
    const previous = getByDotPath(nextConfig, target.path);
    if (isNonEmptyString(previous)) {
      scrubbedValues.add(previous.trim());
    }
    setByDotPath(nextConfig, target.path, target.ref);
    changedFiles.add(resolveUserPath(snapshot.path));
    if (target.type === "models.providers.apiKey" && target.providerId) {
      providerTargets.add(normalizeProviderId(target.providerId));
    }
  }

  // Pass 2: scrub static credentials from auth-profile stores for providers
  // that now have a config-side ref (so the store cannot shadow the ref).
  const authStoreByPath = new Map<string, Record<string, unknown>>();
  if (options.scrubAuthProfilesForProviderTargets && providerTargets.size > 0) {
    for (const authStorePath of collectAuthStorePaths(nextConfig, stateDir)) {
      if (!fs.existsSync(authStorePath)) {
        continue;
      }
      const raw = fs.readFileSync(authStorePath, "utf8");
      const parsed = JSON.parse(raw) as unknown;
      if (!isRecord(parsed) || !isRecord(parsed.profiles)) {
        continue;
      }
      const nextStore = structuredClone(parsed) as Record<string, unknown> & {
        profiles: Record<string, unknown>;
      };
      let mutated = false;
      for (const profileValue of Object.values(nextStore.profiles)) {
        if (!isRecord(profileValue) || !isNonEmptyString(profileValue.provider)) {
          continue;
        }
        const provider = normalizeProviderId(String(profileValue.provider));
        if (!providerTargets.has(provider)) {
          continue;
        }
        if (profileValue.type === "api_key") {
          // Record the plaintext for .env scrubbing, then drop key + keyRef.
          if (isNonEmptyString(profileValue.key)) {
            scrubbedValues.add(profileValue.key.trim());
          }
          if ("key" in profileValue) {
            delete profileValue.key;
            mutated = true;
          }
          if ("keyRef" in profileValue) {
            delete profileValue.keyRef;
            mutated = true;
          }
          continue;
        }
        if (profileValue.type === "token") {
          if (isNonEmptyString(profileValue.token)) {
            scrubbedValues.add(profileValue.token.trim());
          }
          if ("token" in profileValue) {
            delete profileValue.token;
            mutated = true;
          }
          if ("tokenRef" in profileValue) {
            delete profileValue.tokenRef;
            mutated = true;
          }
          continue;
        }
        if (profileValue.type === "oauth") {
          // OAuth creds are left in place; warn that they still win.
          warnings.push(
            `Provider "${provider}" has OAuth credentials in ${authStorePath}; those still take precedence and are out of scope for static SecretRef migration.`,
          );
        }
      }
      if (mutated) {
        authStoreByPath.set(authStorePath, nextStore);
        changedFiles.add(authStorePath);
      }
    }
  }

  // Pass 3: drop legacy auth.json api_key entries entirely.
  const authJsonByPath = new Map<string, Record<string, unknown>>();
  if (options.scrubLegacyAuthJson) {
    for (const authJsonPath of collectAuthJsonPaths(stateDir)) {
      const raw = fs.readFileSync(authJsonPath, "utf8");
      const parsed = JSON.parse(raw) as unknown;
      if (!isRecord(parsed)) {
        continue;
      }
      let mutated = false;
      const nextParsed = structuredClone(parsed);
      for (const [providerId, value] of Object.entries(nextParsed)) {
        if (!isRecord(value)) {
          continue;
        }
        if (value.type === "api_key" && isNonEmptyString(value.key)) {
          delete nextParsed[providerId];
          mutated = true;
        }
      }
      if (mutated) {
        authJsonByPath.set(authJsonPath, nextParsed);
        changedFiles.add(authJsonPath);
      }
    }
  }

  // Pass 4: scrub matching plaintext values out of the config-dir .env.
  const envRawByPath = new Map<string, string>();
  if (options.scrubEnv && scrubbedValues.size > 0) {
    const envPath = path.join(resolveConfigDir(params.env, os.homedir), ".env");
    if (fs.existsSync(envPath)) {
      const current = fs.readFileSync(envPath, "utf8");
      const scrubbed = scrubEnvRaw(current, scrubbedValues, new Set(listKnownSecretEnvVarNames()));
      if (scrubbed.removed > 0 && scrubbed.nextRaw !== current) {
        envRawByPath.set(envPath, scrubbed.nextRaw);
        changedFiles.add(envPath);
      }
    }
  }

  // Preflight: every planned ref must resolve to a usable value against the
  // projected config BEFORE anything is written.
  const cache = {};
  for (const target of params.plan.targets) {
    const resolved = await resolveSecretRefValue(target.ref, {
      config: nextConfig,
      env: params.env,
      cache,
    });
    if (target.type === "channels.googlechat.serviceAccount") {
      // Service accounts may resolve to a JSON object or a string.
      if (!(isNonEmptyString(resolved) || isRecord(resolved))) {
        throw new Error(
          `Ref ${target.ref.source}:${target.ref.provider}:${target.ref.id} is not string/object.`,
        );
      }
      continue;
    }
    if (!isNonEmptyString(resolved)) {
      throw new Error(
        `Ref ${target.ref.source}:${target.ref.provider}:${target.ref.id} is not a non-empty string.`,
      );
    }
  }

  // Preflight: build the runtime snapshot against the projected state,
  // substituting the in-memory scrubbed auth stores for the on-disk ones.
  const authStoreLookup = new Map<string, Record<string, unknown>>();
  for (const [authStorePath, store] of authStoreByPath.entries()) {
    authStoreLookup.set(resolveUserPath(authStorePath), store);
  }
  await prepareSecretsRuntimeSnapshot({
    config: nextConfig,
    env: params.env,
    loadAuthStore: (agentDir?: string) => {
      const storePath = resolveUserPath(resolveAuthStorePath(agentDir));
      const override = authStoreLookup.get(storePath);
      if (override) {
        return structuredClone(override) as unknown as ReturnType<
          typeof loadAuthProfileStoreForSecretsRuntime
        >;
      }
      return loadAuthProfileStoreForSecretsRuntime(agentDir);
    },
  });

  return {
    nextConfig,
    configPath: resolveUserPath(snapshot.path),
    configWriteOptions: writeOptions,
    authStoreByPath,
    authJsonByPath,
    envRawByPath,
    changedFiles,
    warnings,
  };
}
|
||||
|
||||
function captureFileSnapshot(pathname: string): FileSnapshot {
|
||||
if (!fs.existsSync(pathname)) {
|
||||
return { existed: false, content: "", mode: 0o600 };
|
||||
}
|
||||
const stat = fs.statSync(pathname);
|
||||
return {
|
||||
existed: true,
|
||||
content: fs.readFileSync(pathname, "utf8"),
|
||||
mode: stat.mode & 0o777,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Restore a file to its captured snapshot after a failed apply. A file
 * that did not exist at capture time is deleted; otherwise content and
 * mode are rewritten atomically (falling back to 0o600 if the captured
 * mode was 0).
 */
function restoreFileSnapshot(pathname: string, snapshot: FileSnapshot): void {
  if (!snapshot.existed) {
    if (fs.existsSync(pathname)) {
      fs.rmSync(pathname, { force: true });
    }
    return;
  }
  writeTextFileAtomic(pathname, snapshot.content, snapshot.mode || 0o600);
}
|
||||
|
||||
function toJsonWrite(pathname: string, value: Record<string, unknown>): ApplyWrite {
|
||||
return {
|
||||
path: pathname,
|
||||
content: `${JSON.stringify(value, null, 2)}\n`,
|
||||
mode: 0o600,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Apply a secrets plan. Always projects + preflights first (see
 * projectPlanState); in dry-run mode (`write` falsy, the default) returns
 * the projected change set without touching disk. In write mode it
 * snapshots every file it is about to touch, writes config + auth stores +
 * .env, and best-effort restores the snapshots if any write throws.
 *
 * @throws wraps any write failure (after attempted rollback) with the
 *   original error attached as `cause`.
 */
export async function runSecretsApply(params: {
  plan: SecretsApplyPlan;
  env?: NodeJS.ProcessEnv;
  write?: boolean;
}): Promise<SecretsApplyResult> {
  const env = params.env ?? process.env;
  const projected = await projectPlanState({ plan: params.plan, env });
  const changedFiles = [...projected.changedFiles].toSorted();
  if (!params.write) {
    return {
      mode: "dry-run",
      changed: changedFiles.length > 0,
      changedFiles,
      warningCount: projected.warnings.length,
      warnings: projected.warnings,
    };
  }

  const io = createSecretsConfigIO({ env });
  // Rollback snapshots, captured once per path before any write.
  const snapshots = new Map<string, FileSnapshot>();
  const capture = (pathname: string) => {
    if (!snapshots.has(pathname)) {
      snapshots.set(pathname, captureFileSnapshot(pathname));
    }
  };

  capture(projected.configPath);
  const writes: ApplyWrite[] = [];
  for (const [pathname, value] of projected.authStoreByPath.entries()) {
    capture(pathname);
    writes.push(toJsonWrite(pathname, value));
  }
  for (const [pathname, value] of projected.authJsonByPath.entries()) {
    capture(pathname);
    writes.push(toJsonWrite(pathname, value));
  }
  for (const [pathname, raw] of projected.envRawByPath.entries()) {
    capture(pathname);
    writes.push({
      path: pathname,
      content: raw,
      mode: 0o600,
    });
  }

  try {
    // Config first, then the queued auxiliary files.
    await io.writeConfigFile(projected.nextConfig, projected.configWriteOptions);
    for (const write of writes) {
      writeTextFileAtomic(write.path, write.content, write.mode);
    }
  } catch (err) {
    // Roll back everything we snapshotted; rollback errors are swallowed so
    // the original failure is what callers see.
    for (const [pathname, snapshot] of snapshots.entries()) {
      try {
        restoreFileSnapshot(pathname, snapshot);
      } catch {
        // Best effort only; preserve original error.
      }
    }
    throw new Error(`Secrets apply failed: ${String(err)}`, { cause: err });
  }

  return {
    mode: "write",
    changed: changedFiles.length > 0,
    changedFiles,
    warningCount: projected.warnings.length,
    warnings: projected.warnings,
  };
}
|
||||
83
src/secrets/audit.test.ts
Normal file
83
src/secrets/audit.test.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
import { runSecretsAudit } from "./audit.js";
|
||||
|
||||
describe("secrets audit", () => {
  // Per-test sandbox: isolated state dir, config, agent auth store, .env.
  let rootDir = "";
  let stateDir = "";
  let configPath = "";
  let authStorePath = "";
  let envPath = "";
  let env: NodeJS.ProcessEnv;

  beforeEach(async () => {
    rootDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-secrets-audit-"));
    stateDir = path.join(rootDir, ".openclaw");
    configPath = path.join(stateDir, "openclaw.json");
    authStorePath = path.join(stateDir, "agents", "main", "agent", "auth-profiles.json");
    envPath = path.join(stateDir, ".env");
    // Point the audit at the sandbox; OPENAI_API_KEY makes the config's
    // env-backed ref resolvable.
    env = {
      ...process.env,
      OPENCLAW_STATE_DIR: stateDir,
      OPENCLAW_CONFIG_PATH: configPath,
      OPENAI_API_KEY: "env-openai-key",
    };

    await fs.mkdir(path.dirname(configPath), { recursive: true });
    await fs.mkdir(path.dirname(authStorePath), { recursive: true });
    // Config already uses a SecretRef for the provider key (the clean case).
    await fs.writeFile(
      configPath,
      `${JSON.stringify(
        {
          models: {
            providers: {
              openai: {
                baseUrl: "https://api.openai.com/v1",
                api: "openai-completions",
                apiKey: { source: "env", provider: "default", id: "OPENAI_API_KEY" },
                models: [{ id: "gpt-5", name: "gpt-5" }],
              },
            },
          },
        },
        null,
        2,
      )}\n`,
      "utf8",
    );
    // The auth store still holds a plaintext key for the same provider,
    // which shadows the config-side ref.
    await fs.writeFile(
      authStorePath,
      `${JSON.stringify(
        {
          version: 1,
          profiles: {
            "openai:default": {
              type: "api_key",
              provider: "openai",
              key: "sk-openai-plaintext",
            },
          },
        },
        null,
        2,
      )}\n`,
      "utf8",
    );
    // Plaintext residue in .env as well.
    await fs.writeFile(envPath, "OPENAI_API_KEY=sk-openai-plaintext\n", "utf8");
  });

  afterEach(async () => {
    await fs.rm(rootDir, { recursive: true, force: true });
  });

  it("reports plaintext + shadowing findings", async () => {
    const report = await runSecretsAudit({ env });
    expect(report.status).toBe("findings");
    expect(report.summary.plaintextCount).toBeGreaterThan(0);
    expect(report.summary.shadowedRefCount).toBeGreaterThan(0);
    expect(report.findings.some((entry) => entry.code === "REF_SHADOWED")).toBe(true);
    expect(report.findings.some((entry) => entry.code === "PLAINTEXT_FOUND")).toBe(true);
  });
});
|
||||
613
src/secrets/audit.ts
Normal file
613
src/secrets/audit.ts
Normal file
@@ -0,0 +1,613 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { listAgentIds, resolveAgentDir } from "../agents/agent-scope.js";
|
||||
import { resolveAuthStorePath } from "../agents/auth-profiles/paths.js";
|
||||
import { normalizeProviderId } from "../agents/model-selection.js";
|
||||
import { resolveStateDir, type OpenClawConfig } from "../config/config.js";
|
||||
import { coerceSecretRef, type SecretRef } from "../config/types.secrets.js";
|
||||
import { resolveConfigDir, resolveUserPath } from "../utils.js";
|
||||
import { createSecretsConfigIO } from "./config-io.js";
|
||||
import { listKnownSecretEnvVarNames } from "./provider-env-vars.js";
|
||||
import { resolveSecretRefValue, type SecretRefResolveCache } from "./resolve.js";
|
||||
import { isNonEmptyString, isRecord } from "./shared.js";
|
||||
|
||||
/** Classification of each audit finding. */
export type SecretsAuditCode =
  | "PLAINTEXT_FOUND"
  | "REF_UNRESOLVED"
  | "REF_SHADOWED"
  | "LEGACY_RESIDUE";

export type SecretsAuditSeverity = "info" | "warn" | "error";

/** One issue located during the scan, addressed by file + JSON path. */
export type SecretsAuditFinding = {
  code: SecretsAuditCode;
  severity: SecretsAuditSeverity;
  file: string;
  jsonPath: string;
  message: string;
  provider?: string;
  profileId?: string;
};

/** Overall audit verdict. */
export type SecretsAuditStatus = "clean" | "findings" | "unresolved";

/** JSON-serializable report returned by `runSecretsAudit`. */
export type SecretsAuditReport = {
  version: 1;
  status: SecretsAuditStatus;
  filesScanned: string[];
  summary: {
    plaintextCount: number;
    unresolvedRefCount: number;
    shadowedRefCount: number;
    legacyResidueCount: number;
  };
  findings: SecretsAuditFinding[];
};

// A SecretRef discovered at a config/auth-store location; `expected`
// records what shape its resolved value must have.
type RefAssignment = {
  file: string;
  path: string;
  ref: SecretRef;
  expected: "string" | "string-or-object";
  provider?: string;
};

// Per-provider summary of credentials seen in auth stores.
type ProviderAuthState = {
  hasUsableStaticOrOAuth: boolean;
  modes: Set<"api_key" | "token" | "oauth">;
};

// Shape of config `secrets.defaults` used when coercing shorthand refs.
type SecretDefaults = {
  env?: string;
  file?: string;
  exec?: string;
};

// Mutable accumulator threaded through every scanner in this module.
type AuditCollector = {
  findings: SecretsAuditFinding[];
  refAssignments: RefAssignment[];
  configProviderRefPaths: Map<string, string[]>;
  authProviderState: Map<string, ProviderAuthState>;
  filesScanned: Set<string>;
};
|
||||
|
||||
/** Append one finding; every scanner records findings through this single helper. */
function addFinding(collector: AuditCollector, finding: SecretsAuditFinding): void {
  collector.findings.push(finding);
}
|
||||
|
||||
function collectProviderRefPath(
|
||||
collector: AuditCollector,
|
||||
providerId: string,
|
||||
configPath: string,
|
||||
): void {
|
||||
const key = normalizeProviderId(providerId);
|
||||
const existing = collector.configProviderRefPaths.get(key);
|
||||
if (existing) {
|
||||
existing.push(configPath);
|
||||
return;
|
||||
}
|
||||
collector.configProviderRefPaths.set(key, [configPath]);
|
||||
}
|
||||
|
||||
/**
 * Record that an auth store holds a credential for `provider` and which
 * mode it uses. Provider ids are normalized so lookups match the
 * config-side provider map.
 */
function trackAuthProviderState(
  collector: AuditCollector,
  provider: string,
  mode: "api_key" | "token" | "oauth",
): void {
  const key = normalizeProviderId(provider);
  const existing = collector.authProviderState.get(key);
  if (existing) {
    // NOTE(review): hasUsableStaticOrOAuth is set unconditionally here —
    // callers are presumed to invoke this only for usable credentials;
    // confirm against the call sites.
    existing.hasUsableStaticOrOAuth = true;
    existing.modes.add(mode);
    return;
  }
  collector.authProviderState.set(key, {
    hasUsableStaticOrOAuth: true,
    modes: new Set([mode]),
  });
}
|
||||
|
||||
function parseDotPath(pathname: string): string[] {
|
||||
return pathname.split(".").filter(Boolean);
|
||||
}
|
||||
|
||||
function parseEnvValue(raw: string): string {
|
||||
const trimmed = raw.trim();
|
||||
if (
|
||||
(trimmed.startsWith('"') && trimmed.endsWith('"')) ||
|
||||
(trimmed.startsWith("'") && trimmed.endsWith("'"))
|
||||
) {
|
||||
return trimmed.slice(1, -1);
|
||||
}
|
||||
return trimmed;
|
||||
}
|
||||
|
||||
/**
 * Scan the `.env` file for non-empty values assigned to known secret env
 * var names and report each as a PLAINTEXT_FOUND finding. Keys outside
 * listKnownSecretEnvVarNames() never produce findings, so unrelated
 * variables are ignored.
 */
function collectEnvPlaintext(params: { envPath: string; collector: AuditCollector }): void {
  if (!fs.existsSync(params.envPath)) {
    return;
  }
  params.collector.filesScanned.add(params.envPath);
  const knownKeys = new Set(listKnownSecretEnvVarNames());
  const raw = fs.readFileSync(params.envPath, "utf8");
  const lines = raw.split(/\r?\n/);
  for (const line of lines) {
    // KEY=value with optional leading `export` (dotenv-style).
    const match = line.match(/^\s*(?:export\s+)?([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.*)$/);
    if (!match) {
      continue;
    }
    const key = match[1] ?? "";
    if (!knownKeys.has(key)) {
      continue;
    }
    const value = parseEnvValue(match[2] ?? "");
    if (!value) {
      continue;
    }
    addFinding(params.collector, {
      code: "PLAINTEXT_FOUND",
      severity: "warn",
      file: params.envPath,
      jsonPath: `$env.${key}`,
      message: `Potential secret found in .env (${key}).`,
    });
  }
}
|
||||
|
||||
function readJsonObject(filePath: string): Record<string, unknown> | null {
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return null;
|
||||
}
|
||||
const raw = fs.readFileSync(filePath, "utf8");
|
||||
const parsed = JSON.parse(raw) as unknown;
|
||||
if (!isRecord(parsed)) {
|
||||
return null;
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
/**
 * Scan the main config for secret-bearing fields: provider API keys, skill
 * API keys, and Google Chat service accounts (top-level and per-account).
 * Values that coerce to a SecretRef are queued as RefAssignments for later
 * resolution checks; non-empty plaintext values produce PLAINTEXT_FOUND
 * findings instead.
 */
function collectConfigSecrets(params: {
  config: OpenClawConfig;
  configPath: string;
  collector: AuditCollector;
}): void {
  const defaults = params.config.secrets?.defaults;

  // --- models.providers.*.apiKey ---
  const providers = params.config.models?.providers as
    | Record<string, { apiKey?: unknown }>
    | undefined;
  if (providers) {
    for (const [providerId, provider] of Object.entries(providers)) {
      const pathLabel = `models.providers.${providerId}.apiKey`;
      const ref = coerceSecretRef(provider.apiKey, defaults);
      if (ref) {
        params.collector.refAssignments.push({
          file: params.configPath,
          path: pathLabel,
          ref,
          expected: "string",
          provider: providerId,
        });
        // Remember the ref location for provider-shadowing checks.
        collectProviderRefPath(params.collector, providerId, pathLabel);
        continue;
      }
      if (isNonEmptyString(provider.apiKey)) {
        addFinding(params.collector, {
          code: "PLAINTEXT_FOUND",
          severity: "warn",
          file: params.configPath,
          jsonPath: pathLabel,
          message: "Provider apiKey is stored as plaintext.",
          provider: providerId,
        });
      }
    }
  }

  // --- skills.entries.*.apiKey ---
  const entries = params.config.skills?.entries as Record<string, { apiKey?: unknown }> | undefined;
  if (entries) {
    for (const [entryId, entry] of Object.entries(entries)) {
      const pathLabel = `skills.entries.${entryId}.apiKey`;
      const ref = coerceSecretRef(entry.apiKey, defaults);
      if (ref) {
        params.collector.refAssignments.push({
          file: params.configPath,
          path: pathLabel,
          ref,
          expected: "string",
        });
        continue;
      }
      if (isNonEmptyString(entry.apiKey)) {
        addFinding(params.collector, {
          code: "PLAINTEXT_FOUND",
          severity: "warn",
          file: params.configPath,
          jsonPath: pathLabel,
          message: "Skill apiKey is stored as plaintext.",
        });
      }
    }
  }

  // --- channels.googlechat serviceAccount(s) ---
  const googlechat = params.config.channels?.googlechat as
    | {
        serviceAccount?: unknown;
        serviceAccountRef?: unknown;
        accounts?: Record<string, unknown>;
      }
    | undefined;
  if (!googlechat) {
    return;
  }

  // An explicit `…Ref` field wins over a ref-shaped inline serviceAccount.
  const collectGoogleChatValue = (
    value: unknown,
    refValue: unknown,
    pathLabel: string,
    accountId?: string,
  ) => {
    const explicitRef = coerceSecretRef(refValue, defaults);
    const inlineRef = explicitRef ? null : coerceSecretRef(value, defaults);
    const ref = explicitRef ?? inlineRef;
    if (ref) {
      params.collector.refAssignments.push({
        file: params.configPath,
        path: pathLabel,
        ref,
        expected: "string-or-object",
        provider: accountId ? "googlechat" : undefined,
      });
      return;
    }
    // Inline string or non-empty object that did not coerce to a ref is
    // treated as plaintext service-account material.
    if (isNonEmptyString(value) || (isRecord(value) && Object.keys(value).length > 0)) {
      addFinding(params.collector, {
        code: "PLAINTEXT_FOUND",
        severity: "warn",
        file: params.configPath,
        jsonPath: pathLabel,
        message: "Google Chat serviceAccount is stored as plaintext.",
      });
    }
  };

  collectGoogleChatValue(
    googlechat.serviceAccount,
    googlechat.serviceAccountRef,
    "channels.googlechat.serviceAccount",
  );
  if (!isRecord(googlechat.accounts)) {
    return;
  }
  for (const [accountId, accountValue] of Object.entries(googlechat.accounts)) {
    if (!isRecord(accountValue)) {
      continue;
    }
    collectGoogleChatValue(
      accountValue.serviceAccount,
      accountValue.serviceAccountRef,
      `channels.googlechat.accounts.${accountId}.serviceAccount`,
      accountId,
    );
  }
}
|
||||
|
||||
/**
 * Enumerate every auth-profiles.json that may hold provider credentials:
 * the global store, every agent directory found on disk under
 * `<state>/agents/<id>/agent/`, and every agent declared in config. This
 * mirrors the helper of the same name in apply.ts; keep the two in sync.
 */
function collectAuthStorePaths(config: OpenClawConfig, stateDir: string): string[] {
  const paths = new Set<string>();
  paths.add(resolveUserPath(resolveAuthStorePath()));

  // On-disk scan: agents that exist even if not declared in config.
  const agentsRoot = path.join(resolveUserPath(stateDir), "agents");
  if (fs.existsSync(agentsRoot)) {
    for (const entry of fs.readdirSync(agentsRoot, { withFileTypes: true })) {
      if (!entry.isDirectory()) {
        continue;
      }
      paths.add(path.join(agentsRoot, entry.name, "agent", "auth-profiles.json"));
    }
  }

  // Config-declared agents, resolved through their configured directory.
  for (const agentId of listAgentIds(config)) {
    const agentDir = resolveAgentDir(config, agentId);
    paths.add(resolveUserPath(resolveAuthStorePath(agentDir)));
  }

  return [...paths];
}
|
||||
|
||||
function collectAuthStoreSecrets(params: {
|
||||
authStorePath: string;
|
||||
collector: AuditCollector;
|
||||
defaults?: SecretDefaults;
|
||||
}): void {
|
||||
if (!fs.existsSync(params.authStorePath)) {
|
||||
return;
|
||||
}
|
||||
params.collector.filesScanned.add(params.authStorePath);
|
||||
const parsed = readJsonObject(params.authStorePath);
|
||||
if (!parsed || !isRecord(parsed.profiles)) {
|
||||
return;
|
||||
}
|
||||
for (const [profileId, profileValue] of Object.entries(parsed.profiles)) {
|
||||
if (!isRecord(profileValue) || !isNonEmptyString(profileValue.provider)) {
|
||||
continue;
|
||||
}
|
||||
const provider = String(profileValue.provider);
|
||||
if (profileValue.type === "api_key") {
|
||||
const keyRef = coerceSecretRef(profileValue.keyRef, params.defaults);
|
||||
const inlineRef = keyRef ? null : coerceSecretRef(profileValue.key, params.defaults);
|
||||
const ref = keyRef ?? inlineRef;
|
||||
if (ref) {
|
||||
params.collector.refAssignments.push({
|
||||
file: params.authStorePath,
|
||||
path: `profiles.${profileId}.key`,
|
||||
ref,
|
||||
expected: "string",
|
||||
provider,
|
||||
});
|
||||
trackAuthProviderState(params.collector, provider, "api_key");
|
||||
}
|
||||
if (isNonEmptyString(profileValue.key)) {
|
||||
addFinding(params.collector, {
|
||||
code: "PLAINTEXT_FOUND",
|
||||
severity: "warn",
|
||||
file: params.authStorePath,
|
||||
jsonPath: `profiles.${profileId}.key`,
|
||||
message: "Auth profile API key is stored as plaintext.",
|
||||
provider,
|
||||
profileId,
|
||||
});
|
||||
trackAuthProviderState(params.collector, provider, "api_key");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (profileValue.type === "token") {
|
||||
const tokenRef = coerceSecretRef(profileValue.tokenRef, params.defaults);
|
||||
const inlineRef = tokenRef ? null : coerceSecretRef(profileValue.token, params.defaults);
|
||||
const ref = tokenRef ?? inlineRef;
|
||||
if (ref) {
|
||||
params.collector.refAssignments.push({
|
||||
file: params.authStorePath,
|
||||
path: `profiles.${profileId}.token`,
|
||||
ref,
|
||||
expected: "string",
|
||||
provider,
|
||||
});
|
||||
trackAuthProviderState(params.collector, provider, "token");
|
||||
}
|
||||
if (isNonEmptyString(profileValue.token)) {
|
||||
addFinding(params.collector, {
|
||||
code: "PLAINTEXT_FOUND",
|
||||
severity: "warn",
|
||||
file: params.authStorePath,
|
||||
jsonPath: `profiles.${profileId}.token`,
|
||||
message: "Auth profile token is stored as plaintext.",
|
||||
provider,
|
||||
profileId,
|
||||
});
|
||||
trackAuthProviderState(params.collector, provider, "token");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (profileValue.type === "oauth") {
|
||||
const hasAccess = isNonEmptyString(profileValue.access);
|
||||
const hasRefresh = isNonEmptyString(profileValue.refresh);
|
||||
if (hasAccess || hasRefresh) {
|
||||
addFinding(params.collector, {
|
||||
code: "LEGACY_RESIDUE",
|
||||
severity: "info",
|
||||
file: params.authStorePath,
|
||||
jsonPath: `profiles.${profileId}`,
|
||||
message: "OAuth credentials are present (out of scope for static SecretRef migration).",
|
||||
provider,
|
||||
profileId,
|
||||
});
|
||||
trackAuthProviderState(params.collector, provider, "oauth");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function collectAuthJsonResidue(params: { stateDir: string; collector: AuditCollector }): void {
|
||||
const agentsRoot = path.join(resolveUserPath(params.stateDir), "agents");
|
||||
if (!fs.existsSync(agentsRoot)) {
|
||||
return;
|
||||
}
|
||||
for (const entry of fs.readdirSync(agentsRoot, { withFileTypes: true })) {
|
||||
if (!entry.isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
const authJsonPath = path.join(agentsRoot, entry.name, "agent", "auth.json");
|
||||
if (!fs.existsSync(authJsonPath)) {
|
||||
continue;
|
||||
}
|
||||
params.collector.filesScanned.add(authJsonPath);
|
||||
const parsed = readJsonObject(authJsonPath);
|
||||
if (!parsed) {
|
||||
continue;
|
||||
}
|
||||
for (const [providerId, value] of Object.entries(parsed)) {
|
||||
if (!isRecord(value)) {
|
||||
continue;
|
||||
}
|
||||
if (value.type === "api_key" && isNonEmptyString(value.key)) {
|
||||
addFinding(params.collector, {
|
||||
code: "LEGACY_RESIDUE",
|
||||
severity: "warn",
|
||||
file: authJsonPath,
|
||||
jsonPath: providerId,
|
||||
message: "Legacy auth.json contains static api_key credentials.",
|
||||
provider: providerId,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function collectUnresolvedRefFindings(params: {
|
||||
collector: AuditCollector;
|
||||
config: OpenClawConfig;
|
||||
env: NodeJS.ProcessEnv;
|
||||
}): Promise<void> {
|
||||
const cache: SecretRefResolveCache = {};
|
||||
for (const assignment of params.collector.refAssignments) {
|
||||
try {
|
||||
const resolved = await resolveSecretRefValue(assignment.ref, {
|
||||
config: params.config,
|
||||
env: params.env,
|
||||
cache,
|
||||
});
|
||||
if (assignment.expected === "string") {
|
||||
if (!isNonEmptyString(resolved)) {
|
||||
throw new Error("resolved value is not a non-empty string");
|
||||
}
|
||||
} else if (!(isNonEmptyString(resolved) || isRecord(resolved))) {
|
||||
throw new Error("resolved value is not a string/object");
|
||||
}
|
||||
} catch (err) {
|
||||
addFinding(params.collector, {
|
||||
code: "REF_UNRESOLVED",
|
||||
severity: "error",
|
||||
file: assignment.file,
|
||||
jsonPath: assignment.path,
|
||||
message: `Failed to resolve ${assignment.ref.source}:${assignment.ref.provider}:${assignment.ref.id} (${String(err)}).`,
|
||||
provider: assignment.provider,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function collectShadowingFindings(collector: AuditCollector): void {
|
||||
for (const [provider, paths] of collector.configProviderRefPaths.entries()) {
|
||||
const authState = collector.authProviderState.get(provider);
|
||||
if (!authState?.hasUsableStaticOrOAuth) {
|
||||
continue;
|
||||
}
|
||||
const modeText = [...authState.modes].join("/");
|
||||
for (const configPath of paths) {
|
||||
addFinding(collector, {
|
||||
code: "REF_SHADOWED",
|
||||
severity: "warn",
|
||||
file: "openclaw.json",
|
||||
jsonPath: configPath,
|
||||
message: `Auth profile credentials (${modeText}) take precedence for provider "${provider}", so this config ref may never be used.`,
|
||||
provider,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function summarizeFindings(findings: SecretsAuditFinding[]): SecretsAuditReport["summary"] {
|
||||
return {
|
||||
plaintextCount: findings.filter((entry) => entry.code === "PLAINTEXT_FOUND").length,
|
||||
unresolvedRefCount: findings.filter((entry) => entry.code === "REF_UNRESOLVED").length,
|
||||
shadowedRefCount: findings.filter((entry) => entry.code === "REF_SHADOWED").length,
|
||||
legacyResidueCount: findings.filter((entry) => entry.code === "LEGACY_RESIDUE").length,
|
||||
};
|
||||
}
|
||||
|
||||
export async function runSecretsAudit(
|
||||
params: {
|
||||
env?: NodeJS.ProcessEnv;
|
||||
} = {},
|
||||
): Promise<SecretsAuditReport> {
|
||||
const env = params.env ?? process.env;
|
||||
const io = createSecretsConfigIO({ env });
|
||||
const { snapshot } = await io.readConfigFileSnapshotForWrite();
|
||||
const configPath = resolveUserPath(snapshot.path);
|
||||
const defaults = snapshot.valid ? snapshot.config.secrets?.defaults : undefined;
|
||||
|
||||
const collector: AuditCollector = {
|
||||
findings: [],
|
||||
refAssignments: [],
|
||||
configProviderRefPaths: new Map(),
|
||||
authProviderState: new Map(),
|
||||
filesScanned: new Set([configPath]),
|
||||
};
|
||||
|
||||
const stateDir = resolveStateDir(env, os.homedir);
|
||||
const envPath = path.join(resolveConfigDir(env, os.homedir), ".env");
|
||||
const config = snapshot.valid ? snapshot.config : ({} as OpenClawConfig);
|
||||
|
||||
if (snapshot.valid) {
|
||||
collectConfigSecrets({
|
||||
config,
|
||||
configPath,
|
||||
collector,
|
||||
});
|
||||
for (const authStorePath of collectAuthStorePaths(config, stateDir)) {
|
||||
collectAuthStoreSecrets({
|
||||
authStorePath,
|
||||
collector,
|
||||
defaults,
|
||||
});
|
||||
}
|
||||
await collectUnresolvedRefFindings({
|
||||
collector,
|
||||
config,
|
||||
env,
|
||||
});
|
||||
collectShadowingFindings(collector);
|
||||
} else {
|
||||
addFinding(collector, {
|
||||
code: "REF_UNRESOLVED",
|
||||
severity: "error",
|
||||
file: configPath,
|
||||
jsonPath: "<root>",
|
||||
message: "Config is invalid; cannot validate secret references reliably.",
|
||||
});
|
||||
}
|
||||
|
||||
collectEnvPlaintext({
|
||||
envPath,
|
||||
collector,
|
||||
});
|
||||
collectAuthJsonResidue({
|
||||
stateDir,
|
||||
collector,
|
||||
});
|
||||
|
||||
const summary = summarizeFindings(collector.findings);
|
||||
const status: SecretsAuditStatus =
|
||||
summary.unresolvedRefCount > 0
|
||||
? "unresolved"
|
||||
: collector.findings.length > 0
|
||||
? "findings"
|
||||
: "clean";
|
||||
|
||||
return {
|
||||
version: 1,
|
||||
status,
|
||||
filesScanned: [...collector.filesScanned].toSorted(),
|
||||
summary,
|
||||
findings: collector.findings,
|
||||
};
|
||||
}
|
||||
|
||||
export function resolveSecretsAuditExitCode(report: SecretsAuditReport, check: boolean): number {
|
||||
if (report.summary.unresolvedRefCount > 0) {
|
||||
return 2;
|
||||
}
|
||||
if (check && report.findings.length > 0) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
export function applySecretsPlanTarget(
|
||||
config: OpenClawConfig,
|
||||
pathLabel: string,
|
||||
value: unknown,
|
||||
): void {
|
||||
const segments = parseDotPath(pathLabel);
|
||||
if (segments.length === 0) {
|
||||
throw new Error("Invalid target path.");
|
||||
}
|
||||
let cursor: Record<string, unknown> = config as unknown as Record<string, unknown>;
|
||||
for (const segment of segments.slice(0, -1)) {
|
||||
const existing = cursor[segment];
|
||||
if (!isRecord(existing)) {
|
||||
cursor[segment] = {};
|
||||
}
|
||||
cursor = cursor[segment] as Record<string, unknown>;
|
||||
}
|
||||
cursor[segments[segments.length - 1]] = value;
|
||||
}
|
||||
14
src/secrets/config-io.ts
Normal file
14
src/secrets/config-io.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { createConfigIO } from "../config/config.js";
|
||||
|
||||
const silentConfigIoLogger = {
|
||||
error: () => {},
|
||||
warn: () => {},
|
||||
} as const;
|
||||
|
||||
export function createSecretsConfigIO(params: { env: NodeJS.ProcessEnv }) {
|
||||
// Secrets command output is owned by the CLI command so --json stays machine-parseable.
|
||||
return createConfigIO({
|
||||
env: params.env,
|
||||
logger: silentConfigIoLogger,
|
||||
});
|
||||
}
|
||||
236
src/secrets/configure.ts
Normal file
236
src/secrets/configure.ts
Normal file
@@ -0,0 +1,236 @@
|
||||
import { confirm, select, text } from "@clack/prompts";
|
||||
import type { OpenClawConfig } from "../config/config.js";
|
||||
import type { SecretRef, SecretRefSource } from "../config/types.secrets.js";
|
||||
import { runSecretsApply, type SecretsApplyResult } from "./apply.js";
|
||||
import { createSecretsConfigIO } from "./config-io.js";
|
||||
import { type SecretsApplyPlan } from "./plan.js";
|
||||
import { resolveDefaultSecretProviderAlias } from "./ref-contract.js";
|
||||
import { isRecord } from "./shared.js";
|
||||
|
||||
// One secret-bearing config field the interactive configure flow can point
// at a SecretRef.
type ConfigureCandidate = {
  // Family of config field; drives how the apply step treats the target.
  type: "models.providers.apiKey" | "skills.entries.apiKey" | "channels.googlechat.serviceAccount";
  // Dotted config path of the field, e.g. "models.providers.openai.apiKey".
  path: string;
  // Human-readable label shown in the interactive picker.
  label: string;
  // Present only for models.providers targets.
  providerId?: string;
  // Present only for per-account Google Chat targets.
  accountId?: string;
};
|
||||
|
||||
// Outcome of the interactive configure flow: the generated plan plus the
// result of its preflight (dry-run) apply. Nothing has been written yet.
export type SecretsConfigureResult = {
  plan: SecretsApplyPlan;
  preflight: SecretsApplyResult;
};
|
||||
|
||||
function buildCandidates(config: OpenClawConfig): ConfigureCandidate[] {
|
||||
const out: ConfigureCandidate[] = [];
|
||||
const providers = config.models?.providers as Record<string, unknown> | undefined;
|
||||
if (providers) {
|
||||
for (const [providerId, providerValue] of Object.entries(providers)) {
|
||||
if (!isRecord(providerValue)) {
|
||||
continue;
|
||||
}
|
||||
out.push({
|
||||
type: "models.providers.apiKey",
|
||||
path: `models.providers.${providerId}.apiKey`,
|
||||
label: `Provider API key: ${providerId}`,
|
||||
providerId,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const entries = config.skills?.entries as Record<string, unknown> | undefined;
|
||||
if (entries) {
|
||||
for (const [entryId, entryValue] of Object.entries(entries)) {
|
||||
if (!isRecord(entryValue)) {
|
||||
continue;
|
||||
}
|
||||
out.push({
|
||||
type: "skills.entries.apiKey",
|
||||
path: `skills.entries.${entryId}.apiKey`,
|
||||
label: `Skill API key: ${entryId}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const googlechat = config.channels?.googlechat;
|
||||
if (isRecord(googlechat)) {
|
||||
out.push({
|
||||
type: "channels.googlechat.serviceAccount",
|
||||
path: "channels.googlechat.serviceAccount",
|
||||
label: "Google Chat serviceAccount (default)",
|
||||
});
|
||||
const accounts = googlechat.accounts;
|
||||
if (isRecord(accounts)) {
|
||||
for (const [accountId, value] of Object.entries(accounts)) {
|
||||
if (!isRecord(value)) {
|
||||
continue;
|
||||
}
|
||||
out.push({
|
||||
type: "channels.googlechat.serviceAccount",
|
||||
path: `channels.googlechat.accounts.${accountId}.serviceAccount`,
|
||||
label: `Google Chat serviceAccount (${accountId})`,
|
||||
accountId,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
function toSourceChoices(config: OpenClawConfig): Array<{ value: SecretRefSource; label: string }> {
|
||||
const hasSource = (source: SecretRefSource) =>
|
||||
Object.values(config.secrets?.providers ?? {}).some((provider) => provider?.source === source);
|
||||
const choices: Array<{ value: SecretRefSource; label: string }> = [
|
||||
{ value: "env", label: "env" },
|
||||
];
|
||||
if (hasSource("file")) {
|
||||
choices.push({ value: "file", label: "file" });
|
||||
}
|
||||
if (hasSource("exec")) {
|
||||
choices.push({ value: "exec", label: "exec" });
|
||||
}
|
||||
return choices;
|
||||
}
|
||||
|
||||
function assertNoCancel<T>(value: T | symbol, message: string): T {
|
||||
if (typeof value === "symbol") {
|
||||
throw new Error(message);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
 * Interactive `openclaw secrets configure` flow.
 *
 * Prompts the user to pick secret-bearing config fields, choose a ref
 * source/provider/id for each, then builds a SecretsApplyPlan and runs a
 * preflight (dry-run) apply. Nothing is written to disk here; callers decide
 * whether to apply the returned plan.
 *
 * @throws Error when stdin is not a TTY, config is invalid, any prompt is
 *   cancelled, no candidates exist, or nothing was selected.
 */
export async function runSecretsConfigureInteractive(
  params: {
    env?: NodeJS.ProcessEnv;
  } = {},
): Promise<SecretsConfigureResult> {
  if (!process.stdin.isTTY) {
    throw new Error("secrets configure requires an interactive TTY.");
  }
  const env = params.env ?? process.env;
  const io = createSecretsConfigIO({ env });
  const { snapshot } = await io.readConfigFileSnapshotForWrite();
  if (!snapshot.valid) {
    throw new Error("Cannot run interactive secrets configure because config is invalid.");
  }

  const candidates = buildCandidates(snapshot.config);
  if (candidates.length === 0) {
    throw new Error("No configurable secret-bearing fields found in openclaw.json.");
  }

  // Keyed by config path so re-selecting a field overwrites its earlier ref.
  const selectedByPath = new Map<string, ConfigureCandidate & { ref: SecretRef }>();
  const sourceChoices = toSourceChoices(snapshot.config);

  while (true) {
    const options = candidates.map((candidate) => ({
      value: candidate.path,
      label: candidate.label,
      hint: candidate.path,
    }));
    // Offer an explicit exit once at least one field has been configured.
    if (selectedByPath.size > 0) {
      options.unshift({
        value: "__done__",
        label: "Done",
        hint: "Finish and run preflight",
      });
    }

    const selectedPath = assertNoCancel(
      await select({
        message: "Select credential field",
        options,
      }),
      "Secrets configure cancelled.",
    );

    if (selectedPath === "__done__") {
      break;
    }

    const candidate = candidates.find((entry) => entry.path === selectedPath);
    if (!candidate) {
      throw new Error(`Unknown configure target: ${selectedPath}`);
    }

    const source = assertNoCancel(
      await select({
        message: "Secret source",
        options: sourceChoices,
      }),
      "Secrets configure cancelled.",
    ) as SecretRefSource;

    // Prefill the provider alias with the configured default for this source.
    const defaultAlias = resolveDefaultSecretProviderAlias(snapshot.config, source, {
      preferFirstProviderForSource: true,
    });
    const provider = assertNoCancel(
      await text({
        message: "Provider alias",
        initialValue: defaultAlias,
        validate: (value) => (String(value ?? "").trim().length > 0 ? undefined : "Required"),
      }),
      "Secrets configure cancelled.",
    );
    const id = assertNoCancel(
      await text({
        message: "Secret id",
        validate: (value) => (String(value ?? "").trim().length > 0 ? undefined : "Required"),
      }),
      "Secrets configure cancelled.",
    );
    const ref: SecretRef = {
      source,
      provider: String(provider).trim(),
      id: String(id).trim(),
    };

    const next = {
      ...candidate,
      ref,
    };
    selectedByPath.set(candidate.path, next);

    const addMore = assertNoCancel(
      await confirm({
        message: "Configure another credential?",
        initialValue: true,
      }),
      "Secrets configure cancelled.",
    );
    if (!addMore) {
      break;
    }
  }

  if (selectedByPath.size === 0) {
    throw new Error("No secrets were selected.");
  }

  const plan: SecretsApplyPlan = {
    version: 1,
    protocolVersion: 1,
    generatedAt: new Date().toISOString(),
    generatedBy: "openclaw secrets configure",
    targets: [...selectedByPath.values()].map((entry) => ({
      type: entry.type,
      path: entry.path,
      ref: entry.ref,
      // Carry target metadata only when present so plan JSON stays minimal.
      ...(entry.providerId ? { providerId: entry.providerId } : {}),
      ...(entry.accountId ? { accountId: entry.accountId } : {}),
    })),
    options: {
      scrubEnv: true,
      scrubAuthProfilesForProviderTargets: true,
      scrubLegacyAuthJson: true,
    },
  };

  // Preflight: dry-run apply so the caller can surface problems before writing.
  const preflight = await runSecretsApply({
    plan,
    env,
    write: false,
  });

  return { plan, preflight };
}
|
||||
@@ -1,241 +0,0 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
import { rollbackSecretsMigration, runSecretsMigration } from "./migrate.js";
|
||||
|
||||
describe("secrets migrate", () => {
|
||||
let baseDir = "";
|
||||
let stateDir = "";
|
||||
let configPath = "";
|
||||
let env: NodeJS.ProcessEnv;
|
||||
let authStorePath = "";
|
||||
let envPath = "";
|
||||
|
||||
beforeEach(async () => {
|
||||
baseDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-secrets-migrate-"));
|
||||
stateDir = path.join(baseDir, ".openclaw");
|
||||
configPath = path.join(stateDir, "openclaw.json");
|
||||
authStorePath = path.join(stateDir, "agents", "main", "agent", "auth-profiles.json");
|
||||
envPath = path.join(stateDir, ".env");
|
||||
env = {
|
||||
...process.env,
|
||||
OPENCLAW_STATE_DIR: stateDir,
|
||||
OPENCLAW_CONFIG_PATH: configPath,
|
||||
};
|
||||
|
||||
await fs.mkdir(path.dirname(configPath), { recursive: true });
|
||||
await fs.mkdir(path.dirname(authStorePath), { recursive: true });
|
||||
|
||||
await fs.writeFile(
|
||||
configPath,
|
||||
`${JSON.stringify(
|
||||
{
|
||||
models: {
|
||||
providers: {
|
||||
openai: {
|
||||
baseUrl: "https://api.openai.com/v1",
|
||||
apiKey: "sk-openai-plaintext",
|
||||
models: [{ id: "gpt-5", name: "gpt-5" }],
|
||||
},
|
||||
},
|
||||
},
|
||||
skills: {
|
||||
entries: {
|
||||
"review-pr": {
|
||||
enabled: true,
|
||||
apiKey: "sk-skill-plaintext",
|
||||
},
|
||||
},
|
||||
},
|
||||
channels: {
|
||||
googlechat: {
|
||||
serviceAccount: '{"type":"service_account","client_email":"bot@example.com"}',
|
||||
},
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
|
||||
await fs.writeFile(
|
||||
authStorePath,
|
||||
`${JSON.stringify(
|
||||
{
|
||||
version: 1,
|
||||
profiles: {
|
||||
"openai:default": {
|
||||
type: "api_key",
|
||||
provider: "openai",
|
||||
key: "sk-profile-plaintext",
|
||||
},
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
|
||||
await fs.writeFile(
|
||||
envPath,
|
||||
"OPENAI_API_KEY=sk-openai-plaintext\nSKILL_KEY=sk-skill-plaintext\nUNRELATED=value\n",
|
||||
"utf8",
|
||||
);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.rm(baseDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it("reports a dry-run without mutating files", async () => {
|
||||
const beforeConfig = await fs.readFile(configPath, "utf8");
|
||||
const beforeAuthStore = await fs.readFile(authStorePath, "utf8");
|
||||
|
||||
const result = await runSecretsMigration({ env });
|
||||
|
||||
expect(result.mode).toBe("dry-run");
|
||||
expect(result.changed).toBe(true);
|
||||
expect(result.counters.secretsWritten).toBeGreaterThanOrEqual(3);
|
||||
|
||||
expect(await fs.readFile(configPath, "utf8")).toBe(beforeConfig);
|
||||
expect(await fs.readFile(authStorePath, "utf8")).toBe(beforeAuthStore);
|
||||
});
|
||||
|
||||
it("migrates plaintext to file-backed refs and can rollback", async () => {
|
||||
const applyResult = await runSecretsMigration({ env, write: true });
|
||||
|
||||
expect(applyResult.mode).toBe("write");
|
||||
expect(applyResult.changed).toBe(true);
|
||||
expect(applyResult.backupId).toBeTruthy();
|
||||
|
||||
const migratedConfig = JSON.parse(await fs.readFile(configPath, "utf8")) as {
|
||||
models: { providers: { openai: { apiKey: unknown } } };
|
||||
skills: { entries: { "review-pr": { apiKey: unknown } } };
|
||||
channels: { googlechat: { serviceAccount?: unknown; serviceAccountRef?: unknown } };
|
||||
secrets: { providers: Record<string, { source: string; path: string }> };
|
||||
};
|
||||
expect(migratedConfig.models.providers.openai.apiKey).toEqual({
|
||||
source: "file",
|
||||
provider: "default",
|
||||
id: "/providers/openai/apiKey",
|
||||
});
|
||||
expect(migratedConfig.skills.entries["review-pr"].apiKey).toEqual({
|
||||
source: "file",
|
||||
provider: "default",
|
||||
id: "/skills/entries/review-pr/apiKey",
|
||||
});
|
||||
expect(migratedConfig.channels.googlechat.serviceAccount).toBeUndefined();
|
||||
expect(migratedConfig.channels.googlechat.serviceAccountRef).toEqual({
|
||||
source: "file",
|
||||
provider: "default",
|
||||
id: "/channels/googlechat/serviceAccount",
|
||||
});
|
||||
expect(migratedConfig.secrets.providers.default.source).toBe("file");
|
||||
|
||||
const migratedAuth = JSON.parse(await fs.readFile(authStorePath, "utf8")) as {
|
||||
profiles: { "openai:default": { key?: string; keyRef?: unknown } };
|
||||
};
|
||||
expect(migratedAuth.profiles["openai:default"].key).toBeUndefined();
|
||||
expect(migratedAuth.profiles["openai:default"].keyRef).toEqual({
|
||||
source: "file",
|
||||
provider: "default",
|
||||
id: "/auth-profiles/main/openai:default/key",
|
||||
});
|
||||
|
||||
const migratedEnv = await fs.readFile(envPath, "utf8");
|
||||
expect(migratedEnv).not.toContain("sk-openai-plaintext");
|
||||
expect(migratedEnv).toContain("SKILL_KEY=sk-skill-plaintext");
|
||||
expect(migratedEnv).toContain("UNRELATED=value");
|
||||
|
||||
const secretsPath = path.join(stateDir, "secrets.json");
|
||||
const secretsPayload = JSON.parse(await fs.readFile(secretsPath, "utf8")) as {
|
||||
providers: { openai: { apiKey: string } };
|
||||
skills: { entries: { "review-pr": { apiKey: string } } };
|
||||
channels: { googlechat: { serviceAccount: string } };
|
||||
"auth-profiles": { main: { "openai:default": { key: string } } };
|
||||
};
|
||||
expect(secretsPayload.providers.openai.apiKey).toBe("sk-openai-plaintext");
|
||||
expect(secretsPayload.skills.entries["review-pr"].apiKey).toBe("sk-skill-plaintext");
|
||||
expect(secretsPayload.channels.googlechat.serviceAccount).toContain("service_account");
|
||||
expect(secretsPayload["auth-profiles"].main["openai:default"].key).toBe("sk-profile-plaintext");
|
||||
|
||||
const rollbackResult = await rollbackSecretsMigration({ env, backupId: applyResult.backupId! });
|
||||
expect(rollbackResult.restoredFiles).toBeGreaterThan(0);
|
||||
|
||||
const rolledBackConfig = await fs.readFile(configPath, "utf8");
|
||||
expect(rolledBackConfig).toContain("sk-openai-plaintext");
|
||||
expect(rolledBackConfig).toContain("sk-skill-plaintext");
|
||||
|
||||
const rolledBackAuth = await fs.readFile(authStorePath, "utf8");
|
||||
expect(rolledBackAuth).toContain("sk-profile-plaintext");
|
||||
|
||||
await expect(fs.stat(secretsPath)).rejects.toThrow();
|
||||
const rolledBackEnv = await fs.readFile(envPath, "utf8");
|
||||
expect(rolledBackEnv).toContain("OPENAI_API_KEY=sk-openai-plaintext");
|
||||
});
|
||||
|
||||
it("uses a unique backup id when multiple writes happen in the same second", async () => {
|
||||
const now = new Date("2026-02-22T00:00:00.000Z");
|
||||
const first = await runSecretsMigration({ env, write: true, now });
|
||||
await rollbackSecretsMigration({ env, backupId: first.backupId! });
|
||||
|
||||
const second = await runSecretsMigration({ env, write: true, now });
|
||||
|
||||
expect(first.backupId).toBeTruthy();
|
||||
expect(second.backupId).toBeTruthy();
|
||||
expect(second.backupId).not.toBe(first.backupId);
|
||||
});
|
||||
|
||||
it("reuses configured file provider aliases", async () => {
|
||||
await fs.writeFile(
|
||||
configPath,
|
||||
`${JSON.stringify(
|
||||
{
|
||||
secrets: {
|
||||
providers: {
|
||||
teamfile: {
|
||||
source: "file",
|
||||
path: "~/.openclaw/team-secrets.json",
|
||||
mode: "jsonPointer",
|
||||
},
|
||||
},
|
||||
defaults: {
|
||||
file: "teamfile",
|
||||
},
|
||||
},
|
||||
models: {
|
||||
providers: {
|
||||
openai: {
|
||||
baseUrl: "https://api.openai.com/v1",
|
||||
apiKey: "sk-openai-plaintext",
|
||||
models: [{ id: "gpt-5", name: "gpt-5" }],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
|
||||
await runSecretsMigration({ env, write: true });
|
||||
const migratedConfig = JSON.parse(await fs.readFile(configPath, "utf8")) as {
|
||||
models: { providers: { openai: { apiKey: unknown } } };
|
||||
};
|
||||
expect(migratedConfig.models.providers.openai.apiKey).toEqual({
|
||||
source: "file",
|
||||
provider: "teamfile",
|
||||
id: "/providers/openai/apiKey",
|
||||
});
|
||||
});
|
||||
|
||||
it("keeps .env values when scrub-env is disabled", async () => {
|
||||
await runSecretsMigration({ env, write: true, scrubEnv: false });
|
||||
const migratedEnv = await fs.readFile(envPath, "utf8");
|
||||
expect(migratedEnv).toContain("OPENAI_API_KEY=sk-openai-plaintext");
|
||||
});
|
||||
});
|
||||
@@ -1,63 +0,0 @@
|
||||
import { applyMigrationPlan } from "./migrate/apply.js";
|
||||
import {
|
||||
listSecretsMigrationBackups,
|
||||
readBackupManifest,
|
||||
resolveSecretsMigrationBackupRoot,
|
||||
restoreFromManifest,
|
||||
} from "./migrate/backup.js";
|
||||
import { buildMigrationPlan } from "./migrate/plan.js";
|
||||
import type {
|
||||
SecretsMigrationRollbackOptions,
|
||||
SecretsMigrationRollbackResult,
|
||||
SecretsMigrationRunOptions,
|
||||
SecretsMigrationRunResult,
|
||||
} from "./migrate/types.js";
|
||||
|
||||
export type {
|
||||
SecretsMigrationRollbackOptions,
|
||||
SecretsMigrationRollbackResult,
|
||||
SecretsMigrationRunOptions,
|
||||
SecretsMigrationRunResult,
|
||||
};
|
||||
|
||||
export async function runSecretsMigration(
|
||||
options: SecretsMigrationRunOptions = {},
|
||||
): Promise<SecretsMigrationRunResult> {
|
||||
const env = options.env ?? process.env;
|
||||
const scrubEnv = options.scrubEnv ?? true;
|
||||
const plan = await buildMigrationPlan({ env, scrubEnv });
|
||||
|
||||
if (!options.write) {
|
||||
return {
|
||||
mode: "dry-run",
|
||||
changed: plan.changed,
|
||||
secretsFilePath: plan.secretsFilePath,
|
||||
counters: plan.counters,
|
||||
changedFiles: plan.backupTargets,
|
||||
};
|
||||
}
|
||||
|
||||
return await applyMigrationPlan({
|
||||
plan,
|
||||
env,
|
||||
now: options.now ?? new Date(),
|
||||
});
|
||||
}
|
||||
|
||||
export { resolveSecretsMigrationBackupRoot, listSecretsMigrationBackups };
|
||||
|
||||
export async function rollbackSecretsMigration(
|
||||
options: SecretsMigrationRollbackOptions,
|
||||
): Promise<SecretsMigrationRollbackResult> {
|
||||
const env = options.env ?? process.env;
|
||||
const manifest = readBackupManifest({
|
||||
backupId: options.backupId,
|
||||
env,
|
||||
});
|
||||
const restored = restoreFromManifest(manifest);
|
||||
return {
|
||||
backupId: options.backupId,
|
||||
restoredFiles: restored.restoredFiles,
|
||||
deletedFiles: restored.deletedFiles,
|
||||
};
|
||||
}
|
||||
@@ -1,76 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
import { ensureDirForFile, writeJsonFileSecure } from "../shared.js";
|
||||
import {
|
||||
createBackupManifest,
|
||||
pruneOldBackups,
|
||||
resolveUniqueBackupId,
|
||||
restoreFromManifest,
|
||||
} from "./backup.js";
|
||||
import { createSecretsMigrationConfigIO } from "./config-io.js";
|
||||
import type { MigrationPlan, SecretsMigrationRunResult } from "./types.js";
|
||||
|
||||
export async function applyMigrationPlan(params: {
|
||||
plan: MigrationPlan;
|
||||
env: NodeJS.ProcessEnv;
|
||||
now: Date;
|
||||
}): Promise<SecretsMigrationRunResult> {
|
||||
const { plan } = params;
|
||||
if (!plan.changed) {
|
||||
return {
|
||||
mode: "write",
|
||||
changed: false,
|
||||
secretsFilePath: plan.secretsFilePath,
|
||||
counters: plan.counters,
|
||||
changedFiles: [],
|
||||
};
|
||||
}
|
||||
|
||||
const backupId = resolveUniqueBackupId(plan.stateDir, params.now);
|
||||
const backup = createBackupManifest({
|
||||
stateDir: plan.stateDir,
|
||||
targets: plan.backupTargets,
|
||||
backupId,
|
||||
now: params.now,
|
||||
});
|
||||
|
||||
try {
|
||||
if (plan.payloadChanged) {
|
||||
writeJsonFileSecure(plan.secretsFilePath, plan.nextPayload);
|
||||
}
|
||||
|
||||
if (plan.configChanged) {
|
||||
const io = createSecretsMigrationConfigIO({ env: params.env });
|
||||
await io.writeConfigFile(plan.nextConfig, plan.configWriteOptions);
|
||||
}
|
||||
|
||||
for (const change of plan.authStoreChanges) {
|
||||
writeJsonFileSecure(change.path, change.nextStore);
|
||||
}
|
||||
|
||||
if (plan.envChange) {
|
||||
ensureDirForFile(plan.envChange.path);
|
||||
fs.writeFileSync(plan.envChange.path, plan.envChange.nextRaw, "utf8");
|
||||
fs.chmodSync(plan.envChange.path, 0o600);
|
||||
}
|
||||
} catch (err) {
|
||||
restoreFromManifest(backup.manifest);
|
||||
throw new Error(
|
||||
`Secrets migration failed and was rolled back from backup ${backupId}: ${String(err)}`,
|
||||
{
|
||||
cause: err,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
pruneOldBackups(plan.stateDir);
|
||||
|
||||
return {
|
||||
mode: "write",
|
||||
changed: true,
|
||||
backupId,
|
||||
backupDir: backup.backupDir,
|
||||
secretsFilePath: plan.secretsFilePath,
|
||||
counters: plan.counters,
|
||||
changedFiles: plan.backupTargets,
|
||||
};
|
||||
}
|
||||
@@ -1,182 +0,0 @@
|
||||
import crypto from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { resolveStateDir } from "../../config/config.js";
|
||||
import { resolveUserPath } from "../../utils.js";
|
||||
import { ensureDirForFile, isRecord } from "../shared.js";
|
||||
import type { BackupManifest } from "./types.js";
|
||||
|
||||
export const BACKUP_DIRNAME = "secrets-migrate";
|
||||
export const BACKUP_MANIFEST_FILENAME = "manifest.json";
|
||||
export const BACKUP_RETENTION = 20;
|
||||
|
||||
export function resolveBackupRoot(stateDir: string): string {
|
||||
return path.join(resolveUserPath(stateDir), "backups", BACKUP_DIRNAME);
|
||||
}
|
||||
|
||||
function formatBackupId(now: Date): string {
|
||||
const year = now.getUTCFullYear();
|
||||
const month = String(now.getUTCMonth() + 1).padStart(2, "0");
|
||||
const day = String(now.getUTCDate()).padStart(2, "0");
|
||||
const hour = String(now.getUTCHours()).padStart(2, "0");
|
||||
const minute = String(now.getUTCMinutes()).padStart(2, "0");
|
||||
const second = String(now.getUTCSeconds()).padStart(2, "0");
|
||||
return `${year}${month}${day}T${hour}${minute}${second}Z`;
|
||||
}
|
||||
|
||||
export function resolveUniqueBackupId(stateDir: string, now: Date): string {
|
||||
const backupRoot = resolveBackupRoot(stateDir);
|
||||
const base = formatBackupId(now);
|
||||
let candidate = base;
|
||||
let attempt = 0;
|
||||
|
||||
while (fs.existsSync(path.join(backupRoot, candidate))) {
|
||||
attempt += 1;
|
||||
const suffix = `${String(attempt).padStart(2, "0")}-${crypto.randomBytes(2).toString("hex")}`;
|
||||
candidate = `${base}-${suffix}`;
|
||||
}
|
||||
|
||||
return candidate;
|
||||
}
|
||||
|
||||
/**
 * Snapshot the given target files into a new backup directory and write a
 * manifest describing each entry (missing targets are recorded, not copied).
 *
 * The backup dir is created 0o700 and the manifest 0o600 since backups hold
 * plaintext secrets. Entries preserve each source file's permission bits so
 * restoreFromManifest can reapply them.
 */
export function createBackupManifest(params: {
  stateDir: string;
  targets: string[];
  backupId: string;
  now: Date;
}): { backupDir: string; manifestPath: string; manifest: BackupManifest } {
  const backupDir = path.join(resolveBackupRoot(params.stateDir), params.backupId);
  fs.mkdirSync(backupDir, { recursive: true, mode: 0o700 });

  const entries: BackupManifest["entries"] = [];
  // Index only advances for files that were actually copied, so backup
  // filenames stay dense even when some targets do not exist.
  let index = 0;
  for (const target of params.targets) {
    const normalized = resolveUserPath(target);
    const exists = fs.existsSync(normalized);
    if (!exists) {
      // Record the absence so a restore knows to delete any file created later.
      entries.push({ path: normalized, existed: false });
      continue;
    }

    const backupName = `file-${String(index).padStart(4, "0")}.bak`;
    const backupPath = path.join(backupDir, backupName);
    fs.copyFileSync(normalized, backupPath);
    const stats = fs.statSync(normalized);
    entries.push({
      path: normalized,
      existed: true,
      backupPath,
      // Keep only the permission bits; ownership/type bits are irrelevant here.
      mode: stats.mode & 0o777,
    });
    index += 1;
  }

  const manifest: BackupManifest = {
    version: 1,
    backupId: params.backupId,
    createdAt: params.now.toISOString(),
    entries,
  };
  const manifestPath = path.join(backupDir, BACKUP_MANIFEST_FILENAME);
  fs.writeFileSync(manifestPath, `${JSON.stringify(manifest, null, 2)}\n`, "utf8");
  fs.chmodSync(manifestPath, 0o600);

  return { backupDir, manifestPath, manifest };
}
|
||||
|
||||
/**
 * Roll files back to the state captured in a backup manifest.
 *
 * - Entries that did not exist at backup time are deleted if present now.
 * - Entries that existed are copied back from their backup file and restored
 *   to their recorded permission bits (falling back to 0o600).
 *
 * @throws Error if a manifest entry's backup file is missing — partial
 *   restores already applied before the throw are NOT undone.
 */
export function restoreFromManifest(manifest: BackupManifest): {
  restoredFiles: number;
  deletedFiles: number;
} {
  let restoredFiles = 0;
  let deletedFiles = 0;

  for (const entry of manifest.entries) {
    if (!entry.existed) {
      // The file was created after the backup was taken — remove it.
      if (fs.existsSync(entry.path)) {
        fs.rmSync(entry.path, { force: true });
        deletedFiles += 1;
      }
      continue;
    }

    if (!entry.backupPath || !fs.existsSync(entry.backupPath)) {
      throw new Error(`Backup file is missing for ${entry.path}.`);
    }
    ensureDirForFile(entry.path);
    fs.copyFileSync(entry.backupPath, entry.path);
    fs.chmodSync(entry.path, entry.mode ?? 0o600);
    restoredFiles += 1;
  }

  return { restoredFiles, deletedFiles };
}
|
||||
|
||||
export function pruneOldBackups(stateDir: string): void {
|
||||
const backupRoot = resolveBackupRoot(stateDir);
|
||||
if (!fs.existsSync(backupRoot)) {
|
||||
return;
|
||||
}
|
||||
const dirs = fs
|
||||
.readdirSync(backupRoot, { withFileTypes: true })
|
||||
.filter((entry) => entry.isDirectory())
|
||||
.map((entry) => entry.name)
|
||||
.toSorted();
|
||||
|
||||
if (dirs.length <= BACKUP_RETENTION) {
|
||||
return;
|
||||
}
|
||||
|
||||
const toDelete = dirs.slice(0, Math.max(0, dirs.length - BACKUP_RETENTION));
|
||||
for (const dir of toDelete) {
|
||||
fs.rmSync(path.join(backupRoot, dir), { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
export function resolveSecretsMigrationBackupRoot(env: NodeJS.ProcessEnv = process.env): string {
|
||||
return resolveBackupRoot(resolveStateDir(env, os.homedir));
|
||||
}
|
||||
|
||||
export function listSecretsMigrationBackups(env: NodeJS.ProcessEnv = process.env): string[] {
|
||||
const root = resolveSecretsMigrationBackupRoot(env);
|
||||
if (!fs.existsSync(root)) {
|
||||
return [];
|
||||
}
|
||||
return fs
|
||||
.readdirSync(root, { withFileTypes: true })
|
||||
.filter((entry) => entry.isDirectory())
|
||||
.map((entry) => entry.name)
|
||||
.toSorted();
|
||||
}
|
||||
|
||||
/**
 * Load and minimally validate a backup's manifest.json.
 *
 * @throws Error with the trailing known-backup ids when the backup does not
 *   exist, when the manifest cannot be parsed, or when its shape is invalid.
 */
export function readBackupManifest(params: {
  backupId: string;
  env: NodeJS.ProcessEnv;
}): BackupManifest {
  const backupDir = path.join(resolveSecretsMigrationBackupRoot(params.env), params.backupId);
  const manifestPath = path.join(backupDir, BACKUP_MANIFEST_FILENAME);
  if (!fs.existsSync(manifestPath)) {
    const available = listSecretsMigrationBackups(params.env);
    // Only surface the 10 most recent ids to keep the error readable.
    const suffix =
      available.length > 0
        ? ` Available backups: ${available.slice(-10).join(", ")}`
        : " No backups were found.";
    throw new Error(`Backup "${params.backupId}" was not found.${suffix}`);
  }

  let parsed: unknown;
  try {
    parsed = JSON.parse(fs.readFileSync(manifestPath, "utf8")) as unknown;
  } catch (err) {
    throw new Error(`Failed to read backup manifest at ${manifestPath}: ${String(err)}`, {
      cause: err,
    });
  }

  // Shallow shape check only; deeper per-entry validation happens at restore time.
  if (!isRecord(parsed) || !Array.isArray(parsed.entries)) {
    throw new Error(`Backup manifest at ${manifestPath} is invalid.`);
  }

  return parsed as BackupManifest;
}
|
||||
@@ -1,14 +0,0 @@
|
||||
import { createConfigIO } from "../../config/config.js";
|
||||
|
||||
// Logger that swallows all config-IO diagnostics; the secrets CLI owns
// stdout/stderr so that `--json` output stays machine-parseable.
const silentConfigIoLogger = {
  error: () => {},
  warn: () => {},
} as const;

/**
 * Build a config IO handle for secrets migration with IO-layer logging
 * suppressed (see silentConfigIoLogger above).
 */
export function createSecretsMigrationConfigIO(params: { env: NodeJS.ProcessEnv }) {
  // Migration output is owned by the CLI command so --json remains machine-parseable.
  return createConfigIO({
    env: params.env,
    logger: silentConfigIoLogger,
  });
}
|
||||
@@ -1,545 +0,0 @@
|
||||
import crypto from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { isDeepStrictEqual } from "node:util";
|
||||
import { listAgentIds, resolveAgentDir } from "../../agents/agent-scope.js";
|
||||
import { resolveAuthStorePath } from "../../agents/auth-profiles/paths.js";
|
||||
import { resolveStateDir, type OpenClawConfig } from "../../config/config.js";
|
||||
import { coerceSecretRef, DEFAULT_SECRET_PROVIDER_ALIAS } from "../../config/types.secrets.js";
|
||||
import { resolveConfigDir, resolveUserPath } from "../../utils.js";
|
||||
import {
|
||||
encodeJsonPointerToken,
|
||||
readJsonPointer as readJsonPointerRaw,
|
||||
setJsonPointer,
|
||||
} from "../json-pointer.js";
|
||||
import { listKnownSecretEnvVarNames } from "../provider-env-vars.js";
|
||||
import { isNonEmptyString, isRecord } from "../shared.js";
|
||||
import { createSecretsMigrationConfigIO } from "./config-io.js";
|
||||
import type { AuthStoreChange, EnvChange, MigrationCounters, MigrationPlan } from "./types.js";
|
||||
|
||||
// Fallback secrets-file location used when no file provider is configured and
// no explicit state-dir override is set (see resolveDefaultSecretsConfigPath).
const DEFAULT_SECRETS_FILE_PATH = "~/.openclaw/secrets.json";
|
||||
|
||||
function readJsonPointer(root: unknown, pointer: string): unknown {
|
||||
return readJsonPointerRaw(root, pointer, { onMissing: "undefined" });
|
||||
}
|
||||
|
||||
function parseEnvValue(raw: string): string {
|
||||
const trimmed = raw.trim();
|
||||
if (
|
||||
(trimmed.startsWith('"') && trimmed.endsWith('"')) ||
|
||||
(trimmed.startsWith("'") && trimmed.endsWith("'"))
|
||||
) {
|
||||
return trimmed.slice(1, -1);
|
||||
}
|
||||
return trimmed;
|
||||
}
|
||||
|
||||
function scrubEnvRaw(
|
||||
raw: string,
|
||||
migratedValues: Set<string>,
|
||||
allowedEnvKeys: Set<string>,
|
||||
): {
|
||||
nextRaw: string;
|
||||
removed: number;
|
||||
} {
|
||||
if (migratedValues.size === 0 || allowedEnvKeys.size === 0) {
|
||||
return { nextRaw: raw, removed: 0 };
|
||||
}
|
||||
const lines = raw.split(/\r?\n/);
|
||||
const nextLines: string[] = [];
|
||||
let removed = 0;
|
||||
for (const line of lines) {
|
||||
const match = line.match(/^\s*(?:export\s+)?([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.*)$/);
|
||||
if (!match) {
|
||||
nextLines.push(line);
|
||||
continue;
|
||||
}
|
||||
const envKey = match[1] ?? "";
|
||||
if (!allowedEnvKeys.has(envKey)) {
|
||||
nextLines.push(line);
|
||||
continue;
|
||||
}
|
||||
const parsedValue = parseEnvValue(match[2] ?? "");
|
||||
if (migratedValues.has(parsedValue)) {
|
||||
removed += 1;
|
||||
continue;
|
||||
}
|
||||
nextLines.push(line);
|
||||
}
|
||||
const hadTrailingNewline = raw.endsWith("\n");
|
||||
const joined = nextLines.join("\n");
|
||||
return {
|
||||
nextRaw:
|
||||
hadTrailingNewline || joined.length === 0
|
||||
? `${joined}${joined.endsWith("\n") ? "" : "\n"}`
|
||||
: joined,
|
||||
removed,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Pick the file-backed secrets provider migration should write to.
 *
 * Preference order:
 *   1. the configured default file provider (`secrets.defaults.file`);
 *   2. any configured provider with `source: "file"` and a path;
 *   3. a synthesized default path (provider not yet present in config —
 *      `hadConfiguredProvider: false` tells the caller to add it).
 */
function resolveFileSource(
  config: OpenClawConfig,
  env: NodeJS.ProcessEnv,
): {
  providerName: string;
  path: string;
  hadConfiguredProvider: boolean;
} {
  const configuredProviders = config.secrets?.providers;
  const defaultProviderName =
    config.secrets?.defaults?.file?.trim() || DEFAULT_SECRET_PROVIDER_ALIAS;

  if (configuredProviders) {
    const defaultProvider = configuredProviders[defaultProviderName];
    if (defaultProvider?.source === "file" && isNonEmptyString(defaultProvider.path)) {
      return {
        providerName: defaultProviderName,
        path: resolveUserPath(defaultProvider.path),
        hadConfiguredProvider: true,
      };
    }

    // Default alias is not a usable file provider — fall back to the first
    // configured file provider in object-entry order.
    for (const [providerName, provider] of Object.entries(configuredProviders)) {
      if (provider?.source === "file" && isNonEmptyString(provider.path)) {
        return {
          providerName,
          path: resolveUserPath(provider.path),
          hadConfiguredProvider: true,
        };
      }
    }
  }

  return {
    providerName: defaultProviderName,
    path: resolveUserPath(resolveDefaultSecretsConfigPath(env)),
    hadConfiguredProvider: false,
  };
}
|
||||
|
||||
function resolveDefaultSecretsConfigPath(env: NodeJS.ProcessEnv): string {
|
||||
if (env.OPENCLAW_STATE_DIR?.trim() || env.CLAWDBOT_STATE_DIR?.trim()) {
|
||||
return path.join(resolveStateDir(env, os.homedir), "secrets.json");
|
||||
}
|
||||
return DEFAULT_SECRETS_FILE_PATH;
|
||||
}
|
||||
|
||||
async function readSecretsFileJson(pathname: string): Promise<Record<string, unknown>> {
|
||||
if (!fs.existsSync(pathname)) {
|
||||
return {};
|
||||
}
|
||||
const raw = fs.readFileSync(pathname, "utf8");
|
||||
const parsed = JSON.parse(raw) as unknown;
|
||||
if (!isRecord(parsed)) {
|
||||
throw new Error("Secrets file payload is not a JSON object.");
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
/**
 * Move plaintext `models.providers.*.apiKey` values into the secrets payload
 * and replace them in-config with file-backed SecretRefs.
 *
 * Mutates `params.config`, `params.payload`, `params.counters`, and
 * `params.migratedValues` in place. Values already stored as refs are skipped.
 */
function migrateModelProviderSecrets(params: {
  config: OpenClawConfig;
  payload: Record<string, unknown>;
  counters: MigrationCounters;
  migratedValues: Set<string>;
  fileProviderName: string;
}): void {
  const providers = params.config.models?.providers as
    | Record<string, { apiKey?: unknown }>
    | undefined;
  if (!providers) {
    return;
  }
  for (const [providerId, provider] of Object.entries(providers)) {
    // Already a SecretRef — nothing to migrate.
    if (coerceSecretRef(provider.apiKey)) {
      continue;
    }
    if (!isNonEmptyString(provider.apiKey)) {
      continue;
    }
    const value = provider.apiKey.trim();
    const id = `/providers/${encodeJsonPointerToken(providerId)}/apiKey`;
    // Only count a write when the stored value actually changes (idempotent reruns).
    const existing = readJsonPointer(params.payload, id);
    if (!isDeepStrictEqual(existing, value)) {
      setJsonPointer(params.payload, id, value);
      params.counters.secretsWritten += 1;
    }
    provider.apiKey = { source: "file", provider: params.fileProviderName, id };
    params.counters.configRefs += 1;
    // Remember the plaintext so the .env scrub can match and remove it later.
    params.migratedValues.add(value);
  }
}
|
||||
|
||||
/**
 * Move plaintext `skills.entries.*.apiKey` values into the secrets payload
 * and replace them in-config with file-backed SecretRefs.
 * Mirrors migrateModelProviderSecrets; mutates all `params` members in place.
 */
function migrateSkillEntrySecrets(params: {
  config: OpenClawConfig;
  payload: Record<string, unknown>;
  counters: MigrationCounters;
  migratedValues: Set<string>;
  fileProviderName: string;
}): void {
  const entries = params.config.skills?.entries as Record<string, { apiKey?: unknown }> | undefined;
  if (!entries) {
    return;
  }
  for (const [skillKey, entry] of Object.entries(entries)) {
    // Skip malformed entries and values that are already SecretRefs.
    if (!isRecord(entry) || coerceSecretRef(entry.apiKey)) {
      continue;
    }
    if (!isNonEmptyString(entry.apiKey)) {
      continue;
    }
    const value = entry.apiKey.trim();
    const id = `/skills/entries/${encodeJsonPointerToken(skillKey)}/apiKey`;
    // Only count a write when the stored value actually changes (idempotent reruns).
    const existing = readJsonPointer(params.payload, id);
    if (!isDeepStrictEqual(existing, value)) {
      setJsonPointer(params.payload, id, value);
      params.counters.secretsWritten += 1;
    }
    entry.apiKey = { source: "file", provider: params.fileProviderName, id };
    params.counters.configRefs += 1;
    params.migratedValues.add(value);
  }
}
|
||||
|
||||
/**
 * Migrate one Google Chat account's `serviceAccount` (string or JSON object)
 * into the secrets payload, leaving a `serviceAccountRef` behind.
 *
 * If a ref already exists (either `serviceAccountRef` or an inline ref stored
 * in `serviceAccount`), only a leftover plaintext `serviceAccount` residue is
 * deleted. Mutates `params.account`, `params.payload`, and `params.counters`.
 */
function migrateGoogleChatServiceAccount(params: {
  account: Record<string, unknown>;
  pointerId: string;
  counters: MigrationCounters;
  payload: Record<string, unknown>;
  fileProviderName: string;
}): void {
  const explicitRef = coerceSecretRef(params.account.serviceAccountRef);
  const inlineRef = coerceSecretRef(params.account.serviceAccount);
  if (explicitRef || inlineRef) {
    // A ref is already in place: scrub any plaintext residue and stop.
    if (
      params.account.serviceAccount !== undefined &&
      !coerceSecretRef(params.account.serviceAccount)
    ) {
      delete params.account.serviceAccount;
      params.counters.plaintextRemoved += 1;
    }
    return;
  }

  const value = params.account.serviceAccount;
  // Service accounts may be a JSON string or an inline key object.
  const hasStringValue = isNonEmptyString(value);
  const hasObjectValue = isRecord(value) && Object.keys(value).length > 0;
  if (!hasStringValue && !hasObjectValue) {
    return;
  }

  const id = `${params.pointerId}/serviceAccount`;
  // Clone object values so later config mutations cannot alias into the payload.
  const normalizedValue = hasStringValue ? value.trim() : structuredClone(value);
  const existing = readJsonPointer(params.payload, id);
  if (!isDeepStrictEqual(existing, normalizedValue)) {
    setJsonPointer(params.payload, id, normalizedValue);
    params.counters.secretsWritten += 1;
  }

  params.account.serviceAccountRef = {
    source: "file",
    provider: params.fileProviderName,
    id,
  };
  delete params.account.serviceAccount;
  params.counters.configRefs += 1;
}
|
||||
|
||||
/**
 * Migrate Google Chat service-account secrets: first the channel-level
 * `channels.googlechat` account, then each entry under its `accounts` map.
 * Delegates per-account work to migrateGoogleChatServiceAccount.
 */
function migrateGoogleChatSecrets(params: {
  config: OpenClawConfig;
  payload: Record<string, unknown>;
  counters: MigrationCounters;
  fileProviderName: string;
}): void {
  const googlechat = params.config.channels?.googlechat;
  if (!isRecord(googlechat)) {
    return;
  }

  // Top-level (single-account) configuration.
  migrateGoogleChatServiceAccount({
    account: googlechat,
    pointerId: "/channels/googlechat",
    payload: params.payload,
    counters: params.counters,
    fileProviderName: params.fileProviderName,
  });

  // Optional multi-account map.
  if (!isRecord(googlechat.accounts)) {
    return;
  }
  for (const [accountId, accountValue] of Object.entries(googlechat.accounts)) {
    if (!isRecord(accountValue)) {
      continue;
    }
    migrateGoogleChatServiceAccount({
      account: accountValue,
      pointerId: `/channels/googlechat/accounts/${encodeJsonPointerToken(accountId)}`,
      payload: params.payload,
      counters: params.counters,
      fileProviderName: params.fileProviderName,
    });
  }
}
|
||||
|
||||
function collectAuthStorePaths(config: OpenClawConfig, stateDir: string): string[] {
|
||||
const paths = new Set<string>();
|
||||
paths.add(resolveUserPath(resolveAuthStorePath()));
|
||||
|
||||
const agentsRoot = path.join(resolveUserPath(stateDir), "agents");
|
||||
if (fs.existsSync(agentsRoot)) {
|
||||
for (const entry of fs.readdirSync(agentsRoot, { withFileTypes: true })) {
|
||||
if (!entry.isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
paths.add(path.join(agentsRoot, entry.name, "agent", "auth-profiles.json"));
|
||||
}
|
||||
}
|
||||
|
||||
for (const agentId of listAgentIds(config)) {
|
||||
const agentDir = resolveAgentDir(config, agentId);
|
||||
paths.add(resolveUserPath(resolveAuthStorePath(agentDir)));
|
||||
}
|
||||
|
||||
return [...paths];
|
||||
}
|
||||
|
||||
function deriveAuthStoreScope(authStorePath: string, stateDir: string): string {
|
||||
const agentsRoot = path.join(resolveUserPath(stateDir), "agents");
|
||||
const relative = path.relative(agentsRoot, authStorePath);
|
||||
if (!relative.startsWith("..")) {
|
||||
const segments = relative.split(path.sep);
|
||||
if (segments.length >= 3 && segments[1] === "agent" && segments[2] === "auth-profiles.json") {
|
||||
const candidate = segments[0]?.trim();
|
||||
if (candidate) {
|
||||
return candidate;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const digest = crypto.createHash("sha1").update(authStorePath).digest("hex").slice(0, 8);
|
||||
return `path-${digest}`;
|
||||
}
|
||||
|
||||
/**
 * Migrate one auth-profile store's plaintext credentials into the secrets
 * payload. Handles two symmetric profile shapes:
 *   - "api_key" profiles: `key` → `keyRef`
 *   - "token"   profiles: `token` → `tokenRef`
 * If a ref already exists, only the plaintext residue is removed.
 *
 * Mutates `params.store`, `params.payload`, `params.counters`, and
 * `params.migratedValues` in place.
 *
 * @returns true when the store object was modified (caller persists it).
 */
function migrateAuthStoreSecrets(params: {
  store: Record<string, unknown>;
  scope: string;
  payload: Record<string, unknown>;
  counters: MigrationCounters;
  migratedValues: Set<string>;
  fileProviderName: string;
}): boolean {
  const profiles = params.store.profiles;
  if (!isRecord(profiles)) {
    return false;
  }

  let changed = false;
  for (const [profileId, profileValue] of Object.entries(profiles)) {
    if (!isRecord(profileValue)) {
      continue;
    }
    if (profileValue.type === "api_key") {
      const keyRef = coerceSecretRef(profileValue.keyRef);
      const key = isNonEmptyString(profileValue.key) ? profileValue.key.trim() : "";
      if (keyRef) {
        // Ref already present — just scrub any plaintext residue.
        if (key) {
          delete profileValue.key;
          params.counters.plaintextRemoved += 1;
          changed = true;
        }
        continue;
      }
      if (!key) {
        continue;
      }
      const id = `/auth-profiles/${encodeJsonPointerToken(params.scope)}/${encodeJsonPointerToken(profileId)}/key`;
      // Only count a write when the stored value actually changes (idempotent reruns).
      const existing = readJsonPointer(params.payload, id);
      if (!isDeepStrictEqual(existing, key)) {
        setJsonPointer(params.payload, id, key);
        params.counters.secretsWritten += 1;
      }
      profileValue.keyRef = { source: "file", provider: params.fileProviderName, id };
      delete profileValue.key;
      params.counters.authProfileRefs += 1;
      params.migratedValues.add(key);
      changed = true;
      continue;
    }

    if (profileValue.type === "token") {
      const tokenRef = coerceSecretRef(profileValue.tokenRef);
      const token = isNonEmptyString(profileValue.token) ? profileValue.token.trim() : "";
      if (tokenRef) {
        // Ref already present — just scrub any plaintext residue.
        if (token) {
          delete profileValue.token;
          params.counters.plaintextRemoved += 1;
          changed = true;
        }
        continue;
      }
      if (!token) {
        continue;
      }
      const id = `/auth-profiles/${encodeJsonPointerToken(params.scope)}/${encodeJsonPointerToken(profileId)}/token`;
      const existing = readJsonPointer(params.payload, id);
      if (!isDeepStrictEqual(existing, token)) {
        setJsonPointer(params.payload, id, token);
        params.counters.secretsWritten += 1;
      }
      profileValue.tokenRef = { source: "file", provider: params.fileProviderName, id };
      delete profileValue.token;
      params.counters.authProfileRefs += 1;
      params.migratedValues.add(token);
      changed = true;
    }
  }

  return changed;
}
|
||||
|
||||
/**
 * Build a full, side-effect-free migration plan: clone the config and secrets
 * payload, rewrite plaintext secrets into file-backed refs across config,
 * auth-profile stores, and (optionally) the .env file, and report everything
 * that would change. Nothing is written to disk here — the caller applies the
 * plan (and backs up `backupTargets`) separately.
 *
 * @throws Error when the current config fails validation.
 */
export async function buildMigrationPlan(params: {
  env: NodeJS.ProcessEnv;
  scrubEnv: boolean;
}): Promise<MigrationPlan> {
  const io = createSecretsMigrationConfigIO({ env: params.env });
  const { snapshot, writeOptions } = await io.readConfigFileSnapshotForWrite();
  if (!snapshot.valid) {
    const issues =
      snapshot.issues.length > 0
        ? snapshot.issues.map((issue) => `${issue.path || "<root>"}: ${issue.message}`).join("\n")
        : "Unknown validation issue.";
    throw new Error(`Cannot migrate secrets because config is invalid:\n${issues}`);
  }

  const stateDir = resolveStateDir(params.env, os.homedir);
  // Work on clones so the plan never mutates the live snapshot/payload.
  const nextConfig = structuredClone(snapshot.config);
  const fileSource = resolveFileSource(nextConfig, params.env);
  const previousPayload = await readSecretsFileJson(fileSource.path);
  const nextPayload = structuredClone(previousPayload);

  const counters: MigrationCounters = {
    configRefs: 0,
    authProfileRefs: 0,
    plaintextRemoved: 0,
    secretsWritten: 0,
    envEntriesRemoved: 0,
    authStoresChanged: 0,
  };

  // Plaintext values moved into the payload; used later to scrub .env residues.
  const migratedValues = new Set<string>();

  migrateModelProviderSecrets({
    config: nextConfig,
    payload: nextPayload,
    counters,
    migratedValues,
    fileProviderName: fileSource.providerName,
  });
  migrateSkillEntrySecrets({
    config: nextConfig,
    payload: nextPayload,
    counters,
    migratedValues,
    fileProviderName: fileSource.providerName,
  });
  migrateGoogleChatSecrets({
    config: nextConfig,
    payload: nextPayload,
    counters,
    fileProviderName: fileSource.providerName,
  });

  // Migrate every auth-profile store that exists and parses; unreadable or
  // malformed stores are skipped (best effort), not treated as errors.
  const authStoreChanges: AuthStoreChange[] = [];
  for (const authStorePath of collectAuthStorePaths(nextConfig, stateDir)) {
    if (!fs.existsSync(authStorePath)) {
      continue;
    }
    const raw = fs.readFileSync(authStorePath, "utf8");
    let parsed: unknown;
    try {
      parsed = JSON.parse(raw) as unknown;
    } catch {
      continue;
    }
    if (!isRecord(parsed)) {
      continue;
    }

    const nextStore = structuredClone(parsed);
    const scope = deriveAuthStoreScope(authStorePath, stateDir);
    const changed = migrateAuthStoreSecrets({
      store: nextStore,
      scope,
      payload: nextPayload,
      counters,
      migratedValues,
      fileProviderName: fileSource.providerName,
    });
    if (!changed) {
      continue;
    }
    authStoreChanges.push({ path: authStorePath, nextStore });
  }
  counters.authStoresChanged = authStoreChanges.length;

  // If secrets were written but no file provider existed in config, declare
  // one (plus a default alias) so the new refs resolve after apply.
  if (counters.secretsWritten > 0 && !fileSource.hadConfiguredProvider) {
    const defaultConfigPath = resolveDefaultSecretsConfigPath(params.env);
    nextConfig.secrets ??= {};
    nextConfig.secrets.providers ??= {};
    nextConfig.secrets.providers[fileSource.providerName] = {
      source: "file",
      path: defaultConfigPath,
      mode: "jsonPointer",
    };
    nextConfig.secrets.defaults ??= {};
    nextConfig.secrets.defaults.file ??= fileSource.providerName;
  }

  const configChanged = !isDeepStrictEqual(snapshot.config, nextConfig);
  const payloadChanged = !isDeepStrictEqual(previousPayload, nextPayload);

  // Optionally plan a .env scrub that drops lines whose value was migrated.
  let envChange: EnvChange | null = null;
  if (params.scrubEnv && migratedValues.size > 0) {
    const envPath = path.join(resolveConfigDir(params.env, os.homedir), ".env");
    if (fs.existsSync(envPath)) {
      const rawEnv = fs.readFileSync(envPath, "utf8");
      const scrubbed = scrubEnvRaw(rawEnv, migratedValues, new Set(listKnownSecretEnvVarNames()));
      if (scrubbed.removed > 0 && scrubbed.nextRaw !== rawEnv) {
        counters.envEntriesRemoved = scrubbed.removed;
        envChange = {
          path: envPath,
          nextRaw: scrubbed.nextRaw,
        };
      }
    }
  }

  // Every file the apply step will touch must be backed up first.
  const backupTargets = new Set<string>();
  if (configChanged) {
    backupTargets.add(io.configPath);
  }
  if (payloadChanged) {
    backupTargets.add(fileSource.path);
  }
  for (const change of authStoreChanges) {
    backupTargets.add(change.path);
  }
  if (envChange) {
    backupTargets.add(envChange.path);
  }

  return {
    changed: configChanged || payloadChanged || authStoreChanges.length > 0 || Boolean(envChange),
    counters,
    stateDir,
    configChanged,
    nextConfig,
    configWriteOptions: writeOptions,
    authStoreChanges,
    payloadChanged,
    nextPayload,
    secretsFilePath: fileSource.path,
    envChange,
    backupTargets: [...backupTargets],
  };
}
|
||||
@@ -1,78 +0,0 @@
|
||||
import type { OpenClawConfig } from "../../config/config.js";
|
||||
import type { ConfigWriteOptions } from "../../config/io.js";
|
||||
|
||||
// Tallies accumulated while building/applying a migration plan.
export type MigrationCounters = {
  configRefs: number;
  authProfileRefs: number;
  plaintextRemoved: number;
  secretsWritten: number;
  envEntriesRemoved: number;
  authStoresChanged: number;
};

// A pending rewrite of one auth-profile store file.
export type AuthStoreChange = {
  path: string;
  nextStore: Record<string, unknown>;
};

// A pending rewrite of the .env file (full replacement content).
export type EnvChange = {
  path: string;
  nextRaw: string;
};

// One file captured in a backup; `backupPath`/`mode` are set only when the
// file existed at backup time.
export type BackupManifestEntry = {
  path: string;
  existed: boolean;
  backupPath?: string;
  mode?: number;
};

// On-disk manifest.json schema for a secrets-migration backup.
export type BackupManifest = {
  version: 1;
  backupId: string;
  createdAt: string;
  entries: BackupManifestEntry[];
};

// Complete, side-effect-free description of everything a migration would
// change; produced by buildMigrationPlan and consumed by the apply step.
export type MigrationPlan = {
  changed: boolean;
  counters: MigrationCounters;
  stateDir: string;
  configChanged: boolean;
  nextConfig: OpenClawConfig;
  configWriteOptions: ConfigWriteOptions;
  authStoreChanges: AuthStoreChange[];
  payloadChanged: boolean;
  nextPayload: Record<string, unknown>;
  secretsFilePath: string;
  envChange: EnvChange | null;
  backupTargets: string[];
};

// Options for running a migration (dry-run by default; `write` applies).
export type SecretsMigrationRunOptions = {
  write?: boolean;
  scrubEnv?: boolean;
  env?: NodeJS.ProcessEnv;
  now?: Date;
};

// Result of a migration run; backup fields are set only in "write" mode.
export type SecretsMigrationRunResult = {
  mode: "dry-run" | "write";
  changed: boolean;
  backupId?: string;
  backupDir?: string;
  secretsFilePath: string;
  counters: MigrationCounters;
  changedFiles: string[];
};

// Options for restoring from a migration backup.
export type SecretsMigrationRollbackOptions = {
  backupId: string;
  env?: NodeJS.ProcessEnv;
};

// Result of a rollback (counts come from restoreFromManifest).
export type SecretsMigrationRollbackResult = {
  backupId: string;
  restoredFiles: number;
  deletedFiles: number;
};
|
||||
81
src/secrets/plan.ts
Normal file
81
src/secrets/plan.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
import type { SecretRef } from "../config/types.secrets.js";
|
||||
|
||||
// Config locations a plan target may point at.
export type SecretsPlanTargetType =
  | "models.providers.apiKey"
  | "skills.entries.apiKey"
  | "channels.googlechat.serviceAccount";

// One SecretRef assignment the apply step should perform.
export type SecretsPlanTarget = {
  type: SecretsPlanTargetType;
  /**
   * Dot path in openclaw.json for operator readability.
   * Example: "models.providers.openai.apiKey"
   */
  path: string;
  ref: SecretRef;
  /**
   * For provider targets, used to scrub auth-profile/static residues.
   */
  providerId?: string;
  /**
   * For googlechat account-scoped targets.
   */
  accountId?: string;
};

// On-disk schema for a plan file consumed by `openclaw secrets apply --from`.
export type SecretsApplyPlan = {
  version: 1;
  protocolVersion: 1;
  generatedAt: string;
  generatedBy: "openclaw secrets configure" | "manual";
  targets: SecretsPlanTarget[];
  // Scrub toggles; all default to true (see normalizeSecretsPlanOptions).
  options?: {
    scrubEnv?: boolean;
    scrubAuthProfilesForProviderTargets?: boolean;
    scrubLegacyAuthJson?: boolean;
  };
};
|
||||
|
||||
export function isSecretsApplyPlan(value: unknown): value is SecretsApplyPlan {
|
||||
if (!value || typeof value !== "object" || Array.isArray(value)) {
|
||||
return false;
|
||||
}
|
||||
const typed = value as Partial<SecretsApplyPlan>;
|
||||
if (typed.version !== 1 || typed.protocolVersion !== 1 || !Array.isArray(typed.targets)) {
|
||||
return false;
|
||||
}
|
||||
for (const target of typed.targets) {
|
||||
if (!target || typeof target !== "object") {
|
||||
return false;
|
||||
}
|
||||
const candidate = target as Partial<SecretsPlanTarget>;
|
||||
const ref = candidate.ref as Partial<SecretRef> | undefined;
|
||||
if (
|
||||
(candidate.type !== "models.providers.apiKey" &&
|
||||
candidate.type !== "skills.entries.apiKey" &&
|
||||
candidate.type !== "channels.googlechat.serviceAccount") ||
|
||||
typeof candidate.path !== "string" ||
|
||||
!candidate.path.trim() ||
|
||||
!ref ||
|
||||
typeof ref !== "object" ||
|
||||
(ref.source !== "env" && ref.source !== "file" && ref.source !== "exec") ||
|
||||
typeof ref.provider !== "string" ||
|
||||
ref.provider.trim().length === 0 ||
|
||||
typeof ref.id !== "string" ||
|
||||
ref.id.trim().length === 0
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
export function normalizeSecretsPlanOptions(
|
||||
options: SecretsApplyPlan["options"] | undefined,
|
||||
): Required<NonNullable<SecretsApplyPlan["options"]>> {
|
||||
return {
|
||||
scrubEnv: options?.scrubEnv ?? true,
|
||||
scrubAuthProfilesForProviderTargets: options?.scrubAuthProfilesForProviderTargets ?? true,
|
||||
scrubLegacyAuthJson: options?.scrubLegacyAuthJson ?? true,
|
||||
};
|
||||
}
|
||||
@@ -25,3 +25,18 @@ export function writeJsonFileSecure(pathname: string, value: unknown): void {
|
||||
fs.writeFileSync(pathname, `${JSON.stringify(value, null, 2)}\n`, "utf8");
|
||||
fs.chmodSync(pathname, 0o600);
|
||||
}
|
||||
|
||||
export function readTextFileIfExists(pathname: string): string | null {
|
||||
if (!fs.existsSync(pathname)) {
|
||||
return null;
|
||||
}
|
||||
return fs.readFileSync(pathname, "utf8");
|
||||
}
|
||||
|
||||
export function writeTextFileAtomic(pathname: string, value: string, mode = 0o600): void {
|
||||
ensureDirForFile(pathname);
|
||||
const tempPath = `${pathname}.tmp-${process.pid}-${Date.now()}`;
|
||||
fs.writeFileSync(tempPath, value, "utf8");
|
||||
fs.chmodSync(tempPath, mode);
|
||||
fs.renameSync(tempPath, pathname);
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user