refactor: move channel pairing state to sqlite

This commit is contained in:
Peter Steinberger
2026-05-07 00:09:09 +01:00
parent be790009f8
commit 003dfb4821
24 changed files with 553 additions and 782 deletions

View File

@@ -156,7 +156,7 @@ Docs: https://docs.openclaw.ai
- Image generation: include enabled generation providers such as fal in provider discovery even when another image provider is already active. Fixes #78141. Thanks @leoge007.
- Gateway/sessions: remove the automatic cron session reaper and retired `cron.sessionRetention`; use `openclaw sessions cleanup` for session-row maintenance while cron run-log pruning remains under `cron.runLog`.
- Cron/state: store runtime schedule state and run history in the shared SQLite state database; `openclaw doctor --fix` imports legacy `jobs-state.json` and `cron/runs/*.jsonl` files.
- Gateway/state: store device identity/auth, bootstrap tokens, device and node pairing ledgers, web push subscriptions/VAPID keys, and APNs registrations in the shared SQLite state database; `openclaw doctor --fix` imports and removes the legacy JSON files.
- Gateway/state: store device identity/auth, bootstrap tokens, device and node pairing ledgers, channel pairing requests/allowlists, web push subscriptions/VAPID keys, and APNs registrations in the shared SQLite state database; `openclaw doctor --fix` imports and removes the legacy JSON files.
- PR triage: mark external pull requests with `proof: supplied` when Barnacle finds structured real behavior proof, keep stale negative proof labels in sync across CRLF-edited PR bodies, and let ClawSweeper own the stronger `proof: sufficient` judgement.
- ACPX/Codex: preserve trusted Codex project declarations when launching isolated Codex ACP sessions, avoiding interactive trust prompts in headless runs. Thanks @Stedyclaw.
- ACPX/Codex: reap stale OpenClaw-owned ACPX/Codex ACP process trees on startup and after ACP session close, preventing orphaned harness processes from slowing the Gateway. Thanks @91wan.

View File

@@ -274,7 +274,7 @@ Control how group/room messages are handled per channel:
- `groupPolicy` is separate from mention-gating (which requires @mentions).
- WhatsApp/Telegram/Signal/iMessage/Microsoft Teams/Zalo: use `groupAllowFrom` (fallback: explicit `allowFrom`).
- Signal: `groupAllowFrom` can match either the inbound Signal group id or the sender phone/UUID.
- DM pairing approvals (`*-allowFrom` store entries) apply to DM access only; group sender authorization stays explicit to group allowlists.
- DM pairing approvals (stored in SQLite pairing state) apply to DM access only; group sender authorization stays explicit to group allowlists.
- Discord: allowlist uses `channels.discord.guilds.<id>.channels`.
- Slack: allowlist uses `channels.slack.channels`.
- Matrix: allowlist uses `channels.matrix.groups`. Prefer room IDs or aliases; joined-room name lookup is best-effort, and unresolved names are ignored at runtime. Use `channels.matrix.groupAllowFrom` to restrict senders; per-room `users` allowlists are also supported.

View File

@@ -78,17 +78,20 @@ Access groups are documented in detail here: [Access groups](/channels/access-gr
### Where the state lives
Stored under `~/.openclaw/credentials/`:
Stored in `~/.openclaw/state/openclaw.sqlite`:
- Pending requests: `<channel>-pairing.json`
- Approved allowlist store:
- Default account: `<channel>-allowFrom.json`
- Non-default account: `<channel>-<accountId>-allowFrom.json`
- Pending requests: SQLite `kv` scope `pairing.channel`
- Approved allowlist store: same SQLite record, account-scoped by channel account ID
Account scoping behavior:
- Non-default accounts read/write only their scoped allowlist file.
- Default account uses the channel-scoped unscoped allowlist file.
- Non-default accounts read/write only their scoped allowlist entry.
- Default account uses the `default` account entry.
Older `~/.openclaw/credentials/<channel>-pairing.json`,
`<channel>-allowFrom.json`, and `<channel>-<accountId>-allowFrom.json` files
are legacy import sources only. Run `openclaw doctor --fix` to import them into
SQLite and remove the JSON files.
Treat these as sensitive (they gate access to your assistant).

View File

@@ -118,7 +118,7 @@ Token resolution order is account-aware. In practice, config values win over env
`dmPolicy: "allowlist"` with empty `allowFrom` blocks all DMs and is rejected by config validation.
Setup asks for numeric user IDs only.
If you upgraded and your config contains `@username` allowlist entries, run `openclaw doctor --fix` to resolve them (best-effort; requires a Telegram bot token).
If you previously relied on pairing-store allowlist files, `openclaw doctor --fix` can recover entries into `channels.telegram.allowFrom` in allowlist flows (for example when `dmPolicy: "allowlist"` has no explicit IDs yet).
If you previously relied on pairing-store allowlist state, `openclaw doctor --fix` can recover entries into `channels.telegram.allowFrom` in allowlist flows (for example when `dmPolicy: "allowlist"` has no explicit IDs yet). Older pairing JSON files are imported into SQLite first.
For one-owner bots, prefer `dmPolicy: "allowlist"` with explicit numeric `allowFrom` IDs to keep access policy durable in config (instead of depending on previous pairing approvals).

View File

@@ -231,7 +231,7 @@ content and identifiers.
Runtime behavior details:
- pairings are persisted in channel allow-store and merged with configured `allowFrom`
- pairings are persisted in SQLite channel pairing state and merged with configured `allowFrom`
- scheduled automation and heartbeat recipient fallback use explicit delivery targets or configured `allowFrom`; DM pairing approvals are not implicit cron or heartbeat recipients
- if no allowlist is configured, the linked self number is allowed by default
- OpenClaw never auto-pairs outbound `fromMe` DMs (messages you send to yourself from the linked device)

View File

@@ -71,7 +71,7 @@ openclaw security audit --fix --json | jq '{fix: .fix.ok, summary: .report.summa
- flips common `groupPolicy="open"` to `groupPolicy="allowlist"` (including account variants in supported channels)
- when WhatsApp group policy flips to `allowlist`, seeds `groupAllowFrom` from
the stored `allowFrom` file when that list exists and config does not already
the stored pairing allowlist when that list exists and config does not already
define `allowFrom`
- sets `logging.redactSensitive` from `"off"` to `"tools"`
- tightens permissions for state/config and common sensitive files

View File

@@ -241,9 +241,7 @@ Use this when auditing access or deciding what to back up:
- **Telegram bot token**: config/env or `channels.telegram.tokenFile` (regular file only; symlinks rejected)
- **Discord bot token**: config/env or SecretRef (env/file/exec providers)
- **Slack tokens**: config/env (`channels.slack.*`)
- **Pairing allowlists**:
- `~/.openclaw/credentials/<channel>-allowFrom.json` (default account)
- `~/.openclaw/credentials/<channel>-<accountId>-allowFrom.json` (non-default accounts)
- **Pairing allowlists**: `~/.openclaw/state/openclaw.sqlite` (`kv` scope `pairing.channel`)
- **Model auth profiles**: `~/.openclaw/agents/<agentId>/agent/auth-profiles.json`
- **Codex runtime state**: `~/.openclaw/agents/<agentId>/agent/codex-home/`
- **File-backed secrets payload (optional)**: `~/.openclaw/secrets.json`
@@ -575,7 +573,7 @@ If you run multiple accounts on the same channel, use `per-account-channel-peer`
OpenClaw has two separate "who can trigger me?" layers:
- **DM allowlist** (`allowFrom` / `channels.discord.allowFrom` / `channels.slack.allowFrom`; legacy: `channels.discord.dm.allowFrom`, `channels.slack.dm.allowFrom`): who is allowed to talk to the bot in direct messages.
- When `dmPolicy="pairing"`, approvals are written to the account-scoped pairing allowlist store under `~/.openclaw/credentials/` (`<channel>-allowFrom.json` for default account, `<channel>-<accountId>-allowFrom.json` for non-default accounts), merged with config allowlists.
- When `dmPolicy="pairing"`, approvals are written to the account-scoped pairing allowlist store in `~/.openclaw/state/openclaw.sqlite`, merged with config allowlists. Older `~/.openclaw/credentials/*-pairing.json` and `*-allowFrom.json` files are imported only by `openclaw doctor --fix`.
- **Group allowlist** (channel-specific): which groups/channels/guilds the bot will accept messages from at all.
- Common patterns:
- `channels.whatsapp.groups`, `channels.telegram.groups`, `channels.imessage.groups`: per-group defaults like `requireMention`; when set, it also acts as a group allowlist (include `"*"` to keep allow-all behavior).

View File

@@ -131,9 +131,11 @@ This plan has started landing in slices:
legacy import source and is removed after import; `auth-profiles.json` still
owns credentials and stays file-backed.
- Device identity, local device auth tokens, bootstrap tokens, device/node
pairing ledgers, web push subscriptions/VAPID keys, and APNs registration
state now use the shared SQLite `kv` store. `openclaw doctor --fix` imports
the legacy `identity/*.json`, `devices/*.json`, `nodes/*.json`, and
pairing ledgers, channel pairing requests/allowlists, web push
subscriptions/VAPID keys, and APNs registration state now use the shared
SQLite `kv` store. `openclaw doctor --fix` imports the legacy
`identity/*.json`, `devices/*.json`, `nodes/*.json`,
`credentials/*-pairing.json`, `credentials/*-allowFrom.json`, and
`push/*.json` files into SQLite and removes those files after a successful
import. Runtime paths no longer read or write those JSON ledgers.
- `AgentRuntimeBackend`, `PreparedAgentRun`, and the Node worker runner exist

View File

@@ -143,9 +143,7 @@ Use this when debugging auth or deciding what to back up:
- **Telegram bot token**: config/env or `channels.telegram.tokenFile` (regular file only; symlinks rejected)
- **Discord bot token**: config/env or SecretRef (env/file/exec providers)
- **Slack tokens**: config/env (`channels.slack.*`)
- **Pairing allowlists**:
- `~/.openclaw/credentials/<channel>-allowFrom.json` (default account)
- `~/.openclaw/credentials/<channel>-<accountId>-allowFrom.json` (non-default accounts)
- **Pairing allowlists**: `~/.openclaw/state/openclaw.sqlite` (`kv` scope `pairing.channel`)
- **Model auth profiles**: `~/.openclaw/agents/<agentId>/agent/auth-profiles.json`
- **File-backed secrets payload (optional)**: `~/.openclaw/secrets.json`
- **Legacy OAuth import**: `~/.openclaw/credentials/oauth.json`

View File

@@ -1 +0,0 @@
export { detectTelegramLegacyStateMigrations } from "./src/state-migrations.js";

View File

@@ -20,8 +20,7 @@
],
"setupEntry": "./setup-entry.ts",
"setupFeatures": {
"configPromotion": true,
"legacyStateMigrations": true
"configPromotion": true
},
"channel": {
"id": "telegram",

View File

@@ -2,17 +2,10 @@ import { defineBundledChannelSetupEntry } from "openclaw/plugin-sdk/channel-entr
export default defineBundledChannelSetupEntry({
importMetaUrl: import.meta.url,
features: {
legacyStateMigrations: true,
},
plugin: {
specifier: "./setup-plugin-api.js",
exportName: "telegramSetupPlugin",
},
legacyStateMigrations: {
specifier: "./legacy-state-migrations-api.js",
exportName: "detectTelegramLegacyStateMigrations",
},
secrets: {
specifier: "./secret-contract-api.js",
exportName: "channelSecrets",

View File

@@ -4,15 +4,10 @@ import type { TelegramProbe } from "./probe.js";
import { telegramSetupAdapter } from "./setup-core.js";
import { telegramSetupWizard } from "./setup-surface.js";
import { createTelegramPluginBase } from "./shared.js";
import { detectTelegramLegacyStateMigrations } from "./state-migrations.js";
export const telegramSetupPlugin: ChannelPlugin<ResolvedTelegramAccount, TelegramProbe> = {
...createTelegramPluginBase({
setupWizard: telegramSetupWizard,
setup: telegramSetupAdapter,
}),
lifecycle: {
detectLegacyStateMigrations: ({ cfg, env }) =>
detectTelegramLegacyStateMigrations({ cfg, env }),
},
};

View File

@@ -73,7 +73,6 @@ import {
formatDuplicateTelegramTokenReason,
telegramConfigAdapter,
} from "./shared.js";
import { detectTelegramLegacyStateMigrations } from "./state-migrations.js";
import { collectTelegramStatusIssues } from "./status-issues.js";
import { parseTelegramTarget } from "./targets.js";
import {
@@ -732,8 +731,6 @@ export const telegramPlugin = createChatChannelPlugin({
await resolveTelegramTargets({ cfg, accountId, inputs, kind }),
},
lifecycle: {
detectLegacyStateMigrations: ({ cfg, env }) =>
detectTelegramLegacyStateMigrations({ cfg, env }),
onAccountConfigChanged: async ({ prevCfg, nextCfg, accountId }) => {
const previousToken = resolveTelegramAccount({ cfg: prevCfg, accountId }).token.trim();
const nextToken = resolveTelegramAccount({ cfg: nextCfg, accountId }).token.trim();

View File

@@ -1,36 +0,0 @@
import type { ChannelLegacyStateMigrationPlan } from "openclaw/plugin-sdk/channel-contract";
import { resolveChannelAllowFromPath } from "openclaw/plugin-sdk/channel-pairing-paths";
import type { OpenClawConfig } from "openclaw/plugin-sdk/config-types";
import { statRegularFileSync } from "openclaw/plugin-sdk/security-runtime";
import { resolveDefaultTelegramAccountId } from "./account-selection.js";
/**
 * Check whether a regular file exists at the given path.
 *
 * Any probe failure (permission error, invalid path) is treated the same as
 * "missing" so callers get a plain boolean instead of an exception.
 */
function fileExists(pathValue: string): boolean {
  let present = false;
  try {
    present = !statRegularFileSync(pathValue).missing;
  } catch {
    present = false;
  }
  return present;
}
/**
 * Plan the legacy Telegram pairing allowFrom migration.
 *
 * Emits a single copy plan when the unscoped legacy allowFrom file exists and
 * the account-scoped target file for the default Telegram account does not;
 * otherwise emits no plans.
 */
export function detectTelegramLegacyStateMigrations(params: {
  cfg: OpenClawConfig;
  env: NodeJS.ProcessEnv;
}): ChannelLegacyStateMigrationPlan[] {
  const sourcePath = resolveChannelAllowFromPath("telegram", params.env);
  if (!fileExists(sourcePath)) {
    // No legacy unscoped file: nothing to migrate.
    return [];
  }
  const accountId = resolveDefaultTelegramAccountId(params.cfg);
  const targetPath = resolveChannelAllowFromPath("telegram", params.env, accountId);
  // Never overwrite an existing account-scoped store.
  return fileExists(targetPath)
    ? []
    : [
        {
          kind: "copy",
          label: "Telegram pairing allowFrom",
          sourcePath,
          targetPath,
        },
      ];
}

View File

@@ -2,6 +2,7 @@ import fs from "node:fs/promises";
import path from "node:path";
import { withTempHome } from "openclaw/plugin-sdk/test-env";
import { beforeEach, describe, expect, it, vi } from "vitest";
import { addChannelAllowFromStoreEntry } from "../pairing/pairing-store.js";
import { loadAndMaybeMigrateDoctorConfig } from "./doctor-config-flow.js";
import {
getDoctorConfigInputForTest,
@@ -2268,8 +2269,7 @@ describe("doctor config flow", () => {
const result = await withTempHome(
async (home) => {
const configDir = path.join(home, ".openclaw");
const credentialsDir = path.join(configDir, "credentials");
await fs.mkdir(credentialsDir, { recursive: true });
await fs.mkdir(configDir, { recursive: true });
await fs.writeFile(
path.join(configDir, "openclaw.json"),
JSON.stringify(
@@ -2286,11 +2286,11 @@ describe("doctor config flow", () => {
),
"utf-8",
);
await fs.writeFile(
path.join(credentialsDir, "telegram-allowFrom.json"),
JSON.stringify({ version: 1, allowFrom: ["12345"] }, null, 2),
"utf-8",
);
await addChannelAllowFromStoreEntry({
channel: "telegram",
entry: "12345",
accountId: "default",
});
return await loadAndMaybeMigrateDoctorConfig({
options: { nonInteractive: true, repair: true },
confirm: async () => false,

View File

@@ -1,10 +1,12 @@
import fs from "node:fs/promises";
import path from "node:path";
import { beforeEach, describe, expect, it, vi } from "vitest";
import { resolveOAuthDir } from "../config/paths.js";
import { loadDeviceAuthStore } from "../infra/device-auth-store.js";
import { listDevicePairing } from "../infra/device-pairing.js";
import { loadApnsRegistration } from "../infra/push-apns.js";
import { listWebPushSubscriptions } from "../infra/push-web.js";
import { listChannelPairingRequests, readChannelAllowFromStore } from "../pairing/pairing-store.js";
import { withEnvAsync } from "../test-utils/env.js";
import { withTempDir } from "../test-utils/temp-dir.js";
@@ -86,6 +88,29 @@ describe("maybeRepairLegacyRuntimeStateFiles", () => {
})}\n`,
"utf8",
);
const oauthDir = resolveOAuthDir(env, stateDir);
await fs.mkdir(oauthDir, { recursive: true });
await fs.writeFile(
path.join(oauthDir, "telegram-pairing.json"),
`${JSON.stringify({
version: 1,
requests: [
{
id: "sender-1",
code: "ABCD1234",
createdAt: new Date().toISOString(),
lastSeenAt: new Date().toISOString(),
meta: { accountId: "default" },
},
],
})}\n`,
"utf8",
);
await fs.writeFile(
path.join(oauthDir, "telegram-default-allowFrom.json"),
`${JSON.stringify({ version: 1, allowFrom: ["sender-2"] })}\n`,
"utf8",
);
await fs.mkdir(path.join(stateDir, "push"), { recursive: true });
await fs.writeFile(
path.join(stateDir, "push", "web-push-subscriptions.json"),
@@ -132,6 +157,18 @@ describe("maybeRepairLegacyRuntimeStateFiles", () => {
paired: [expect.objectContaining({ deviceId: "device-2" })],
});
expect(loadDeviceAuthStore({ env })?.tokens.operator?.token).toBe("local-token");
await expect(listChannelPairingRequests("telegram", env, "default")).resolves.toEqual([
expect.objectContaining({ id: "sender-1", code: "ABCD1234" }),
]);
await expect(readChannelAllowFromStore("telegram", env, "default")).resolves.toEqual([
"sender-2",
]);
await expect(fs.stat(path.join(oauthDir, "telegram-pairing.json"))).rejects.toMatchObject({
code: "ENOENT",
});
await expect(
fs.stat(path.join(oauthDir, "telegram-default-allowFrom.json")),
).rejects.toMatchObject({ code: "ENOENT" });
await expect(listWebPushSubscriptions(stateDir)).resolves.toEqual([
expect.objectContaining({ subscriptionId: "sub-1" }),
]);

View File

@@ -22,6 +22,10 @@ import {
legacyApnsRegistrationFileExists,
} from "../infra/push-apns.js";
import { importLegacyWebPushFilesToSqlite, legacyWebPushFilesExist } from "../infra/push-web.js";
import {
importLegacyChannelPairingFilesToSqlite,
legacyChannelPairingFilesExist,
} from "../pairing/pairing-store.js";
import { note } from "../terminal/note.js";
import type { DoctorPrompter } from "./doctor-prompter.js";
@@ -31,6 +35,7 @@ type LegacyStateProbe = {
deviceBootstrap: boolean;
devicePairing: boolean;
nodePairing: boolean;
channelPairing: boolean;
webPush: boolean;
apns: boolean;
};
@@ -43,6 +48,7 @@ async function probeLegacyRuntimeStateFiles(env: NodeJS.ProcessEnv): Promise<Leg
deviceBootstrap: await legacyDeviceBootstrapFileExists(baseDir),
devicePairing: await legacyPairingStateFilesExist({ baseDir, subdir: "devices" }),
nodePairing: await legacyPairingStateFilesExist({ baseDir, subdir: "nodes" }),
channelPairing: await legacyChannelPairingFilesExist(env),
webPush: await legacyWebPushFilesExist(baseDir),
apns: await legacyApnsRegistrationFileExists(baseDir),
};
@@ -64,7 +70,7 @@ export async function maybeRepairLegacyRuntimeStateFiles(params: {
}
if (!params.prompter.shouldRepair) {
note(
"Legacy runtime JSON state files detected. Run `openclaw doctor --fix` to import device, bootstrap, pairing, and push state into SQLite.",
"Legacy runtime JSON state files detected. Run `openclaw doctor --fix` to import device, bootstrap, channel pairing, node pairing, and push state into SQLite.",
"SQLite state",
);
return;
@@ -130,6 +136,16 @@ export async function maybeRepairLegacyRuntimeStateFiles(params: {
}
});
}
if (probe.channelPairing) {
await runImport("Channel pairing", async () => {
const result = await importLegacyChannelPairingFilesToSqlite(env);
if (result.files > 0) {
changes.push(
`- Imported ${result.requests} channel pairing request(s) and ${result.allowFrom} channel allowlist entr${result.allowFrom === 1 ? "y" : "ies"} into SQLite.`,
);
}
});
}
if (probe.webPush) {
await runImport("Web push", async () => {
const result = await importLegacyWebPushFilesToSqlite(baseDir);

View File

@@ -617,7 +617,7 @@ function shouldRequireOAuthDir(cfg: OpenClawConfig, env: NodeJS.ProcessEnv): boo
if ([...withPersistedAuth].some((channelId) => !withoutPersistedAuth.has(channelId))) {
return true;
}
// Pairing allowlists are persisted under credentials/<channel>-allowFrom.json.
// Pairing allowlists are persisted in the shared SQLite state database.
for (const [channelId, channelCfg] of Object.entries(channels)) {
if (channelId === "defaults" || channelId === "modelByChannel") {
continue;

View File

@@ -29,46 +29,6 @@ vi.mock("../channels/plugins/bundled.js", async () => {
}
}
/**
 * Resolve which Telegram account ID the default agent uses.
 *
 * Precedence: an explicit binding of the default agent to the telegram
 * channel with a string accountId, then `channels.telegram.defaultAccount`,
 * then the literal "default".
 */
function resolveTelegramAccountId(cfg: OpenClawConfig): string {
  const defaultAgentId = (cfg.agents?.list ?? []).find((agent) => agent.default)?.id ?? "main";
  for (const binding of cfg.bindings ?? []) {
    if (binding.agentId !== defaultAgentId) {
      continue;
    }
    const match = binding.match;
    if (match?.channel === "telegram" && typeof match.accountId === "string") {
      return match.accountId;
    }
  }
  return cfg.channels?.telegram?.defaultAccount ?? "default";
}
/**
 * Test double mirroring the removed Telegram legacy-state detector: when the
 * unscoped legacy allowFrom file exists under OPENCLAW_STATE_DIR/credentials
 * and the account-scoped target does not, emit one copy plan; otherwise none.
 */
function detectTelegramAllowFromMigration(params: {
  cfg: OpenClawConfig;
  env: NodeJS.ProcessEnv;
}) {
  const root = params.env.OPENCLAW_STATE_DIR;
  if (!root) {
    // Without a state dir there is nowhere to look for legacy files.
    return [];
  }
  const credentialsDir = path.join(root, "credentials");
  const sourcePath = path.join(credentialsDir, "telegram-allowFrom.json");
  if (!fileExists(sourcePath)) {
    return [];
  }
  const accountId = resolveTelegramAccountId(params.cfg);
  const targetPath = path.join(credentialsDir, `telegram-${accountId}-allowFrom.json`);
  if (fileExists(targetPath)) {
    // The scoped store already exists; never overwrite it.
    return [];
  }
  return [
    {
      kind: "copy" as const,
      label: "Telegram pairing allowFrom",
      sourcePath,
      targetPath,
    },
  ];
}
function detectWhatsAppLegacyStateMigrations(params: { oauthDir: string }) {
let entries: fs.Dirent[] = [];
try {
@@ -106,8 +66,6 @@ vi.mock("../channels/plugins/bundled.js", async () => {
]),
listBundledChannelLegacyStateMigrationDetectors: vi.fn(() => [
({ oauthDir }: { oauthDir: string }) => detectWhatsAppLegacyStateMigrations({ oauthDir }),
({ cfg, env }: { cfg: OpenClawConfig; env: NodeJS.ProcessEnv }) =>
detectTelegramAllowFromMigration({ cfg, env }),
]),
listBundledChannelSetupPluginsByFeature: vi.fn((feature: string) => {
if (feature === "legacySessionSurfaces") {
@@ -133,18 +91,6 @@ vi.mock("../channels/plugins/bundled.js", async () => {
detectWhatsAppLegacyStateMigrations({ oauthDir }),
},
},
{
id: "telegram",
lifecycle: {
detectLegacyStateMigrations: ({
cfg,
env,
}: {
cfg: OpenClawConfig;
env: NodeJS.ProcessEnv;
}) => detectTelegramAllowFromMigration({ cfg, env }),
},
},
];
}
return [];
@@ -195,32 +141,6 @@ async function makeRootWithEmptyCfg() {
return { root, cfg };
}
/**
 * Seed a legacy unscoped Telegram allowFrom store file in the given
 * credentials directory, matching the on-disk shape: a version-1 JSON object
 * with a single allowFrom entry, pretty-printed with a trailing newline.
 */
function writeLegacyTelegramAllowFromStore(oauthDir: string) {
  const store = { version: 1, allowFrom: ["123456"] };
  const payload = `${JSON.stringify(store, null, 2)}\n`;
  fs.writeFileSync(path.join(oauthDir, "telegram-allowFrom.json"), payload, "utf-8");
}
/**
 * Drive the legacy Telegram allowFrom migration end to end for a test root:
 * seed the legacy store file, detect pending migrations against that root,
 * and run them with a fixed clock. Returns the credentials directory plus the
 * detection and run results for assertions.
 */
async function runTelegramAllowFromMigration(params: { root: string; cfg: OpenClawConfig }) {
  const { root, cfg } = params;
  const oauthDir = ensureCredentialsDir(root);
  writeLegacyTelegramAllowFromStore(oauthDir);
  const env = { OPENCLAW_STATE_DIR: root } as NodeJS.ProcessEnv;
  const detected = await detectLegacyStateMigrations({ cfg, env });
  const result = await runLegacyStateMigrations({ detected, now: () => 123 });
  return { oauthDir, detected, result };
}
afterEach(async () => {
closeOpenClawStateDatabaseForTest();
resetAutoMigrateLegacyStateForTest();
@@ -526,91 +446,6 @@ describe("doctor legacy state migrations", () => {
expect(fs.existsSync(path.join(oauthDir, "creds.json"))).toBe(false);
});
it("migrates legacy Telegram pairing allowFrom store to account-scoped default file", async () => {
const { root, cfg } = await makeRootWithEmptyCfg();
const { oauthDir, detected, result } = await runTelegramAllowFromMigration({ root, cfg });
expect(detected.channelPlans.hasLegacy).toBe(true);
expect(detected.channelPlans.plans.map((plan) => path.basename(plan.targetPath))).toEqual([
"telegram-default-allowFrom.json",
]);
expect(result.warnings).toStrictEqual([]);
const target = path.join(oauthDir, "telegram-default-allowFrom.json");
expect(fs.existsSync(target)).toBe(true);
expect(JSON.parse(fs.readFileSync(target, "utf-8"))).toEqual({
version: 1,
allowFrom: ["123456"],
});
});
it("does not fan out legacy Telegram pairing allowFrom store to configured named accounts", async () => {
const root = await makeTempRoot();
const cfg: OpenClawConfig = {
channels: {
telegram: {
defaultAccount: "bot2",
accounts: {
bot1: {},
bot2: {},
},
},
},
};
const { oauthDir, detected, result } = await runTelegramAllowFromMigration({ root, cfg });
expect(detected.channelPlans.hasLegacy).toBe(true);
expect(detected.channelPlans.plans.map((plan) => path.basename(plan.targetPath))).toEqual([
"telegram-bot2-allowFrom.json",
]);
expect(result.warnings).toStrictEqual([]);
const bot1Target = path.join(oauthDir, "telegram-bot1-allowFrom.json");
const bot2Target = path.join(oauthDir, "telegram-bot2-allowFrom.json");
const defaultTarget = path.join(oauthDir, "telegram-default-allowFrom.json");
expect(fs.existsSync(bot1Target)).toBe(false);
expect(fs.existsSync(bot2Target)).toBe(true);
expect(fs.existsSync(defaultTarget)).toBe(false);
expect(JSON.parse(fs.readFileSync(bot2Target, "utf-8"))).toEqual({
version: 1,
allowFrom: ["123456"],
});
});
it("migrates legacy Telegram pairing allowFrom store to the default agent bound account", async () => {
const root = await makeTempRoot();
const cfg: OpenClawConfig = {
agents: {
list: [{ id: "ops", default: true }],
},
bindings: [{ agentId: "ops", match: { channel: "telegram", accountId: "alerts" } }],
channels: {
telegram: {
accounts: {
alerts: {},
backup: {},
},
},
},
};
const { oauthDir, detected, result } = await runTelegramAllowFromMigration({ root, cfg });
expect(detected.channelPlans.hasLegacy).toBe(true);
expect(detected.channelPlans.plans.map((plan) => path.basename(plan.targetPath))).toEqual([
"telegram-alerts-allowFrom.json",
]);
expect(result.warnings).toStrictEqual([]);
const alertsTarget = path.join(oauthDir, "telegram-alerts-allowFrom.json");
const backupTarget = path.join(oauthDir, "telegram-backup-allowFrom.json");
const defaultTarget = path.join(oauthDir, "telegram-default-allowFrom.json");
expect(fs.existsSync(alertsTarget)).toBe(true);
expect(fs.existsSync(backupTarget)).toBe(false);
expect(fs.existsSync(defaultTarget)).toBe(false);
expect(JSON.parse(fs.readFileSync(alertsTarget, "utf-8"))).toEqual({
version: 1,
allowFrom: ["123456"],
});
});
it("no-ops when nothing detected", async () => {
const root = await makeTempRoot();
const cfg: OpenClawConfig = {};

View File

@@ -5,8 +5,8 @@ import { afterAll, afterEach, beforeAll, describe, expect, it } from "vitest";
import {
clearAllowFromStoreReadCacheForTest,
readChannelAllowFromStoreEntriesSync,
resolveChannelAllowFromPath,
} from "./allow-from-store-read.js";
import { addChannelAllowFromStoreEntry } from "./pairing-store.js";
let fixtureRoot = "";
let caseId = 0;
@@ -24,19 +24,20 @@ function makeHomeDir(): string {
return dir;
}
function writeAllowFromFile(params: {
async function writeAllowFromStore(params: {
channel: "telegram";
env: NodeJS.ProcessEnv;
accountId?: string;
allowFrom: string[];
}): void {
const filePath = resolveChannelAllowFromPath(params.channel, params.env, params.accountId);
fs.mkdirSync(path.dirname(filePath), { recursive: true });
fs.writeFileSync(
filePath,
JSON.stringify({ version: 1, allowFrom: params.allowFrom }, null, 2),
"utf8",
);
}): Promise<void> {
for (const entry of params.allowFrom) {
await addChannelAllowFromStoreEntry({
channel: params.channel,
env: params.env,
accountId: params.accountId,
entry,
});
}
}
beforeAll(() => {
@@ -54,35 +55,26 @@ afterEach(() => {
});
describe("allow-from-store-read", () => {
it("merges scoped and legacy entries for the default account", () => {
it("reads default account entries from SQLite", async () => {
const env = makeEnv(makeHomeDir());
writeAllowFromFile({
channel: "telegram",
env,
allowFrom: [" legacy-a ", "legacy-a", "legacy-b"],
});
writeAllowFromFile({
await writeAllowFromStore({
channel: "telegram",
env,
accountId: "default",
allowFrom: [" scoped-a ", "legacy-b"],
allowFrom: [" scoped-a ", "scoped-a", "legacy-b"],
});
expect(readChannelAllowFromStoreEntriesSync("telegram", env)).toEqual([
"scoped-a",
"legacy-b",
"legacy-a",
]);
expect(readChannelAllowFromStoreEntriesSync("telegram", env)).toEqual(["scoped-a", "legacy-b"]);
});
it("keeps non-default account reads scoped", () => {
it("keeps non-default account reads scoped", async () => {
const env = makeEnv(makeHomeDir());
writeAllowFromFile({
await writeAllowFromStore({
channel: "telegram",
env,
allowFrom: ["legacy-a"],
allowFrom: ["default-a"],
});
writeAllowFromFile({
await writeAllowFromStore({
channel: "telegram",
env,
accountId: "work",

View File

@@ -1,35 +1,7 @@
import { normalizeOptionalString } from "../shared/string-coerce.js";
import {
clearAllowFromFileReadCacheForNamespace,
dedupePreserveOrder,
readAllowFromFileSyncWithExists,
resolveAllowFromAccountId,
resolveAllowFromFilePath,
shouldIncludeLegacyAllowFromEntries,
type AllowFromStore,
} from "./allow-from-store-file.js";
import { resolveAllowFromAccountId, resolveAllowFromFilePath } from "./allow-from-store-file.js";
import { readChannelAllowFromStoreSync } from "./pairing-store.js";
import type { PairingChannel } from "./pairing-store.types.js";
const ALLOW_FROM_STORE_READ_CACHE_NAMESPACE = "allow-from-store-read";
/**
 * Normalize a raw allowFrom store payload into a clean entry list: tolerate a
 * missing or non-array field, coerce each entry via normalizeOptionalString,
 * drop empty results, and dedupe while preserving first-seen order.
 */
function normalizeRawAllowFromList(store: AllowFromStore): string[] {
  const raw = Array.isArray(store.allowFrom) ? store.allowFrom : [];
  const cleaned: string[] = [];
  for (const entry of raw) {
    const normalized = normalizeOptionalString(entry) ?? "";
    if (normalized) {
      cleaned.push(normalized);
    }
  }
  return dedupePreserveOrder(cleaned);
}
/**
 * Read one allowFrom store file through the namespaced read cache, returning
 * the normalized entry list plus whether the file existed on disk.
 */
function readAllowFromEntriesForPathSyncWithExists(filePath: string): {
  entries: string[];
  exists: boolean;
} {
  const request = {
    cacheNamespace: ALLOW_FROM_STORE_READ_CACHE_NAMESPACE,
    filePath,
    normalizeStore: normalizeRawAllowFromList,
  };
  return readAllowFromFileSyncWithExists(request);
}
export function resolveChannelAllowFromPath(
channel: PairingChannel,
env: NodeJS.ProcessEnv = process.env,
@@ -43,21 +15,9 @@ export function readChannelAllowFromStoreEntriesSync(
env: NodeJS.ProcessEnv = process.env,
accountId?: string,
): string[] {
const resolvedAccountId = resolveAllowFromAccountId(accountId);
if (!shouldIncludeLegacyAllowFromEntries(resolvedAccountId)) {
return readAllowFromEntriesForPathSyncWithExists(
resolveAllowFromFilePath(channel, env, resolvedAccountId),
).entries;
}
const scopedEntries = readAllowFromEntriesForPathSyncWithExists(
resolveAllowFromFilePath(channel, env, resolvedAccountId),
).entries;
const legacyEntries = readAllowFromEntriesForPathSyncWithExists(
resolveAllowFromFilePath(channel, env),
).entries;
return dedupePreserveOrder([...scopedEntries, ...legacyEntries]);
return readChannelAllowFromStoreSync(channel, env, resolveAllowFromAccountId(accountId));
}
export function clearAllowFromStoreReadCacheForTest(): void {
clearAllowFromFileReadCacheForNamespace(ALLOW_FROM_STORE_READ_CACHE_NAMESPACE);
// SQLite-backed reads do not keep a process-local file cache.
}

View File

@@ -14,6 +14,8 @@ import {
} from "vitest";
import { resolveOAuthDir } from "../config/paths.js";
import { DEFAULT_ACCOUNT_ID } from "../routing/session-key.js";
import { deleteOpenClawStateKvScope } from "../state/openclaw-state-kv.js";
import { readOpenClawStateKvJson, writeOpenClawStateKvJson } from "../state/openclaw-state-kv.js";
import { withEnvAsync } from "../test-utils/env.js";
vi.mock("../channels/plugins/pairing.js", () => ({
@@ -40,9 +42,10 @@ import {
let fixtureRoot = "";
let caseId = 0;
type RandomIntSync = (minOrMax: number, max?: number) => number;
type FileReadSpy = {
readCount: () => number;
mockRestore: () => void;
type ChannelPairingTestState = {
version: 1;
requests: Array<Record<string, unknown>>;
allowFrom?: Record<string, string[]>;
};
let randomIntSpy: MockInstance<RandomIntSync>;
@@ -100,10 +103,6 @@ function writeJsonFixture(filePath: string, value: unknown) {
fsSync.writeFileSync(filePath, `${JSON.stringify(value, null, 2)}\n`, "utf8");
}
/**
 * Build the legacy pairing-request file path for a channel under the
 * credentials (OAuth) directory of the given state dir.
 */
function resolvePairingFilePath(stateDir: string, channel: string) {
  const oauthDir = resolveOAuthDir(process.env, stateDir);
  return path.join(oauthDir, `${channel}-pairing.json`);
}
function resolveAllowFromFilePath(stateDir: string, channel: string, accountId?: string) {
const suffix = accountId ? `-${accountId}` : "";
return path.join(resolveOAuthDir(process.env, stateDir), `${channel}${suffix}-allowFrom.json`);
@@ -112,6 +111,31 @@ function resolveAllowFromFilePath(stateDir: string, channel: string, accountId?:
function clearOAuthFixtures(stateDir: string) {
  // Reset the process-local read cache, the on-disk OAuth fixture dir, and the
  // SQLite KV rows for channel pairing so each test case starts clean.
  clearPairingAllowFromReadCacheForTest();
  const oauthDir = resolveOAuthDir(process.env, stateDir);
  fsSync.rmSync(oauthDir, { recursive: true, force: true });
  const scopedEnv = { ...process.env, OPENCLAW_STATE_DIR: stateDir };
  deleteOpenClawStateKvScope("pairing.channel", { env: scopedEnv });
}
function readChannelPairingTestState(stateDir: string, channel: string): ChannelPairingTestState {
  // Fixture reader: a missing KV row behaves like an empty pairing state.
  const stored = readOpenClawStateKvJson("pairing.channel", channel, {
    env: { ...process.env, OPENCLAW_STATE_DIR: stateDir },
  }) as ChannelPairingTestState | undefined;
  if (stored) {
    return stored;
  }
  return { version: 1, requests: [], allowFrom: {} };
}
function writeChannelPairingTestState(
  stateDir: string,
  channel: string,
  state: ChannelPairingTestState,
) {
  // Fixture writer mirroring the runtime KV layout for channel pairing state.
  const options = { env: { ...process.env, OPENCLAW_STATE_DIR: stateDir } };
  writeOpenClawStateKvJson("pairing.channel", channel, state, options);
}
async function writeAllowFromFixture(params: {
@@ -120,10 +144,10 @@ async function writeAllowFromFixture(params: {
allowFrom: string[];
accountId?: string;
}) {
writeJsonFixture(resolveAllowFromFilePath(params.stateDir, params.channel, params.accountId), {
version: 1,
allowFrom: params.allowFrom,
});
const state = readChannelPairingTestState(params.stateDir, params.channel);
state.allowFrom ??= {};
state.allowFrom[params.accountId ?? DEFAULT_ACCOUNT_ID] = params.allowFrom;
writeChannelPairingTestState(params.stateDir, params.channel, state);
}
async function createTelegramPairingRequest(accountId: string, id = "12345") {
@@ -155,28 +179,6 @@ async function seedTelegramAllowFromFixtures(params: {
});
}
async function assertAllowFromCacheInvalidation(params: {
  stateDir: string;
  readAllowFrom: () => Promise<string[]>;
  readSpy: FileReadSpy;
}) {
  // Two identical reads should be served from the cache (one underlying file read).
  const initialReads = [await params.readAllowFrom(), await params.readAllowFrom()];
  for (const entries of initialReads) {
    expect(entries).toEqual(["1001"]);
  }
  expect(params.readSpy.readCount()).toBe(1);
  // Rewriting the fixture must invalidate the cache and force a second file read.
  await writeAllowFromFixture({
    stateDir: params.stateDir,
    channel: "telegram",
    accountId: "yy",
    allowFrom: ["10022"],
  });
  const afterUpdate = await params.readAllowFrom();
  expect(afterUpdate).toEqual(["10022"]);
  expect(params.readSpy.readCount()).toBe(2);
}
async function expectAccountScopedEntryIsolated(entry: string, accountId = "yy") {
const accountScoped = await readChannelAllowFromStore("telegram", process.env, accountId);
const channelScoped = await readLegacyChannelAllowFromStore("telegram");
@@ -184,41 +186,6 @@ async function expectAccountScopedEntryIsolated(entry: string, accountId = "yy")
expect(channelScoped).not.toContain(entry);
}
async function expectAllowFromCacheInvalidationWithReadSpy(params: {
  stateDir: string;
  createReadSpy: (filePath: string) => FileReadSpy;
  readAllowFrom: () => Promise<string[]>;
}) {
  // Seed a known fixture, clear the cache, then verify invalidation while the
  // spy counts underlying file reads; always restore the spy afterwards.
  const filePath = resolveAllowFromFilePath(params.stateDir, "telegram", "yy");
  await writeAllowFromFixture({
    stateDir: params.stateDir,
    channel: "telegram",
    accountId: "yy",
    allowFrom: ["1001"],
  });
  clearPairingAllowFromReadCacheForTest();
  const readSpy = params.createReadSpy(filePath);
  try {
    await assertAllowFromCacheInvalidation({
      stateDir: params.stateDir,
      readAllowFrom: params.readAllowFrom,
      readSpy,
    });
  } finally {
    readSpy.mockRestore();
  }
}
function countFileReads(spy: { mock: { calls: unknown[][] } }, filePath: string): number {
  // Tally how many recorded calls received `filePath` as their first argument.
  return spy.mock.calls.reduce(
    (total, [firstArg]) => (firstArg === filePath ? total + 1 : total),
    0,
  );
}
async function seedDefaultAccountAllowFromFixture(stateDir: string) {
await seedTelegramAllowFromFixtures({
stateDir,
@@ -323,16 +290,15 @@ describe("pairing store", () => {
accountId: DEFAULT_ACCOUNT_ID,
});
expect(created.created).toBe(true);
const filePath = resolvePairingFilePath(stateDir, "demo-pairing-b");
const raw = fsSync.readFileSync(filePath, "utf8");
const parsed = JSON.parse(raw) as {
requests?: Array<Record<string, unknown>>;
};
const parsed = readChannelPairingTestState(stateDir, "demo-pairing-b");
const expiredAt = new Date(Date.now() - 2 * 60 * 60 * 1000).toISOString();
const requests = (parsed.requests ?? []).map((entry) =>
Object.assign({}, entry, { createdAt: expiredAt, lastSeenAt: expiredAt }),
);
writeJsonFixture(filePath, { version: 1, requests });
writeChannelPairingTestState(stateDir, "demo-pairing-b", {
...parsed,
requests,
});
expect(await listChannelPairingRequests("demo-pairing-b")).toHaveLength(0);
const next = await upsertChannelPairingRequest({
channel: "demo-pairing-b",
@@ -360,7 +326,7 @@ describe("pairing store", () => {
expect(listIds).toEqual(["+15550000001", "+15550000002", "+15550000003"]);
const createdAt = new Date().toISOString();
writeJsonFixture(resolvePairingFilePath(stateDir, "demo-pairing-d"), {
writeChannelPairingTestState(stateDir, "demo-pairing-d", {
version: 1,
requests: ids.map((id, index) => ({
id,
@@ -487,29 +453,17 @@ describe("pairing store", () => {
});
});
it("rethrows unexpected stat errors after allowFrom writes", async () => {
it("stores allowFrom approvals in SQLite without writing legacy files", async () => {
await withTempStateDir(async (stateDir) => {
const allowFromPath = resolveAllowFromFilePath(stateDir, "telegram", "yy");
const error = Object.assign(new Error("stat failed"), { code: "EACCES" });
const originalStat = fsSync.promises.stat.bind(fsSync.promises);
const statSpy = vi.spyOn(fsSync.promises, "stat").mockImplementation(async (target) => {
if (String(target) === allowFromPath) {
throw error;
}
return await originalStat(target);
const result = await addChannelAllowFromStoreEntry({
channel: "telegram",
accountId: "yy",
entry: "12345",
});
try {
await expect(
addChannelAllowFromStoreEntry({
channel: "telegram",
accountId: "yy",
entry: "12345",
}),
).rejects.toBe(error);
} finally {
statSpy.mockRestore();
}
expect(result).toEqual({ changed: true, allowFrom: ["12345"] });
expect(await readChannelAllowFromStore("telegram", process.env, "yy")).toEqual(["12345"]);
expect(fsSync.existsSync(resolveAllowFromFilePath(stateDir, "telegram", "yy"))).toBe(false);
});
});
@@ -559,14 +513,14 @@ describe("pairing store", () => {
await seedDefaultAccountAllowFromFixture(stateDir);
},
accountId: DEFAULT_ACCOUNT_ID,
expected: ["1002", "1001"],
expected: ["1002"],
},
{
setup: async () => {
await seedDefaultAccountAllowFromFixture(stateDir);
},
accountId: undefined,
expected: ["1002", "1001"],
expected: ["1002"],
},
] as const) {
clearOAuthFixtures(stateDir);
@@ -617,37 +571,25 @@ describe("pairing store", () => {
});
});
it("reuses cached allowFrom reads and invalidates on file updates", async () => {
it("reads latest SQLite allowFrom entries without file cache invalidation", async () => {
await withTempStateDir(async (stateDir) => {
for (const variant of [
{
createReadSpy: (filePath: string) => {
const spy = vi.spyOn(fsSync.promises, "readFile");
return {
readCount: () => countFileReads(spy, filePath),
mockRestore: () => spy.mockRestore(),
};
},
readAllowFrom: () => readChannelAllowFromStore("telegram", process.env, "yy"),
},
{
createReadSpy: (filePath: string) => {
const spy = vi.spyOn(fsSync, "readFileSync");
return {
readCount: () => countFileReads(spy, filePath),
mockRestore: () => spy.mockRestore(),
};
},
readAllowFrom: async () => readChannelAllowFromStoreSync("telegram", process.env, "yy"),
},
]) {
clearOAuthFixtures(stateDir);
await expectAllowFromCacheInvalidationWithReadSpy({
stateDir,
createReadSpy: variant.createReadSpy,
readAllowFrom: variant.readAllowFrom,
});
}
await writeAllowFromFixture({
stateDir,
channel: "telegram",
accountId: "yy",
allowFrom: ["1001"],
});
expect(await readChannelAllowFromStore("telegram", process.env, "yy")).toEqual(["1001"]);
expect(readChannelAllowFromStoreSync("telegram", process.env, "yy")).toEqual(["1001"]);
await writeAllowFromFixture({
stateDir,
channel: "telegram",
accountId: "yy",
allowFrom: ["10022"],
});
expect(await readChannelAllowFromStore("telegram", process.env, "yy")).toEqual(["10022"]);
expect(readChannelAllowFromStoreSync("telegram", process.env, "yy")).toEqual(["10022"]);
});
});
});

View File

@@ -1,10 +1,9 @@
import crypto from "node:crypto";
import fs from "node:fs";
import path from "node:path";
import { CHANNEL_IDS } from "../channels/ids.js";
import { getPairingAdapter } from "../channels/plugins/pairing.js";
import type { ChannelPairingAdapter } from "../channels/plugins/pairing.types.js";
import { withFileLock as withPathLock } from "../infra/file-lock.js";
import { readJsonFileWithFallback, writeJsonFileAtomically } from "../plugin-sdk/json-store.js";
import { DEFAULT_ACCOUNT_ID } from "../routing/session-key.js";
import {
normalizeLowercaseStringOrEmpty,
@@ -12,17 +11,23 @@ import {
normalizeOptionalString,
normalizeStringifiedOptionalString,
} from "../shared/string-coerce.js";
import type { OpenClawStateDatabaseOptions } from "../state/openclaw-state-db.js";
import {
runOpenClawStateWriteTransaction,
type OpenClawStateDatabase,
} from "../state/openclaw-state-db.js";
import {
readOpenClawStateKvJson,
writeOpenClawStateKvJson,
type OpenClawStateJsonValue,
} from "../state/openclaw-state-kv.js";
import {
clearAllowFromFileReadCacheForNamespace,
dedupePreserveOrder,
readAllowFromFileSyncWithExists,
readAllowFromFileWithExists,
resolveAllowFromAccountId,
resolveAllowFromFilePath,
resolvePairingCredentialsDir,
safeChannelKey,
setAllowFromFileReadCache,
shouldIncludeLegacyAllowFromEntries,
type AllowFromStore,
} from "./allow-from-store-file.js";
import type { PairingChannel } from "./pairing-store.types.js";
@@ -33,17 +38,9 @@ const PAIRING_CODE_ALPHABET = "ABCDEFGHJKLMNPQRSTUVWXYZ23456789";
const PAIRING_CODE_MAX_ATTEMPTS = 500;
const PAIRING_PENDING_TTL_MS = 60 * 60 * 1000;
const PAIRING_PENDING_MAX = 3;
const PAIRING_STORE_LOCK_OPTIONS = {
retries: {
retries: 10,
factor: 2,
minTimeout: 100,
maxTimeout: 10_000,
randomize: true,
},
stale: 30_000,
} as const;
const PAIRING_ALLOW_FROM_CACHE_NAMESPACE = "pairing-store";
const CHANNEL_PAIRING_SCOPE = "pairing.channel";
const LEGACY_PAIRING_SUFFIX = "-pairing.json";
const LEGACY_ALLOW_FROM_SUFFIX = "-allowFrom.json";
export type PairingRequest = {
id: string;
@@ -58,9 +55,9 @@ type PairingStore = {
requests: PairingRequest[];
};
function resolvePairingPath(channel: PairingChannel, env: NodeJS.ProcessEnv = process.env): string {
  // Legacy JSON store location: <credentials dir>/<channel>-pairing.json.
  const fileName = `${safeChannelKey(channel)}-pairing.json`;
  return path.join(resolvePairingCredentialsDir(env), fileName);
}
type ChannelPairingState = PairingStore & {
allowFrom?: Record<string, string[]>;
};
export function resolveChannelAllowFromPath(
channel: PairingChannel,
@@ -70,51 +67,6 @@ export function resolveChannelAllowFromPath(
return resolveAllowFromFilePath(channel, env, accountId);
}
async function readJsonFile<T>(
  filePath: string,
  fallback: T,
): Promise<{ value: T; exists: boolean }> {
  // Thin alias over the shared JSON-store helper; keeps call sites terse.
  return readJsonFileWithFallback(filePath, fallback);
}
async function writeJsonFile(filePath: string, value: unknown): Promise<void> {
  // Delegate to the atomic writer so a partially-written store is never observable.
  return writeJsonFileAtomically(filePath, value);
}
async function readPairingRequests(filePath: string): Promise<PairingRequest[]> {
  // Missing or malformed stores degrade to an empty request list.
  const fallback: PairingStore = { version: 1, requests: [] };
  const { value } = await readJsonFile<PairingStore>(filePath, fallback);
  if (!Array.isArray(value.requests)) {
    return [];
  }
  return value.requests;
}
async function readPrunedPairingRequests(filePath: string): Promise<{
  requests: PairingRequest[];
  removed: boolean;
}> {
  // Load the store, then drop requests whose pending TTL has already elapsed.
  const requests = await readPairingRequests(filePath);
  return pruneExpiredRequests(requests, Date.now());
}
async function ensureJsonFile(filePath: string, fallback: unknown) {
  // Seed the store with `fallback` when the file cannot be accessed
  // (most commonly because it does not exist yet).
  const accessible = await fs.promises.access(filePath).then(
    () => true,
    () => false,
  );
  if (!accessible) {
    await writeJsonFile(filePath, fallback);
  }
}
async function withFileLock<T>(
  filePath: string,
  fallback: unknown,
  fn: () => Promise<T>,
): Promise<T> {
  // The lock helper needs the target file to exist, so create it first if absent.
  await ensureJsonFile(filePath, fallback);
  return withPathLock(filePath, PAIRING_STORE_LOCK_OPTIONS, () => fn());
}
function parseTimestamp(value: string | undefined): number | null {
if (!value) {
return null;
@@ -262,83 +214,146 @@ async function readAllowFromStateForPathWithExists(
filePath: string,
): Promise<{ entries: string[]; exists: boolean }> {
return await readAllowFromFileWithExists({
cacheNamespace: PAIRING_ALLOW_FROM_CACHE_NAMESPACE,
cacheNamespace: "pairing-store-legacy-import",
filePath,
normalizeStore: (store) => normalizeAllowFromList(channel, store),
});
}
function readAllowFromStateForPathSync(channel: PairingChannel, filePath: string): string[] {
return readAllowFromStateForPathSyncWithExists(channel, filePath).entries;
function sqliteOptionsForEnv(env: NodeJS.ProcessEnv): OpenClawStateDatabaseOptions {
  // Pin SQLite state access to the caller-supplied environment so tests and
  // callers with OPENCLAW_STATE_DIR overrides hit the right database.
  return { env: env };
}
function readAllowFromStateForPathSyncWithExists(
/**
 * Derives the KV key used for a channel's pairing state within the
 * `pairing.channel` scope. Delegates to `safeChannelKey`, which presumably
 * sanitizes arbitrary channel identifiers — confirm in allow-from-store-file.
 */
function channelPairingKey(channel: PairingChannel): string {
  return safeChannelKey(channel);
}
function normalizeChannelPairingState(
channel: PairingChannel,
filePath: string,
): { entries: string[]; exists: boolean } {
return readAllowFromFileSyncWithExists({
cacheNamespace: PAIRING_ALLOW_FROM_CACHE_NAMESPACE,
filePath,
normalizeStore: (store) => normalizeAllowFromList(channel, store),
value: unknown,
): ChannelPairingState {
const record = value && typeof value === "object" && !Array.isArray(value) ? value : {};
const rawRequests = Array.isArray((record as { requests?: unknown }).requests)
? (record as { requests: unknown[] }).requests
: [];
const requests = rawRequests.flatMap((entry) => {
if (!entry || typeof entry !== "object" || Array.isArray(entry)) {
return [];
}
const candidate = entry as Partial<PairingRequest>;
if (
typeof candidate.id !== "string" ||
typeof candidate.code !== "string" ||
typeof candidate.createdAt !== "string"
) {
return [];
}
return [
{
id: candidate.id,
code: candidate.code,
createdAt: candidate.createdAt,
lastSeenAt:
typeof candidate.lastSeenAt === "string" ? candidate.lastSeenAt : candidate.createdAt,
...(candidate.meta && typeof candidate.meta === "object" && !Array.isArray(candidate.meta)
? { meta: candidate.meta as Record<string, string> }
: {}),
} satisfies PairingRequest,
];
});
}
async function readAllowFromState(params: {
channel: PairingChannel;
entry: string | number;
filePath: string;
}): Promise<{ current: string[]; normalized: string | null }> {
const { value } = await readJsonFile<AllowFromStore>(params.filePath, {
version: 1,
allowFrom: [],
});
const current = normalizeAllowFromList(params.channel, value);
const normalized = normalizeAllowFromInput(params.channel, params.entry);
return { current, normalized: normalized || null };
}
async function writeAllowFromState(filePath: string, allowFrom: string[]): Promise<void> {
await writeJsonFile(filePath, {
version: 1,
allowFrom,
} satisfies AllowFromStore);
let stat: Awaited<ReturnType<typeof fs.promises.stat>> | null = null;
try {
stat = await fs.promises.stat(filePath);
} catch (err) {
const code = (err as { code?: string }).code;
if (code !== "ENOENT") {
throw err;
const allowFrom: Record<string, string[]> = {};
const rawAllowFrom = (record as { allowFrom?: unknown }).allowFrom;
if (rawAllowFrom && typeof rawAllowFrom === "object" && !Array.isArray(rawAllowFrom)) {
for (const [accountId, entries] of Object.entries(rawAllowFrom)) {
const normalizedAccountId = resolveAllowFromAccountId(accountId);
allowFrom[normalizedAccountId] = normalizeAllowFromList(channel, {
version: 1,
allowFrom: Array.isArray(entries) ? entries.map(String) : [],
});
}
}
setAllowFromFileReadCache({
cacheNamespace: PAIRING_ALLOW_FROM_CACHE_NAMESPACE,
filePath,
entry: {
exists: true,
mtimeMs: stat?.mtimeMs ?? null,
size: stat?.size ?? null,
entries: allowFrom.slice(),
},
});
return { version: 1, requests, allowFrom };
}
async function readNonDefaultAccountAllowFrom(params: {
channel: PairingChannel;
env: NodeJS.ProcessEnv;
accountId: string;
}): Promise<string[]> {
const scopedPath = resolveAllowFromFilePath(params.channel, params.env, params.accountId);
return await readAllowFromStateForPath(params.channel, scopedPath);
function readChannelPairingStateFromDatabase(
  database: OpenClawStateDatabase,
  channel: PairingChannel,
): ChannelPairingState {
  // Read the raw KV row for this channel using the caller's open database
  // handle (so reads inside a write transaction see uncommitted state).
  const statement = database.db.prepare("SELECT value_json FROM kv WHERE scope = ? AND key = ?");
  const row = statement.get(CHANNEL_PAIRING_SCOPE, channelPairingKey(channel)) as
    | { value_json?: string }
    | undefined;
  const raw = row?.value_json;
  if (!raw) {
    return { version: 1, requests: [], allowFrom: {} };
  }
  try {
    return normalizeChannelPairingState(channel, JSON.parse(raw));
  } catch {
    // Corrupt JSON degrades to an empty state instead of failing the transaction.
    return { version: 1, requests: [], allowFrom: {} };
  }
}
function readNonDefaultAccountAllowFromSync(params: {
channel: PairingChannel;
env: NodeJS.ProcessEnv;
accountId: string;
}): string[] {
const scopedPath = resolveAllowFromFilePath(params.channel, params.env, params.accountId);
return readAllowFromStateForPathSync(params.channel, scopedPath);
function readChannelPairingState(
  channel: PairingChannel,
  env: NodeJS.ProcessEnv,
): ChannelPairingState {
  // Non-transactional path: a one-shot KV read, then shape-normalization of
  // whatever JSON was stored (missing rows normalize to an empty state).
  const stored = readOpenClawStateKvJson(
    CHANNEL_PAIRING_SCOPE,
    channelPairingKey(channel),
    sqliteOptionsForEnv(env),
  );
  return normalizeChannelPairingState(channel, stored);
}
/**
 * Persists a channel's pairing state as a single KV row using the caller's
 * open database handle (used inside write transactions). Upserts on
 * (scope, key) so repeated writes for the same channel replace the row.
 * The stored payload is canonicalized: version pinned to 1 and `allowFrom`
 * always present (empty object when unset).
 */
function writeChannelPairingStateToDatabase(
  database: OpenClawStateDatabase,
  channel: PairingChannel,
  state: ChannelPairingState,
): void {
  database.db
    .prepare(
      `
INSERT INTO kv (scope, key, value_json, updated_at)
VALUES (?, ?, ?, ?)
ON CONFLICT(scope, key) DO UPDATE SET
  value_json = excluded.value_json,
  updated_at = excluded.updated_at
`,
    )
    .run(
      CHANNEL_PAIRING_SCOPE,
      channelPairingKey(channel),
      JSON.stringify({
        version: 1,
        requests: state.requests,
        allowFrom: state.allowFrom ?? {},
      } satisfies ChannelPairingState),
      // updated_at stored as epoch milliseconds.
      Date.now(),
    );
}
function writeChannelPairingState(
  channel: PairingChannel,
  state: ChannelPairingState,
  env: NodeJS.ProcessEnv,
): void {
  // Non-transactional write path: canonicalize the payload (version pinned,
  // allowFrom always present) before handing it to the shared KV helper.
  const payload = {
    version: 1,
    requests: state.requests,
    allowFrom: state.allowFrom ?? {},
  } as OpenClawStateJsonValue;
  writeOpenClawStateKvJson<OpenClawStateJsonValue>(
    CHANNEL_PAIRING_SCOPE,
    channelPairingKey(channel),
    payload,
    sqliteOptionsForEnv(env),
  );
}
function readAllowFromState(channel: PairingChannel, env: NodeJS.ProcessEnv, accountId?: string) {
const resolvedAccountId = resolveAllowFromAccountId(accountId);
const state = readChannelPairingState(channel, env);
return (state.allowFrom?.[resolvedAccountId] ?? []).slice();
}
async function updateAllowFromStoreEntry(params: {
@@ -349,35 +364,30 @@ async function updateAllowFromStoreEntry(params: {
apply: (current: string[], normalized: string) => string[] | null;
}): Promise<{ changed: boolean; allowFrom: string[] }> {
const env = params.env ?? process.env;
const filePath = resolveAllowFromFilePath(params.channel, env, params.accountId);
return await withFileLock(
filePath,
{ version: 1, allowFrom: [] } satisfies AllowFromStore,
async () => {
const { current, normalized } = await readAllowFromState({
channel: params.channel,
entry: params.entry,
filePath,
});
if (!normalized) {
return { changed: false, allowFrom: current };
}
const next = params.apply(current, normalized);
if (!next) {
return { changed: false, allowFrom: current };
}
await writeAllowFromState(filePath, next);
return { changed: true, allowFrom: next };
},
);
const normalizedAccountId = resolveAllowFromAccountId(params.accountId);
const normalized = normalizeAllowFromInput(params.channel, params.entry);
return runOpenClawStateWriteTransaction((database) => {
const state = readChannelPairingStateFromDatabase(database, params.channel);
const current = (state.allowFrom?.[normalizedAccountId] ?? []).slice();
if (!normalized) {
return { changed: false, allowFrom: current };
}
const next = params.apply(current, normalized);
if (!next) {
return { changed: false, allowFrom: current };
}
state.allowFrom ??= {};
state.allowFrom[normalizedAccountId] = next;
writeChannelPairingStateToDatabase(database, params.channel, state);
return { changed: true, allowFrom: next };
}, sqliteOptionsForEnv(env));
}
export async function readLegacyChannelAllowFromStore(
channel: PairingChannel,
env: NodeJS.ProcessEnv = process.env,
): Promise<string[]> {
const filePath = resolveAllowFromFilePath(channel, env);
return await readAllowFromStateForPath(channel, filePath);
return readAllowFromState(channel, env, DEFAULT_ACCOUNT_ID);
}
export async function readChannelAllowFromStore(
@@ -385,30 +395,14 @@ export async function readChannelAllowFromStore(
env: NodeJS.ProcessEnv = process.env,
accountId?: string,
): Promise<string[]> {
const resolvedAccountId = resolveAllowFromAccountId(accountId);
if (!shouldIncludeLegacyAllowFromEntries(resolvedAccountId)) {
return await readNonDefaultAccountAllowFrom({
channel,
env,
accountId: resolvedAccountId,
});
}
const scopedPath = resolveAllowFromFilePath(channel, env, resolvedAccountId);
const scopedEntries = await readAllowFromStateForPath(channel, scopedPath);
// Backward compatibility: legacy channel-level allowFrom store was unscoped.
// Keep honoring it for default account to prevent re-pair prompts after upgrades.
const legacyPath = resolveAllowFromFilePath(channel, env);
const legacyEntries = await readAllowFromStateForPath(channel, legacyPath);
return dedupePreserveOrder([...scopedEntries, ...legacyEntries]);
return readAllowFromState(channel, env, accountId);
}
export function readLegacyChannelAllowFromStoreSync(
channel: PairingChannel,
env: NodeJS.ProcessEnv = process.env,
): string[] {
const filePath = resolveAllowFromFilePath(channel, env);
return readAllowFromStateForPathSync(channel, filePath);
return readAllowFromState(channel, env, DEFAULT_ACCOUNT_ID);
}
export function readChannelAllowFromStoreSync(
@@ -416,24 +410,12 @@ export function readChannelAllowFromStoreSync(
env: NodeJS.ProcessEnv = process.env,
accountId?: string,
): string[] {
const resolvedAccountId = resolveAllowFromAccountId(accountId);
if (!shouldIncludeLegacyAllowFromEntries(resolvedAccountId)) {
return readNonDefaultAccountAllowFromSync({
channel,
env,
accountId: resolvedAccountId,
});
}
const scopedPath = resolveAllowFromFilePath(channel, env, resolvedAccountId);
const scopedEntries = readAllowFromStateForPathSync(channel, scopedPath);
const legacyPath = resolveAllowFromFilePath(channel, env);
const legacyEntries = readAllowFromStateForPathSync(channel, legacyPath);
return dedupePreserveOrder([...scopedEntries, ...legacyEntries]);
return readAllowFromState(channel, env, accountId);
}
export function clearPairingAllowFromReadCacheForTest(): void {
clearAllowFromFileReadCacheForNamespace(PAIRING_ALLOW_FROM_CACHE_NAMESPACE);
// Runtime allowFrom reads are SQLite-backed; legacy import helpers still keep
// their own file-read caches and are cleared by tests through that module.
}
type AllowFromStoreEntryUpdateParams = {
@@ -500,39 +482,26 @@ export async function listChannelPairingRequests(
env: NodeJS.ProcessEnv = process.env,
accountId?: string,
): Promise<PairingRequest[]> {
const filePath = resolvePairingPath(channel, env);
return await withFileLock(
filePath,
{ version: 1, requests: [] } satisfies PairingStore,
async () => {
const { requests: prunedExpired, removed: expiredRemoved } =
await readPrunedPairingRequests(filePath);
const { requests: pruned, removed: cappedRemoved } = pruneExcessRequestsByAccount(
prunedExpired,
PAIRING_PENDING_MAX,
);
if (expiredRemoved || cappedRemoved) {
await writeJsonFile(filePath, {
version: 1,
requests: pruned,
} satisfies PairingStore);
}
const normalizedAccountId = normalizePairingAccountId(accountId);
const filtered = normalizedAccountId
? pruned.filter((entry) => requestMatchesAccountId(entry, normalizedAccountId))
: pruned;
return filtered
.filter(
(r) =>
r &&
typeof r.id === "string" &&
typeof r.code === "string" &&
typeof r.createdAt === "string",
)
.slice()
.toSorted((a, b) => a.createdAt.localeCompare(b.createdAt));
},
);
return runOpenClawStateWriteTransaction((database) => {
const state = readChannelPairingStateFromDatabase(database, channel);
const { requests: prunedExpired, removed: expiredRemoved } = pruneExpiredRequests(
state.requests,
Date.now(),
);
const { requests: pruned, removed: cappedRemoved } = pruneExcessRequestsByAccount(
prunedExpired,
PAIRING_PENDING_MAX,
);
if (expiredRemoved || cappedRemoved) {
state.requests = pruned;
writeChannelPairingStateToDatabase(database, channel, state);
}
const normalizedAccountId = normalizePairingAccountId(accountId);
const filtered = normalizedAccountId
? pruned.filter((entry) => requestMatchesAccountId(entry, normalizedAccountId))
: pruned;
return filtered.slice().toSorted((a, b) => a.createdAt.localeCompare(b.createdAt));
}, sqliteOptionsForEnv(env));
}
export async function upsertChannelPairingRequest(params: {
@@ -545,94 +514,81 @@ export async function upsertChannelPairingRequest(params: {
pairingAdapter?: ChannelPairingAdapter;
}): Promise<{ code: string; created: boolean }> {
const env = params.env ?? process.env;
const filePath = resolvePairingPath(params.channel, env);
return await withFileLock(
filePath,
{ version: 1, requests: [] } satisfies PairingStore,
async () => {
const now = new Date().toISOString();
const nowMs = Date.now();
const id = normalizeId(params.id);
const normalizedAccountId = normalizePairingAccountId(params.accountId) || DEFAULT_ACCOUNT_ID;
const baseMeta =
params.meta && typeof params.meta === "object"
? Object.fromEntries(
Object.entries(params.meta)
.map(([k, v]) => [k, normalizeOptionalString(v) ?? ""] as const)
.filter(([_, v]) => Boolean(v)),
)
: undefined;
const meta = { ...baseMeta, accountId: normalizedAccountId };
return runOpenClawStateWriteTransaction((database) => {
const now = new Date().toISOString();
const nowMs = Date.now();
const id = normalizeId(params.id);
const normalizedAccountId = normalizePairingAccountId(params.accountId) || DEFAULT_ACCOUNT_ID;
const baseMeta =
params.meta && typeof params.meta === "object"
? Object.fromEntries(
Object.entries(params.meta)
.map(([k, v]) => [k, normalizeOptionalString(v) ?? ""] as const)
.filter(([_, v]) => Boolean(v)),
)
: undefined;
const meta = { ...baseMeta, accountId: normalizedAccountId };
let reqs = await readPairingRequests(filePath);
const { requests: prunedExpired, removed: expiredRemoved } = pruneExpiredRequests(
reqs,
nowMs,
);
reqs = prunedExpired;
const normalizedMatchingAccountId = normalizedAccountId;
const existingIdx = reqs.findIndex((r) => {
if (r.id !== id) {
return false;
}
return requestMatchesAccountId(r, normalizedMatchingAccountId);
});
const existingCodes = new Set(
reqs.map((req) => (normalizeOptionalString(req.code) ?? "").toUpperCase()),
);
if (existingIdx >= 0) {
const existing = reqs[existingIdx];
const existingCode = normalizeOptionalString(existing?.code) ?? "";
const code = existingCode || generateUniqueCode(existingCodes);
const next: PairingRequest = {
id,
code,
createdAt: existing?.createdAt ?? now,
lastSeenAt: now,
meta: meta ?? existing?.meta,
};
reqs[existingIdx] = next;
const { requests: capped } = pruneExcessRequestsByAccount(reqs, PAIRING_PENDING_MAX);
await writeJsonFile(filePath, {
version: 1,
requests: capped,
} satisfies PairingStore);
return { code, created: false };
const state = readChannelPairingStateFromDatabase(database, params.channel);
let reqs = state.requests;
const { requests: prunedExpired, removed: expiredRemoved } = pruneExpiredRequests(reqs, nowMs);
reqs = prunedExpired;
const normalizedMatchingAccountId = normalizedAccountId;
const existingIdx = reqs.findIndex((r) => {
if (r.id !== id) {
return false;
}
return requestMatchesAccountId(r, normalizedMatchingAccountId);
});
const existingCodes = new Set(
reqs.map((req) => (normalizeOptionalString(req.code) ?? "").toUpperCase()),
);
const { requests: capped, removed: cappedRemoved } = pruneExcessRequestsByAccount(
reqs,
PAIRING_PENDING_MAX,
);
reqs = capped;
const accountRequestCount = reqs.filter((r) =>
requestMatchesAccountId(r, normalizedMatchingAccountId),
).length;
if (PAIRING_PENDING_MAX > 0 && accountRequestCount >= PAIRING_PENDING_MAX) {
if (expiredRemoved || cappedRemoved) {
await writeJsonFile(filePath, {
version: 1,
requests: reqs,
} satisfies PairingStore);
}
return { code: "", created: false };
}
const code = generateUniqueCode(existingCodes);
if (existingIdx >= 0) {
const existing = reqs[existingIdx];
const existingCode = normalizeOptionalString(existing?.code) ?? "";
const code = existingCode || generateUniqueCode(existingCodes);
const next: PairingRequest = {
id,
code,
createdAt: now,
createdAt: existing?.createdAt ?? now,
lastSeenAt: now,
...(meta ? { meta } : {}),
meta: meta ?? existing?.meta,
};
await writeJsonFile(filePath, {
version: 1,
requests: [...reqs, next],
} satisfies PairingStore);
return { code, created: true };
},
);
reqs[existingIdx] = next;
const { requests: capped } = pruneExcessRequestsByAccount(reqs, PAIRING_PENDING_MAX);
state.requests = capped;
writeChannelPairingStateToDatabase(database, params.channel, state);
return { code, created: false };
}
const { requests: capped, removed: cappedRemoved } = pruneExcessRequestsByAccount(
reqs,
PAIRING_PENDING_MAX,
);
reqs = capped;
const accountRequestCount = reqs.filter((r) =>
requestMatchesAccountId(r, normalizedMatchingAccountId),
).length;
if (PAIRING_PENDING_MAX > 0 && accountRequestCount >= PAIRING_PENDING_MAX) {
if (expiredRemoved || cappedRemoved) {
state.requests = reqs;
writeChannelPairingStateToDatabase(database, params.channel, state);
}
return { code: "", created: false };
}
const code = generateUniqueCode(existingCodes);
const next: PairingRequest = {
id,
code,
createdAt: now,
lastSeenAt: now,
...(meta ? { meta } : {}),
};
state.requests = [...reqs, next];
writeChannelPairingStateToDatabase(database, params.channel, state);
return { code, created: true };
}, sqliteOptionsForEnv(env));
}
export async function approveChannelPairingCode(params: {
@@ -647,45 +603,130 @@ export async function approveChannelPairingCode(params: {
return null;
}
const filePath = resolvePairingPath(params.channel, env);
return await withFileLock(
filePath,
{ version: 1, requests: [] } satisfies PairingStore,
async () => {
const { requests: pruned, removed } = await readPrunedPairingRequests(filePath);
const normalizedAccountId = normalizePairingAccountId(params.accountId);
const idx = pruned.findIndex((r) => {
if (r.code.toUpperCase() !== code) {
return false;
}
return requestMatchesAccountId(r, normalizedAccountId);
});
if (idx < 0) {
if (removed) {
await writeJsonFile(filePath, {
version: 1,
requests: pruned,
} satisfies PairingStore);
}
return null;
return runOpenClawStateWriteTransaction((database) => {
const state = readChannelPairingStateFromDatabase(database, params.channel);
const { requests: pruned, removed } = pruneExpiredRequests(state.requests, Date.now());
const normalizedAccountId = normalizePairingAccountId(params.accountId);
const idx = pruned.findIndex((r) => {
if (r.code.toUpperCase() !== code) {
return false;
}
const entry = pruned[idx];
if (!entry) {
return null;
return requestMatchesAccountId(r, normalizedAccountId);
});
if (idx < 0) {
if (removed) {
state.requests = pruned;
writeChannelPairingStateToDatabase(database, params.channel, state);
}
pruned.splice(idx, 1);
await writeJsonFile(filePath, {
version: 1,
requests: pruned,
} satisfies PairingStore);
const entryAccountId = normalizeOptionalString(entry.meta?.accountId);
await addChannelAllowFromStoreEntry({
channel: params.channel,
entry: entry.id,
accountId: normalizeOptionalString(params.accountId) ?? entryAccountId,
env,
});
return { id: entry.id, entry };
},
return null;
}
const entry = pruned[idx];
if (!entry) {
return null;
}
pruned.splice(idx, 1);
state.requests = pruned;
const entryAccountId = normalizeOptionalString(entry.meta?.accountId);
const allowAccountId = resolveAllowFromAccountId(
normalizeOptionalString(params.accountId) ?? entryAccountId,
);
const currentAllow = state.allowFrom?.[allowAccountId] ?? [];
const normalizedAllow = normalizeAllowFromInput(params.channel, entry.id);
if (normalizedAllow && !currentAllow.includes(normalizedAllow)) {
state.allowFrom ??= {};
state.allowFrom[allowAccountId] = [...currentAllow, normalizedAllow];
}
writeChannelPairingStateToDatabase(database, params.channel, state);
return { id: entry.id, entry };
}, sqliteOptionsForEnv(env));
}
/**
 * Reports whether any legacy on-disk channel pairing artifacts remain.
 *
 * Scans the pairing credentials directory for files carrying either the
 * legacy pairing-store suffix or the legacy allow-from suffix. A missing or
 * unreadable directory is treated as "no legacy files".
 */
export async function legacyChannelPairingFilesExist(
  env: NodeJS.ProcessEnv = process.env,
): Promise<boolean> {
  const credentialsDir = resolvePairingCredentialsDir(env);
  let names: string[] = [];
  try {
    names = await fs.promises.readdir(credentialsDir);
  } catch {
    return false;
  }
  for (const name of names) {
    if (name.endsWith(LEGACY_PAIRING_SUFFIX) || name.endsWith(LEGACY_ALLOW_FROM_SUFFIX)) {
      return true;
    }
  }
  return false;
}
/**
 * Derives the channel/account pair encoded in a legacy allow-from filename.
 *
 * Filenames look like `<channel>[-<accountId>]<suffix>`. Known channel ids
 * are matched longest-first so a channel name that is a prefix of another
 * channel cannot shadow it. When the stem matches no known channel, the
 * entire stem is treated as the channel with the default account id.
 */
function parseAllowFromFilename(filename: string): { channel: string; accountId: string } | null {
  if (!filename.endsWith(LEGACY_ALLOW_FROM_SUFFIX)) {
    return null;
  }
  const stem = filename.slice(0, filename.length - LEGACY_ALLOW_FROM_SUFFIX.length);
  // Spread into a fresh array so sorting never mutates CHANNEL_IDS.
  const byLengthDesc = [...CHANNEL_IDS].sort((a, b) => b.length - a.length);
  const matched = byLengthDesc.find((id) => stem === id || stem.startsWith(`${id}-`));
  if (!matched) {
    return { channel: stem, accountId: DEFAULT_ACCOUNT_ID };
  }
  if (stem === matched) {
    return { channel: matched, accountId: DEFAULT_ACCOUNT_ID };
  }
  const accountId = stem.slice(matched.length + 1);
  return {
    channel: matched,
    accountId: accountId === "" ? DEFAULT_ACCOUNT_ID : accountId,
  };
}
/**
 * Extracts the channel name from a legacy pairing-store filename, or returns
 * null when the filename does not carry the legacy pairing suffix.
 */
function parsePairingFilename(filename: string): string | null {
  return filename.endsWith(LEGACY_PAIRING_SUFFIX)
    ? filename.slice(0, filename.length - LEGACY_PAIRING_SUFFIX.length)
    : null;
}
/**
 * Loads a legacy pairing-store JSON file from disk.
 *
 * Returns a normalized store (version pinned to 1, `requests` coerced to an
 * array) or null when the file is missing, unreadable, or does not parse as
 * JSON. The property access stays inside the try so a JSON `null` payload
 * also yields null rather than throwing.
 */
async function readLegacyPairingStore(filePath: string): Promise<PairingStore | null> {
  try {
    const raw = await fs.promises.readFile(filePath, "utf8");
    const parsed = JSON.parse(raw) as PairingStore;
    const requests = Array.isArray(parsed.requests) ? parsed.requests : [];
    return { version: 1, requests };
  } catch {
    return null;
  }
}
/**
 * One-shot migration of legacy on-disk channel pairing files into SQLite.
 *
 * Walks the pairing credentials directory; for each legacy pairing-store file
 * the stored requests are written into the channel's SQLite-backed state, and
 * for each legacy allow-from file the parsed entries are written under the
 * resolved account id. Every recognized legacy file is deleted after import
 * (deletion failures are ignored so a partial import can be retried later).
 *
 * NOTE(review): both imports replace the existing SQLite value (`state.requests`
 * and the per-account allow-from list) rather than merging — presumably fine
 * for a one-time migration; confirm no SQLite state can predate the import.
 *
 * @param env Process environment used to resolve the credentials directory
 *   and the SQLite database location.
 * @returns Counts of legacy files processed, pairing requests imported, and
 *   allow-from entries imported.
 */
export async function importLegacyChannelPairingFilesToSqlite(
  env: NodeJS.ProcessEnv = process.env,
): Promise<{ files: number; requests: number; allowFrom: number }> {
  const dir = resolvePairingCredentialsDir(env);
  // Missing/unreadable directory is treated as "nothing to migrate".
  const filenames = await fs.promises.readdir(dir).catch(() => []);
  let files = 0;
  let requests = 0;
  let allowFrom = 0;
  for (const filename of filenames) {
    const filePath = path.join(dir, filename);
    const pairingChannel = parsePairingFilename(filename);
    if (pairingChannel) {
      const legacy = await readLegacyPairingStore(filePath);
      if (legacy) {
        const state = readChannelPairingState(pairingChannel, env);
        state.requests = legacy.requests;
        writeChannelPairingState(pairingChannel, state, env);
        requests += legacy.requests.length;
      }
      // Delete even when parsing failed; an unparseable legacy file is unusable.
      await fs.promises.rm(filePath, { force: true }).catch(() => undefined);
      files += 1;
      continue;
    }
    const allowFromTarget = parseAllowFromFilename(filename);
    if (allowFromTarget) {
      // Named `allowEntries` (not `entries`) to avoid shadowing the directory
      // listing above — the original shadowed it, tripping no-shadow lint.
      const allowEntries = await readAllowFromStateForPath(allowFromTarget.channel, filePath);
      const state = readChannelPairingState(allowFromTarget.channel, env);
      state.allowFrom ??= {};
      state.allowFrom[resolveAllowFromAccountId(allowFromTarget.accountId)] = allowEntries;
      writeChannelPairingState(allowFromTarget.channel, state, env);
      allowFrom += allowEntries.length;
      await fs.promises.rm(filePath, { force: true }).catch(() => undefined);
      files += 1;
    }
  }
  return { files, requests, allowFrom };
}