refactor: move session management to sqlite

This commit is contained in:
Peter Steinberger
2026-05-06 22:20:16 +01:00
parent 1378f868d5
commit a47c8bf48d
97 changed files with 2637 additions and 7741 deletions

View File

@@ -1186,7 +1186,6 @@ See [Multi-Agent Sandbox & Tools](/tools/multi-agent-sandbox-tools) for preceden
mode: "warn", // warn | enforce
pruneAfter: "30d",
maxEntries: 500,
resetArchiveRetention: "30d", // duration or false
maxDiskBytes: "500mb", // optional hard budget
highWaterBytes: "400mb", // optional cleanup target
},
@@ -1227,7 +1226,6 @@ See [Multi-Agent Sandbox & Tools](/tools/multi-agent-sandbox-tools) for preceden
- `pruneAfter`: age cutoff for stale entries (default `30d`).
- `maxEntries`: maximum number of entries in the session store (default `500`). Runtime writes do not prune or cap entries; `openclaw sessions cleanup --enforce` applies the cap immediately.
- `rotateBytes`: deprecated and ignored; `openclaw doctor --fix` removes it from older configs.
- `resetArchiveRetention`: retention for `*.reset.<timestamp>` transcript archives. Defaults to `pruneAfter`; set `false` to disable.
- `maxDiskBytes`: optional sessions-directory disk budget. In `warn` mode it logs warnings; in `enforce` mode it removes oldest artifacts/sessions first.
- `highWaterBytes`: optional target after budget cleanup. Defaults to `80%` of `maxDiskBytes`.
- **`threadBindings`**: global defaults for thread-bound session features.

View File

@@ -168,7 +168,6 @@ Save to `~/.openclaw/openclaw.json` and you can DM the bot from that number.
mode: "warn",
pruneAfter: "30d",
maxEntries: 500,
resetArchiveRetention: "30d", // duration or false
maxDiskBytes: "500mb", // optional
highWaterBytes: "400mb", // optional (defaults to 80% of maxDiskBytes)
},

View File

@@ -62,15 +62,14 @@ src/agents/
│ ├── model.ts # Model resolution via ModelRegistry
│ ├── runs.ts # Active run tracking, abort, queue
│ ├── sandbox-info.ts # Sandbox info for system prompt
│ ├── session-manager-cache.ts # SessionManager instance caching
│ ├── system-prompt.ts # System prompt builder
│ ├── tool-split.ts # Split tools into builtIn vs custom
│ ├── types.ts # EmbeddedPiAgentMeta, EmbeddedPiRunResult
│ └── utils.ts # ThinkLevel mapping, error description
├── transcript/
│ ├── session-transcript-contract.ts # OpenClaw-owned transcript/session types
│ ├── session-manager.ts # OpenClaw-owned file-backed SessionManager
│ └── transcript-file-state.ts # JSONL parse/mutate/write helpers
│ ├── session-manager.ts # OpenClaw-owned SQLite-backed SessionManager
│ └── transcript-file-state.ts # PI-compatible transcript state adapter
├── pi-embedded-subscribe.ts # Session event subscription/dispatch
├── pi-embedded-subscribe.types.ts # SubscribeEmbeddedPiSessionParams
├── pi-embedded-subscribe.handlers.ts # Event handler factory
@@ -301,9 +300,9 @@ applySystemPromptOverrideToSession(session, systemPromptOverride);
## Session management
### Session files
### Session transcripts
Sessions are JSONL files with tree structure (id/parentId linking). OpenClaw owns the file-backed `SessionManager` value and keeps the PI-compatible shape behind `src/agents/transcript/session-transcript-contract.ts`:
Sessions are SQLite-backed event streams with tree structure (id/parentId linking). JSONL is legacy doctor-import/export/debug shape. OpenClaw owns the PI-compatible `SessionManager` shape behind `src/agents/transcript/session-transcript-contract.ts`:
```typescript
const sessionManager = openTranscriptSessionManager({ sessionFile: params.sessionFile });
@@ -311,16 +310,6 @@ const sessionManager = openTranscriptSessionManager({ sessionFile: params.sessio
OpenClaw wraps this with `guardSessionManager()` for tool result safety.
### Session caching
`session-manager-cache.ts` caches SessionManager instances to avoid repeated file parsing:
```typescript
await prewarmSessionFile(params.sessionFile);
sessionManager = openTranscriptSessionManager({ sessionFile: params.sessionFile });
trackSessionManagerAccess(params.sessionFile);
```
### History limiting
`limitHistoryTurns()` trims conversation history based on channel type (DM vs group).

View File

@@ -76,7 +76,9 @@ This plan has started landing in slices:
transcript. Legacy JSONL import is doctor/import/debug only: `openclaw doctor
--fix` builds the transcript database from old files and removes the JSONL
sources after successful import. Runtime paths do not import, prune, or repair
JSONL files.
JSONL files. Pre-compaction checkpoints are SQLite transcript snapshots, not
`.checkpoint.*.jsonl` copies; branch/restore and checkpoint pruning now work
against snapshot rows. The old PI session-manager cache/prewarm layer is gone.
- `AgentFilesystem` and `SqliteVirtualAgentFs` exist for scratch storage, with
`disk`, `vfs-scratch`, and `vfs-only` filesystem modes at the runtime
boundary. VFS contents can be listed and exported for support bundles. When
@@ -110,6 +112,11 @@ This plan has started landing in slices:
the primary persistent cache. The older
`cache/openrouter-models.json` file is a legacy import source and is removed
after import.
- Codex app-server thread bindings now use the shared SQLite `kv` store as the
only runtime record path. The old per-session
`.codex-app-server.json` sidecar reader/writer has been removed from runtime
and tests now seed the binding store directly. `openclaw doctor --fix`
imports old sidecars into SQLite and removes the JSON source.
- TUI last-session restore pointers now use the shared SQLite `kv` store as the
primary record path. The older `tui/last-session.json` file is a legacy
import source and is removed after import.
@@ -586,6 +593,10 @@ Phase 5: transcript ownership
- Store transcript events in SQLite.
- Import legacy JSONL through doctor only; export JSONL for debugging/support.
- Remove direct PI `SessionManager` usage from non-adapter code.
- Remove file-backed compaction checkpoint copies and the session-manager
cache/prewarm layer.
- Move Codex app-server binding state from per-session JSON sidecars to the
shared SQLite `kv` table.
Phase 6: internalize or replace PI pieces

View File

@@ -56,9 +56,8 @@ OpenClaw persists sessions in two layers:
hook payloads, silent session rotations, chat history, TUI history,
recovery, managed media indexing, token estimation, title/preview/usage
helpers, and bounded session inspection read the scoped SQLite transcript.
- Large pre-compaction debug checkpoints are skipped once the active
transcript exceeds the checkpoint size cap, avoiding a second giant
`.checkpoint.*.jsonl` copy.
- Pre-compaction checkpoints are SQLite transcript snapshots. OpenClaw does
not create `.checkpoint.*.jsonl` copies on the runtime path.
Gateway history readers should avoid materializing the whole transcript unless
the surface explicitly needs arbitrary historical access. First-page history,
@@ -88,16 +87,15 @@ OpenClaw resolves these via `src/config/sessions/*`.
## Store maintenance and disk controls
Session persistence has explicit maintenance controls (`session.maintenance`) for session entries, transcript artifacts, and trajectory sidecars:
Session persistence has explicit maintenance controls (`session.maintenance`) for session entries and trajectory sidecars:
- `mode`: `warn` (default) or `enforce`
- `pruneAfter`: stale-entry age cutoff (default `30d`)
- `maxEntries`: cap entries in the session store (default `500`)
- `resetArchiveRetention`: retention for `*.reset.<timestamp>` transcript archives (default: same as `pruneAfter`; `false` disables cleanup)
- `maxDiskBytes`: optional sessions-directory budget
- `highWaterBytes`: optional target after cleanup (default `80%` of `maxDiskBytes`)
Normal Gateway writes flow through a per-store session writer that serializes in-process mutations. SQLite is the canonical per-agent backend; `sessions.json` is a legacy doctor-import input, not a parallel export/debug store. Runtime code should prefer `updateSessionStore(...)` or `updateSessionStoreEntry(...)`. Runtime writes normalize and persist only; they do not prune, cap, import, archive, or run disk-budget cleanup. When a Gateway is reachable, non-dry-run `openclaw sessions cleanup` and `openclaw agents delete` delegate store mutations to the Gateway so cleanup joins the same writer queue. Session store reads do not import, prune, or cap entries during Gateway startup; use `openclaw doctor --fix` for legacy JSON import and `openclaw sessions cleanup --enforce` for cleanup. `openclaw sessions cleanup --enforce` applies the configured cap immediately and prunes old unreferenced transcript, checkpoint, and trajectory artifacts even when no disk budget is configured.
Normal Gateway writes flow through a per-store session writer that serializes in-process mutations. SQLite is the canonical per-agent backend; `sessions.json` is a legacy doctor-import input, not a parallel export/debug store. Runtime code should prefer `updateSessionStore(...)` or `updateSessionStoreEntry(...)`. Runtime writes normalize and persist only; they do not prune, cap, import, archive, or run disk-budget cleanup. When a Gateway is reachable, non-dry-run `openclaw sessions cleanup` and `openclaw agents delete` delegate store mutations to the Gateway so cleanup joins the same writer queue. Session store reads do not import, prune, or cap entries during Gateway startup; use `openclaw doctor --fix` for legacy JSON import and `openclaw sessions cleanup --enforce` for cleanup. `openclaw sessions cleanup --enforce` applies the configured cap immediately and prunes old unreferenced trajectory artifacts even when no disk budget is configured. Compaction checkpoint cleanup removes SQLite snapshot rows, not file artifacts.
Maintenance keeps durable external conversation pointers such as group sessions
and thread-scoped chat sessions, but synthetic runtime entries for cron, hooks,
@@ -112,8 +110,8 @@ setting remains for older import/debug paths that still touch JSONL files.
Enforcement order for disk budget cleanup (`mode: "enforce"`):
1. Remove oldest archived, orphan transcript, or orphan trajectory artifacts first.
2. If still above the target, evict oldest session entries and their transcript/trajectory files.
1. Remove oldest orphan trajectory artifacts first.
2. If still above the target, evict oldest session entries and their trajectory sidecars.
3. Keep going until usage is at or below `highWaterBytes`.
In `mode: "warn"`, OpenClaw reports potential evictions but does not mutate the store/files.
@@ -353,9 +351,9 @@ OpenClaw also enforces a safety floor for embedded runs:
`truncateAfterCompaction` is also enabled. Leave it unset or set `0` to
disable.
- When `agents.defaults.compaction.truncateAfterCompaction` is enabled,
OpenClaw rotates the active transcript to a compacted successor JSONL after
compaction. The old full transcript remains archived and linked from the
compaction checkpoint instead of being rewritten in place.
OpenClaw rewrites the active SQLite transcript to the compacted successor
after compaction. The old full transcript is available only through the
SQLite pre-compaction checkpoint snapshot while retained.
Why: leave enough headroom for multi-turn "housekeeping" (like memory writes) before compaction becomes unavoidable.

View File

@@ -7,6 +7,7 @@ import {
formatToolProgressOutput,
inferToolMetaFromArgs,
normalizeUsage,
resolveSessionAgentIds,
runAgentHarnessAfterCompactionHook,
runAgentHarnessBeforeCompactionHook,
TOOL_PROGRESS_OUTPUT_MAX_CHARS,
@@ -1046,7 +1047,18 @@ export class CodexAppServerEventProjector {
}
private async readMirroredSessionMessages(): Promise<AgentMessage[]> {
return (await readCodexMirroredSessionHistoryMessages(this.params.sessionFile)) ?? [];
const { sessionAgentId } = resolveSessionAgentIds({
agentId: this.params.agentId,
config: this.params.config,
sessionKey: this.params.sessionKey,
});
return (
(await readCodexMirroredSessionHistoryMessages({
agentId: sessionAgentId,
sessionFile: this.params.sessionFile,
sessionId: this.params.sessionId,
})) ?? []
);
}
private createAssistantMessage(text: string): AssistantMessage {

View File

@@ -13,6 +13,7 @@ import {
emitAgentEvent as emitGlobalAgentEvent,
finalizeHarnessContextEngineTurn,
formatErrorMessage,
hasSqliteSessionTranscriptEvents,
isActiveHarnessContextEngine,
isSubagentSessionKey,
normalizeAgentRuntimeTools,
@@ -541,8 +542,16 @@ export async function runCodexAppServerAttempt(
runId: params.runId,
},
});
const hadSessionFile = await pathExists(params.sessionFile);
let historyMessages = (await readMirroredSessionHistoryMessages(params.sessionFile)) ?? [];
const hadSessionFile = hasSqliteSessionTranscriptEvents({
agentId: sessionAgentId,
sessionId: params.sessionId,
});
let historyMessages =
(await readMirroredSessionHistoryMessages({
agentId: sessionAgentId,
sessionFile: params.sessionFile,
sessionId: params.sessionId,
})) ?? [];
const hookContext = {
runId: params.runId,
agentId: sessionAgentId,
@@ -571,7 +580,11 @@ export async function runCodexAppServerAttempt(
warn: (message) => embeddedAgentLog.warn(message),
});
historyMessages =
(await readMirroredSessionHistoryMessages(params.sessionFile)) ?? historyMessages;
(await readMirroredSessionHistoryMessages({
agentId: sessionAgentId,
sessionFile: params.sessionFile,
sessionId: params.sessionId,
})) ?? historyMessages;
}
const baseDeveloperInstructions = buildDeveloperInstructions(params);
// Build the workspace bootstrap block before finalizing developer
@@ -1486,8 +1499,11 @@ export async function runCodexAppServerAttempt(
}
if (activeContextEngine) {
const finalMessages =
(await readMirroredSessionHistoryMessages(params.sessionFile)) ??
historyMessages.concat(result.messagesSnapshot);
(await readMirroredSessionHistoryMessages({
agentId: sessionAgentId,
sessionFile: params.sessionFile,
sessionId: params.sessionId,
})) ?? historyMessages.concat(result.messagesSnapshot);
await finalizeHarnessContextEngineTurn({
contextEngine: activeContextEngine,
promptError: Boolean(finalPromptError),
@@ -2195,18 +2211,15 @@ function readString(record: JsonObject, key: string): string | undefined {
return typeof value === "string" ? value : undefined;
}
/** Returns `record[key]` when it is a boolean, otherwise `undefined`. */
function readBoolean(record: JsonObject, key: string): boolean | undefined {
  const candidate = record[key];
  if (typeof candidate === "boolean") {
    return candidate;
  }
  return undefined;
}
async function readMirroredSessionHistoryMessages(
sessionFile: string,
): Promise<AgentMessage[] | undefined> {
const messages = await readCodexMirroredSessionHistoryMessages(sessionFile);
async function readMirroredSessionHistoryMessages(scope: {
agentId: string;
sessionFile: string;
sessionId: string;
}): Promise<AgentMessage[] | undefined> {
const messages = await readCodexMirroredSessionHistoryMessages(scope);
if (!messages) {
embeddedAgentLog.warn("failed to read mirrored session history for codex harness hooks", {
sessionFile,
sessionFile: scope.sessionFile,
});
}
return messages;

View File

@@ -1,16 +1,18 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { writeOpenClawStateKvJson } from "openclaw/plugin-sdk/agent-harness-runtime";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import {
clearCodexAppServerBinding,
readCodexAppServerBinding,
resolveCodexAppServerBindingPath,
writeCodexAppServerBinding,
type CodexAppServerAuthProfileLookup,
} from "./session-binding.js";
const CODEX_APP_SERVER_BINDING_KV_SCOPE = "codex_app_server_thread_bindings";
let tempDir: string;
let previousStateDir: string | undefined;
const nativeAuthLookup: Pick<CodexAppServerAuthProfileLookup, "authProfileStore"> = {
authProfileStore: {
@@ -30,13 +32,20 @@ const nativeAuthLookup: Pick<CodexAppServerAuthProfileLookup, "authProfileStore"
describe("codex app-server session binding", () => {
beforeEach(async () => {
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-codex-binding-"));
previousStateDir = process.env.OPENCLAW_STATE_DIR;
process.env.OPENCLAW_STATE_DIR = tempDir;
});
afterEach(async () => {
if (previousStateDir === undefined) {
delete process.env.OPENCLAW_STATE_DIR;
} else {
process.env.OPENCLAW_STATE_DIR = previousStateDir;
}
await fs.rm(tempDir, { recursive: true, force: true });
});
it("round-trips the thread binding beside the PI session file", async () => {
it("round-trips the thread binding through SQLite", async () => {
const sessionFile = path.join(tempDir, "session.json");
await writeCodexAppServerBinding(sessionFile, {
threadId: "thread-123",
@@ -57,8 +66,6 @@ describe("codex app-server session binding", () => {
modelProvider: "openai",
dynamicToolsFingerprint: "tools-v1",
});
const bindingStat = await fs.stat(resolveCodexAppServerBindingPath(sessionFile));
expect(bindingStat.isFile()).toBe(true);
});
it("round-trips plugin app policy context with app ids as record keys", async () => {
@@ -91,33 +98,30 @@ describe("codex app-server session binding", () => {
it("rejects old plugin app policy entries that duplicate the app id", async () => {
const sessionFile = path.join(tempDir, "session.json");
await fs.writeFile(
resolveCodexAppServerBindingPath(sessionFile),
`${JSON.stringify({
schemaVersion: 1,
threadId: "thread-123",
sessionFile,
cwd: tempDir,
pluginAppPolicyContext: {
fingerprint: "plugin-policy-1",
apps: {
"google-calendar-app": {
appId: "google-calendar-app",
configKey: "google-calendar",
marketplaceName: "openai-curated",
pluginName: "google-calendar",
allowDestructiveActions: true,
mcpServerNames: ["google-calendar"],
},
},
pluginAppIds: {
"google-calendar": ["google-calendar-app"],
writeOpenClawStateKvJson(CODEX_APP_SERVER_BINDING_KV_SCOPE, sessionFile, {
schemaVersion: 1,
threadId: "thread-123",
sessionFile,
cwd: tempDir,
pluginAppPolicyContext: {
fingerprint: "plugin-policy-1",
apps: {
"google-calendar-app": {
appId: "google-calendar-app",
configKey: "google-calendar",
marketplaceName: "openai-curated",
pluginName: "google-calendar",
allowDestructiveActions: true,
mcpServerNames: ["google-calendar"],
},
},
createdAt: "2026-05-03T00:00:00.000Z",
updatedAt: "2026-05-03T00:00:00.000Z",
})}\n`,
);
pluginAppIds: {
"google-calendar": ["google-calendar-app"],
},
},
createdAt: "2026-05-03T00:00:00.000Z",
updatedAt: "2026-05-03T00:00:00.000Z",
});
const binding = await readCodexAppServerBinding(sessionFile);
@@ -138,10 +142,8 @@ describe("codex app-server session binding", () => {
nativeAuthLookup,
);
const raw = await fs.readFile(resolveCodexAppServerBindingPath(sessionFile), "utf8");
const binding = await readCodexAppServerBinding(sessionFile, nativeAuthLookup);
expect(raw).not.toContain('"modelProvider": "openai"');
expect(binding).toMatchObject({
threadId: "thread-123",
authProfileId: "work",
@@ -152,20 +154,17 @@ describe("codex app-server session binding", () => {
it("normalizes older Codex-native bindings that stored public OpenAI provider", async () => {
const sessionFile = path.join(tempDir, "session.json");
await fs.writeFile(
resolveCodexAppServerBindingPath(sessionFile),
`${JSON.stringify({
schemaVersion: 1,
threadId: "thread-123",
sessionFile,
cwd: tempDir,
authProfileId: "work",
model: "gpt-5.4-mini",
modelProvider: "openai",
createdAt: "2026-05-03T00:00:00.000Z",
updatedAt: "2026-05-03T00:00:00.000Z",
})}\n`,
);
writeOpenClawStateKvJson(CODEX_APP_SERVER_BINDING_KV_SCOPE, sessionFile, {
schemaVersion: 1,
threadId: "thread-123",
sessionFile,
cwd: tempDir,
authProfileId: "work",
model: "gpt-5.4-mini",
modelProvider: "openai",
createdAt: "2026-05-03T00:00:00.000Z",
updatedAt: "2026-05-03T00:00:00.000Z",
});
const binding = await readCodexAppServerBinding(sessionFile, nativeAuthLookup);
@@ -175,18 +174,15 @@ describe("codex app-server session binding", () => {
it("normalizes legacy fast service tier bindings to Codex priority", async () => {
const sessionFile = path.join(tempDir, "session.json");
await fs.writeFile(
resolveCodexAppServerBindingPath(sessionFile),
`${JSON.stringify({
schemaVersion: 1,
threadId: "thread-123",
sessionFile,
cwd: tempDir,
serviceTier: "fast",
createdAt: "2026-05-03T00:00:00.000Z",
updatedAt: "2026-05-03T00:00:00.000Z",
})}\n`,
);
writeOpenClawStateKvJson(CODEX_APP_SERVER_BINDING_KV_SCOPE, sessionFile, {
schemaVersion: 1,
threadId: "thread-123",
sessionFile,
cwd: tempDir,
serviceTier: "fast",
createdAt: "2026-05-03T00:00:00.000Z",
updatedAt: "2026-05-03T00:00:00.000Z",
});
const binding = await readCodexAppServerBinding(sessionFile);

View File

@@ -1,5 +1,10 @@
import fs from "node:fs/promises";
import { embeddedAgentLog } from "openclaw/plugin-sdk/agent-harness-runtime";
import {
deleteOpenClawStateKvJson,
embeddedAgentLog,
readOpenClawStateKvJson,
writeOpenClawStateKvJson,
type OpenClawStateJsonValue,
} from "openclaw/plugin-sdk/agent-harness-runtime";
import {
ensureAuthProfileStore,
resolveDefaultAgentDir,
@@ -17,6 +22,7 @@ import type { CodexServiceTier } from "./protocol.js";
const CODEX_APP_SERVER_NATIVE_AUTH_PROVIDER = "openai-codex";
const PUBLIC_OPENAI_MODEL_PROVIDER = "openai";
const CODEX_APP_SERVER_BINDING_KV_SCOPE = "codex_app_server_thread_bindings";
type ProviderAuthAliasLookupParams = Parameters<typeof resolveProviderIdForAuth>[1];
type ProviderAuthAliasConfig = NonNullable<ProviderAuthAliasLookupParams>["config"];
@@ -47,65 +53,70 @@ export type CodexAppServerThreadBinding = {
updatedAt: string;
};
export function resolveCodexAppServerBindingPath(sessionFile: string): string {
return `${sessionFile}.codex-app-server.json`;
/**
 * Derives the SQLite kv-store key for a session's Codex app-server binding.
 * Trims surrounding whitespace so padded session-file paths map to the same row.
 */
function codexAppServerBindingKvKey(sessionFile: string): string {
  const normalized = sessionFile.trim();
  return normalized;
}
/**
 * Adapts a thread binding to the kv store's JSON value type.
 *
 * The double cast through `unknown` is deliberate: `CodexAppServerThreadBinding`
 * and `OpenClawStateJsonValue` are structurally unrelated in the type system,
 * but the binding is a plain JSON-serializable object at runtime.
 * NOTE(review): keeping the cast centralized here means any non-JSON field
 * added to the binding type later will not be caught by the compiler — confirm
 * new fields stay JSON-safe.
 */
function codexAppServerBindingToJsonValue(
  binding: CodexAppServerThreadBinding,
): OpenClawStateJsonValue {
  return binding as unknown as OpenClawStateJsonValue;
}
/**
 * Validates and normalizes a raw kv-store value into a well-formed
 * `CodexAppServerThreadBinding`, or returns `undefined` when the value is
 * missing, from an unknown schema version, or lacks a thread id.
 *
 * Field-by-field rules:
 * - Only `schemaVersion === 1` with a string `threadId` is accepted.
 * - `sessionFile` is taken from the caller, not from the stored value, so the
 *   binding always reflects the path it was looked up under.
 * - String fields are kept only when they are actually strings; everything
 *   else falls back to `undefined` (or `""` for `cwd`).
 * - `modelProvider` is re-derived via `normalizeCodexAppServerBindingModelProvider`
 *   using the auth-profile lookup, so legacy provider values get rewritten.
 * - Enum-ish fields go through their dedicated readers
 *   (`readApprovalPolicy` / `readSandboxMode` / `readServiceTier` /
 *   `readPluginAppPolicyContext`).
 * - Missing timestamps default to "now" rather than failing the binding.
 */
function normalizeCodexAppServerBinding(
  sessionFile: string,
  value: unknown,
  lookup: Omit<CodexAppServerAuthProfileLookup, "authProfileId">,
): CodexAppServerThreadBinding | undefined {
  const parsed = value as Partial<CodexAppServerThreadBinding>;
  // Reject anything that is not a v1 binding with a usable thread id.
  if (!parsed || parsed.schemaVersion !== 1 || typeof parsed.threadId !== "string") {
    return undefined;
  }
  const authProfileId = typeof parsed.authProfileId === "string" ? parsed.authProfileId : undefined;
  return {
    schemaVersion: 1,
    threadId: parsed.threadId,
    sessionFile,
    cwd: typeof parsed.cwd === "string" ? parsed.cwd : "",
    authProfileId,
    model: typeof parsed.model === "string" ? parsed.model : undefined,
    // Re-resolve the provider so stale stored values (e.g. public "openai"
    // on Codex-native auth profiles) are normalized on read.
    modelProvider: normalizeCodexAppServerBindingModelProvider({
      ...lookup,
      authProfileId,
      modelProvider: typeof parsed.modelProvider === "string" ? parsed.modelProvider : undefined,
    }),
    approvalPolicy: readApprovalPolicy(parsed.approvalPolicy),
    sandbox: readSandboxMode(parsed.sandbox),
    serviceTier: readServiceTier(parsed.serviceTier),
    dynamicToolsFingerprint:
      typeof parsed.dynamicToolsFingerprint === "string"
        ? parsed.dynamicToolsFingerprint
        : undefined,
    pluginAppsFingerprint:
      typeof parsed.pluginAppsFingerprint === "string" ? parsed.pluginAppsFingerprint : undefined,
    pluginAppsInputFingerprint:
      typeof parsed.pluginAppsInputFingerprint === "string"
        ? parsed.pluginAppsInputFingerprint
        : undefined,
    pluginAppPolicyContext: readPluginAppPolicyContext(parsed.pluginAppPolicyContext),
    // Tolerate missing timestamps on imported/legacy rows.
    createdAt: typeof parsed.createdAt === "string" ? parsed.createdAt : new Date().toISOString(),
    updatedAt: typeof parsed.updatedAt === "string" ? parsed.updatedAt : new Date().toISOString(),
  };
}
export async function readCodexAppServerBinding(
sessionFile: string,
lookup: Omit<CodexAppServerAuthProfileLookup, "authProfileId"> = {},
): Promise<CodexAppServerThreadBinding | undefined> {
const path = resolveCodexAppServerBindingPath(sessionFile);
let raw: string;
try {
raw = await fs.readFile(path, "utf8");
} catch (error) {
if (isNotFound(error)) {
return undefined;
}
embeddedAgentLog.warn("failed to read codex app-server binding", { path, error });
return undefined;
}
try {
const parsed = JSON.parse(raw) as Partial<CodexAppServerThreadBinding>;
if (parsed.schemaVersion !== 1 || typeof parsed.threadId !== "string") {
return undefined;
}
const authProfileId =
typeof parsed.authProfileId === "string" ? parsed.authProfileId : undefined;
return {
schemaVersion: 1,
threadId: parsed.threadId,
sessionFile,
cwd: typeof parsed.cwd === "string" ? parsed.cwd : "",
authProfileId,
model: typeof parsed.model === "string" ? parsed.model : undefined,
modelProvider: normalizeCodexAppServerBindingModelProvider({
...lookup,
authProfileId,
modelProvider: typeof parsed.modelProvider === "string" ? parsed.modelProvider : undefined,
}),
approvalPolicy: readApprovalPolicy(parsed.approvalPolicy),
sandbox: readSandboxMode(parsed.sandbox),
serviceTier: readServiceTier(parsed.serviceTier),
dynamicToolsFingerprint:
typeof parsed.dynamicToolsFingerprint === "string"
? parsed.dynamicToolsFingerprint
: undefined,
pluginAppsFingerprint:
typeof parsed.pluginAppsFingerprint === "string" ? parsed.pluginAppsFingerprint : undefined,
pluginAppsInputFingerprint:
typeof parsed.pluginAppsInputFingerprint === "string"
? parsed.pluginAppsInputFingerprint
: undefined,
pluginAppPolicyContext: readPluginAppPolicyContext(parsed.pluginAppPolicyContext),
createdAt: typeof parsed.createdAt === "string" ? parsed.createdAt : new Date().toISOString(),
updatedAt: typeof parsed.updatedAt === "string" ? parsed.updatedAt : new Date().toISOString(),
};
} catch (error) {
embeddedAgentLog.warn("failed to parse codex app-server binding", { path, error });
const key = codexAppServerBindingKvKey(sessionFile);
if (!key) {
return undefined;
}
return normalizeCodexAppServerBinding(
sessionFile,
readOpenClawStateKvJson(CODEX_APP_SERVER_BINDING_KV_SCOPE, key),
lookup,
);
}
export async function writeCodexAppServerBinding(
@@ -141,9 +152,10 @@ export async function writeCodexAppServerBinding(
createdAt: binding.createdAt ?? now,
updatedAt: now,
};
await fs.writeFile(
resolveCodexAppServerBindingPath(sessionFile),
`${JSON.stringify(payload, null, 2)}\n`,
writeOpenClawStateKvJson(
CODEX_APP_SERVER_BINDING_KV_SCOPE,
codexAppServerBindingKvKey(sessionFile),
codexAppServerBindingToJsonValue(payload),
);
}
@@ -205,17 +217,10 @@ function readPluginAppPolicyContext(value: unknown): PluginAppPolicyContext | un
}
/**
 * Deletes the on-disk Codex app-server binding sidecar for a session, if any.
 * A missing file is treated as success; any other unlink failure is logged
 * and swallowed (best-effort cleanup).
 */
export async function clearCodexAppServerBinding(sessionFile: string): Promise<void> {
  const bindingPath = resolveCodexAppServerBindingPath(sessionFile);
  try {
    await fs.unlink(bindingPath);
  } catch (error) {
    if (isNotFound(error)) {
      return; // already gone — nothing to clean up
    }
    embeddedAgentLog.warn("failed to clear codex app-server binding", { sessionFile, error });
  }
}
function isNotFound(error: unknown): boolean {
return Boolean(error && typeof error === "object" && "code" in error && error.code === "ENOENT");
deleteOpenClawStateKvJson(
CODEX_APP_SERVER_BINDING_KV_SCOPE,
codexAppServerBindingKvKey(sessionFile),
);
}
export function isCodexAppServerNativeAuthProfile(

View File

@@ -1,27 +1,35 @@
import fs from "node:fs/promises";
import type { SessionEntry } from "openclaw/plugin-sdk/agent-harness-runtime";
import type { FileEntry, SessionEntry } from "openclaw/plugin-sdk/agent-harness-runtime";
import {
buildSessionContext,
loadSqliteSessionTranscriptEvents,
migrateSessionEntries,
parseSessionEntries,
resolveSqliteSessionTranscriptScopeForPath,
} from "openclaw/plugin-sdk/agent-harness-runtime";
import type { AgentMessage } from "openclaw/plugin-sdk/agent-harness-runtime";
/** True when `error` looks like a Node.js ENOENT (file-not-found) error. */
function isMissingFileError(error: unknown): boolean {
  if (!error || typeof error !== "object") {
    return false;
  }
  return "code" in error && (error as { code?: unknown }).code === "ENOENT";
}
/**
 * Identifies which mirrored session history to read.
 *
 * Callers that already know the SQLite transcript scope pass `agentId` and
 * `sessionId`; otherwise the scope is resolved from `sessionFile` alone.
 */
export type CodexMirroredSessionHistoryScope = {
  // Path to the session transcript file; also used as a fallback scope resolver.
  sessionFile: string;
  // Optional explicit agent scope — used only when sessionId is also set.
  agentId?: string;
  // Optional explicit session scope — used only when agentId is also set.
  sessionId?: string;
};
export async function readCodexMirroredSessionHistoryMessages(
sessionFile: string,
scope: CodexMirroredSessionHistoryScope,
): Promise<AgentMessage[] | undefined> {
try {
const raw = await fs.readFile(sessionFile, "utf-8");
const entries = parseSessionEntries(raw);
const resolvedScope =
scope.agentId && scope.sessionId
? { agentId: scope.agentId, sessionId: scope.sessionId }
: resolveSqliteSessionTranscriptScopeForPath({ transcriptPath: scope.sessionFile });
if (!resolvedScope) {
return [];
}
const entries = loadSqliteSessionTranscriptEvents(resolvedScope)
.map((entry) => entry.event)
.filter((entry): entry is FileEntry => Boolean(entry && typeof entry === "object"));
if (entries.length === 0) {
return [];
}
const firstEntry = entries[0] as { type?: unknown; id?: unknown } | undefined;
if (firstEntry?.type !== "session" || typeof firstEntry.id !== "string") {
return undefined;
@@ -31,10 +39,7 @@ export async function readCodexMirroredSessionHistoryMessages(
(entry): entry is SessionEntry => entry.type !== "session",
);
return buildSessionContext(sessionEntries).messages;
} catch (error) {
if (isMissingFileError(error)) {
return [];
}
} catch {
return undefined;
}
}

View File

@@ -10,6 +10,11 @@ import {
readRecentCodexRateLimits,
resetCodexRateLimitCacheForTests,
} from "./app-server/rate-limit-cache.js";
import {
readCodexAppServerBinding,
writeCodexAppServerBinding,
type CodexAppServerThreadBinding,
} from "./app-server/session-binding.js";
import { resetSharedCodexAppServerClientForTests } from "./app-server/shared-client.js";
import {
resetCodexDiagnosticsFeedbackStateForTests,
@@ -18,6 +23,7 @@ import {
import { handleCodexCommand } from "./commands.js";
let tempDir: string;
let previousStateDir: string | undefined;
function createContext(
args: string,
@@ -67,6 +73,23 @@ function createDeps(overrides: Partial<CodexCommandDeps> = {}): Partial<CodexCom
};
}
/**
 * Test helper: persists a Codex app-server thread binding for `sessionFile`,
 * defaulting `cwd` to the suite's temp directory when the caller omits it.
 */
async function seedCodexBinding(
  sessionFile: string,
  binding: Partial<CodexAppServerThreadBinding> & { threadId: string },
): Promise<void> {
  const {
    threadId,
    cwd,
    authProfileId,
    model,
    modelProvider,
    approvalPolicy,
    sandbox,
    serviceTier,
    dynamicToolsFingerprint,
  } = binding;
  await writeCodexAppServerBinding(sessionFile, {
    threadId,
    cwd: cwd ?? tempDir,
    authProfileId,
    model,
    modelProvider,
    approvalPolicy,
    sandbox,
    serviceTier,
    dynamicToolsFingerprint,
  });
}
function readDiagnosticsConfirmationToken(
result: PluginCommandResult,
commandPrefix = "/codex diagnostics",
@@ -125,12 +148,19 @@ function expectedDiagnosticsTargetBlock(params: {
describe("codex command", () => {
beforeEach(async () => {
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-codex-command-"));
previousStateDir = process.env.OPENCLAW_STATE_DIR;
process.env.OPENCLAW_STATE_DIR = tempDir;
});
afterEach(async () => {
resetCodexDiagnosticsFeedbackStateForTests();
resetCodexRateLimitCacheForTests();
resetSharedCodexAppServerClientForTests();
if (previousStateDir === undefined) {
delete process.env.OPENCLAW_STATE_DIR;
} else {
process.env.OPENCLAW_STATE_DIR = previousStateDir;
}
await fs.rm(tempDir, { recursive: true, force: true });
});
@@ -169,9 +199,9 @@ describe("codex command", () => {
params: { threadId: "thread-123", persistExtendedHistory: true },
},
]);
await expect(fs.readFile(`${sessionFile}.codex-app-server.json`, "utf8")).resolves.toContain(
'"threadId": "thread-123"',
);
await expect(readCodexAppServerBinding(sessionFile)).resolves.toMatchObject({
threadId: "thread-123",
});
});
it("rejects malformed resume commands before attaching a Codex thread", async () => {
@@ -607,10 +637,7 @@ describe("codex command", () => {
it("starts compaction for the attached Codex thread", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-123", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, { schemaVersion: 1, threadId: "thread-123", cwd: "/repo" });
const codexControlRequest = vi.fn(async () => ({}));
const deps = createDeps({
codexControlRequest,
@@ -628,10 +655,7 @@ describe("codex command", () => {
it("starts review with the generated app-server target shape", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-123", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, { schemaVersion: 1, threadId: "thread-123", cwd: "/repo" });
const codexControlRequest = vi.fn(async () => ({}));
await expect(
@@ -670,10 +694,11 @@ describe("codex command", () => {
it("escapes started thread-action ids before chat display", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-123 <@U123>", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-123 <@U123>",
cwd: "/repo",
});
const codexControlRequest = vi.fn(async () => ({}));
const result = await handleCodexCommand(createContext("compact", sessionFile), {
@@ -813,10 +838,7 @@ describe("codex command", () => {
it("asks before sending diagnostics feedback for the attached Codex thread", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-123", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, { schemaVersion: 1, threadId: "thread-123", cwd: "/repo" });
const safeCodexControlRequest = vi.fn(async () => ({
ok: true as const,
value: { threadId: "thread-123" },
@@ -911,10 +933,11 @@ describe("codex command", () => {
it("rejects malformed diagnostics confirmation commands without consuming the token", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-confirm-args", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-confirm-args",
cwd: "/repo",
});
const safeCodexControlRequest = vi.fn(async () => ({
ok: true as const,
value: { threadId: "thread-confirm-args" },
@@ -958,10 +981,11 @@ describe("codex command", () => {
it("previews exec-approved diagnostics upload without exposing Codex ids", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-preview", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-preview",
cwd: "/repo",
});
const safeCodexControlRequest = vi.fn(async () => ({
ok: true as const,
value: { threadId: "thread-preview" },
@@ -996,10 +1020,11 @@ describe("codex command", () => {
it("sends diagnostics feedback immediately after exec approval", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-approved", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-approved",
cwd: "/repo",
});
const safeCodexControlRequest = vi.fn(async () => ({
ok: true as const,
value: { threadId: "thread-approved" },
@@ -1048,14 +1073,16 @@ describe("codex command", () => {
it("uploads all Codex diagnostics sessions and reports their channel/thread breakdown", async () => {
const firstSessionFile = path.join(tempDir, "session-one.jsonl");
const secondSessionFile = path.join(tempDir, "session-two.jsonl");
await fs.writeFile(
`${firstSessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-111", cwd: "/repo" }),
);
await fs.writeFile(
`${secondSessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-222", cwd: "/repo" }),
);
await seedCodexBinding(firstSessionFile, {
schemaVersion: 1,
threadId: "thread-111",
cwd: "/repo",
});
await seedCodexBinding(secondSessionFile, {
schemaVersion: 1,
threadId: "thread-222",
cwd: "/repo",
});
const safeCodexControlRequest = vi.fn(async (_config, _method, requestParams) => ({
ok: true as const,
value: {
@@ -1148,10 +1175,11 @@ describe("codex command", () => {
it("requires an owner for Codex diagnostics feedback uploads", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-owner", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-owner",
cwd: "/repo",
});
const safeCodexControlRequest = vi.fn(async () => ({
ok: true as const,
value: { threadId: "thread-owner" },
@@ -1172,10 +1200,11 @@ describe("codex command", () => {
it("refuses diagnostics confirmations without a stable sender identity", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-sender-required", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-sender-required",
cwd: "/repo",
});
await expect(
handleCodexCommand(
@@ -1191,10 +1220,11 @@ describe("codex command", () => {
it("keeps diagnostics confirmation scoped to the requesting sender", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-sender", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-sender",
cwd: "/repo",
});
const safeCodexControlRequest = vi.fn(async () => ({
ok: true as const,
value: { threadId: "thread-sender" },
@@ -1278,10 +1308,11 @@ describe("codex command", () => {
it("keeps diagnostics confirmation scoped to account and channel identity", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-account", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-account",
cwd: "/repo",
});
const safeCodexControlRequest = vi.fn(async () => ({
ok: true as const,
value: { threadId: "thread-account" },
@@ -1319,16 +1350,15 @@ describe("codex command", () => {
it("allows private-routed diagnostics confirmations from the owner DM", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-private", cwd: "/repo" }),
);
const safeCodexControlRequest = vi.fn(
async (_pluginConfig: unknown, _method: string, _requestParams: unknown) => ({
ok: true as const,
value: { threadId: "thread-private" },
}),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-private",
cwd: "/repo",
});
const safeCodexControlRequest = vi.fn(async () => ({
ok: true as const,
value: { threadId: "thread-private" },
}));
const deps = createDeps({ safeCodexControlRequest });
const request = await handleCodexCommand(
@@ -1373,10 +1403,11 @@ describe("codex command", () => {
it("keeps diagnostics confirmation eviction scoped to account identity", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-confirm-scope", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-confirm-scope",
cwd: "/repo",
});
const firstRequest = await handleCodexCommand(
createContext("diagnostics", sessionFile, {
@@ -1419,16 +1450,11 @@ describe("codex command", () => {
it("bounds diagnostics notes before upload", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-789", cwd: "/repo" }),
);
const safeCodexControlRequest = vi.fn(
async (_pluginConfig: unknown, _method: string, _requestParams: unknown) => ({
ok: true as const,
value: { threadId: "thread-789" },
}),
);
await seedCodexBinding(sessionFile, { schemaVersion: 1, threadId: "thread-789", cwd: "/repo" });
const safeCodexControlRequest = vi.fn(async () => ({
ok: true as const,
value: { threadId: "thread-789" },
}));
const note = "x".repeat(2050);
const deps = createDeps({ safeCodexControlRequest });
@@ -1446,10 +1472,11 @@ describe("codex command", () => {
it("escapes diagnostics notes before showing approval text", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-note", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-note",
cwd: "/repo",
});
const request = await handleCodexCommand(
createContext("diagnostics <@U123> [trusted](https://evil) @here `tick`", sessionFile),
@@ -1465,10 +1492,11 @@ describe("codex command", () => {
it("throttles repeated diagnostics uploads for the same thread", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-cooldown", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-cooldown",
cwd: "/repo",
});
const safeCodexControlRequest = vi.fn(async () => ({
ok: true as const,
value: { threadId: "thread-cooldown" },
@@ -1507,10 +1535,11 @@ describe("codex command", () => {
const deps = createDeps({ safeCodexControlRequest });
const sessionFile = path.join(tempDir, "global-cooldown-session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-global-1", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-global-1",
cwd: "/repo",
});
const request = await handleCodexCommand(createContext("diagnostics first", sessionFile), {
deps,
});
@@ -1528,10 +1557,11 @@ describe("codex command", () => {
].join("\n"),
});
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-global-2", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-global-2",
cwd: "/repo",
});
await expect(
handleCodexCommand(createContext("diagnostics second", sessionFile), { deps }),
).resolves.toEqual({
@@ -1549,10 +1579,11 @@ describe("codex command", () => {
const deps = createDeps({ safeCodexControlRequest });
const sessionFile = path.join(tempDir, "scoped-cooldown-session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-scope-1", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-scope-1",
cwd: "/repo",
});
const firstRequest = await handleCodexCommand(
createContext("diagnostics first", sessionFile, {
accountId: "account-1",
@@ -1570,10 +1601,11 @@ describe("codex command", () => {
);
expectResultTextContains(firstConfirmResult, "Codex diagnostics sent to OpenAI servers:");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-scope-2", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-scope-2",
cwd: "/repo",
});
const secondRequest = await handleCodexCommand(
createContext("diagnostics second", sessionFile, {
accountId: "account-2",
@@ -1602,10 +1634,11 @@ describe("codex command", () => {
const deps = createDeps({ safeCodexControlRequest });
const sessionFile = path.join(tempDir, "delimiter-cooldown-session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-delimiter-1", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-delimiter-1",
cwd: "/repo",
});
const firstScope = {
accountId: "a",
channelId: "b",
@@ -1622,10 +1655,11 @@ describe("codex command", () => {
);
expectResultTextContains(firstConfirmResult, "Codex diagnostics sent to OpenAI servers:");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-delimiter-2", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-delimiter-2",
cwd: "/repo",
});
const secondScope = {
accountId: "a|channelId:b",
channel: "test|channel:x",
@@ -1653,10 +1687,11 @@ describe("codex command", () => {
const sessionFile = path.join(tempDir, "long-scope-cooldown-session.jsonl");
const sharedPrefix = "account-".repeat(40);
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-long-scope-1", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-long-scope-1",
cwd: "/repo",
});
const firstScope = {
accountId: `${sharedPrefix}first`,
channelId: "channel-long",
@@ -1672,10 +1707,11 @@ describe("codex command", () => {
);
expectResultTextContains(firstConfirmResult, "Codex diagnostics sent to OpenAI servers:");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-long-scope-2", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-long-scope-2",
cwd: "/repo",
});
const secondScope = {
accountId: `${sharedPrefix}second`,
channelId: "channel-long",
@@ -1696,10 +1732,7 @@ describe("codex command", () => {
it("sanitizes diagnostics upload errors before showing them", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "<@U123>", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, { schemaVersion: 1, threadId: "<@U123>", cwd: "/repo" });
const safeCodexControlRequest = vi.fn(async () => ({
ok: false as const,
error: "bad\n\u009b\u202e <@U123> [trusted](https://evil) @here",
@@ -1724,10 +1757,11 @@ describe("codex command", () => {
it("does not throttle diagnostics retries after upload failures", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-retry", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-retry",
cwd: "/repo",
});
const safeCodexControlRequest = vi
.fn()
.mockResolvedValueOnce({ ok: false as const, error: "temporary outage" })
@@ -1774,14 +1808,11 @@ describe("codex command", () => {
it("omits inline diagnostics resume commands for unsafe thread ids", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({
schemaVersion: 1,
threadId: "thread-123'`\n\u009b\u202e; echo bad",
cwd: "/repo",
}),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-123'`\n\u009b\u202e; echo bad",
cwd: "/repo",
});
const safeCodexControlRequest = vi.fn(async () => ({
ok: true as const,
value: { threadId: "thread-123'`\n\u009b\u202e; echo bad" },
@@ -1885,10 +1916,7 @@ describe("codex command", () => {
it("returns sanitized command failures instead of leaking app-server errors", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({ schemaVersion: 1, threadId: "thread-123", cwd: "/repo" }),
);
await seedCodexBinding(sessionFile, { schemaVersion: 1, threadId: "thread-123", cwd: "/repo" });
const failure = () => {
throw new Error("app-server failed <@U123> [trusted](https://evil) @here");
};
@@ -1920,16 +1948,13 @@ describe("codex command", () => {
it("binds the current conversation to a Codex app-server thread", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({
schemaVersion: 1,
threadId: "thread-123",
cwd: "/repo",
authProfileId: "openai-codex:work",
modelProvider: "openai",
}),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-123",
cwd: "/repo",
authProfileId: "openai-codex:work",
modelProvider: "openai",
});
const startCodexConversationThread = vi.fn(async () => ({
kind: "codex-app-server-session" as const,
version: 1 as const,
@@ -2354,15 +2379,12 @@ describe("codex command", () => {
it("escapes current bound model status before chat display", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({
schemaVersion: 1,
threadId: "thread-model",
cwd: "/repo",
model: "model_<@U123>_[trusted](https://evil)",
}),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-model",
cwd: "/repo",
model: "model_<@U123>_[trusted](https://evil)",
});
const result = await handleCodexCommand(createContext("model", sessionFile), {
deps: createDeps(),
@@ -2473,18 +2495,15 @@ describe("codex command", () => {
it("describes active binding preferences", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({
schemaVersion: 1,
threadId: "thread-123",
cwd: "/repo",
model: "gpt-5.4",
serviceTier: "fast",
approvalPolicy: "never",
sandbox: "danger-full-access",
}),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-123",
cwd: "/repo",
model: "gpt-5.4",
serviceTier: "fast",
approvalPolicy: "never",
sandbox: "danger-full-access",
});
await expect(
handleCodexCommand(
@@ -2531,15 +2550,12 @@ describe("codex command", () => {
it("escapes active binding fields before chat display", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({
schemaVersion: 1,
threadId: "thread-123 <@U123>",
cwd: "/repo",
model: "gpt [trusted](https://evil)",
}),
);
await seedCodexBinding(sessionFile, {
schemaVersion: 1,
threadId: "thread-123 <@U123>",
cwd: "/repo",
model: "gpt [trusted](https://evil)",
});
const result = await handleCodexCommand(
createContext("binding", sessionFile, {

View File

@@ -21,6 +21,11 @@ const agentRuntimeMocks = vi.hoisted(() => ({
vi.mock("./app-server/shared-client.js", () => sharedClientMocks);
vi.mock("openclaw/plugin-sdk/agent-runtime", () => agentRuntimeMocks);
import {
readCodexAppServerBinding,
writeCodexAppServerBinding,
type CodexAppServerThreadBinding,
} from "./app-server/session-binding.js";
import {
handleCodexConversationBindingResolved,
handleCodexConversationInboundClaim,
@@ -28,10 +33,30 @@ import {
} from "./conversation-binding.js";
let tempDir: string;
let previousStateDir: string | undefined;
async function seedCodexBinding(
sessionFile: string,
binding: Partial<CodexAppServerThreadBinding> & { threadId: string },
): Promise<void> {
await writeCodexAppServerBinding(sessionFile, {
threadId: binding.threadId,
cwd: binding.cwd ?? tempDir,
authProfileId: binding.authProfileId,
model: binding.model,
modelProvider: binding.modelProvider,
approvalPolicy: binding.approvalPolicy,
sandbox: binding.sandbox,
serviceTier: binding.serviceTier,
dynamicToolsFingerprint: binding.dynamicToolsFingerprint,
});
}
describe("codex conversation binding", () => {
beforeEach(async () => {
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-codex-binding-"));
previousStateDir = process.env.OPENCLAW_STATE_DIR;
process.env.OPENCLAW_STATE_DIR = tempDir;
});
afterEach(async () => {
@@ -44,6 +69,11 @@ describe("codex conversation binding", () => {
agentRuntimeMocks.resolvePersistedAuthProfileOwnerAgentDir.mockReset();
agentRuntimeMocks.resolveProviderIdForAuth.mockClear();
agentRuntimeMocks.saveAuthProfileStore.mockReset();
if (previousStateDir === undefined) {
delete process.env.OPENCLAW_STATE_DIR;
} else {
process.env.OPENCLAW_STATE_DIR = previousStateDir;
}
await fs.rm(tempDir, { recursive: true, force: true });
});
@@ -101,9 +131,9 @@ describe("codex conversation binding", () => {
expect(requests[0]?.method).toBe("thread/start");
expect(requests[0]?.params.model).toBe("gpt-5.4-mini");
expect(requests[0]?.params).not.toHaveProperty("modelProvider");
await expect(fs.readFile(`${sessionFile}.codex-app-server.json`, "utf8")).resolves.toContain(
'"authProfileId": "openai-codex:default"',
);
await expect(readCodexAppServerBinding(sessionFile)).resolves.toMatchObject({
authProfileId: "openai-codex:default",
});
});
it("preserves Codex auth and omits the public OpenAI provider for native bind threads", async () => {
@@ -120,16 +150,12 @@ describe("codex conversation binding", () => {
},
},
});
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({
schemaVersion: 1,
threadId: "thread-old",
cwd: tempDir,
authProfileId: "work",
modelProvider: "openai",
}),
);
await seedCodexBinding(sessionFile, {
threadId: "thread-old",
cwd: tempDir,
authProfileId: "work",
modelProvider: "openai",
});
const requests: Array<{ method: string; params: Record<string, unknown> }> = [];
sharedClientMocks.getSharedCodexAppServerClient.mockResolvedValue({
request: vi.fn(async (method: string, requestParams: Record<string, unknown>) => {
@@ -155,18 +181,14 @@ describe("codex conversation binding", () => {
expect(requests[0]?.method).toBe("thread/start");
expect(requests[0]?.params.model).toBe("gpt-5.4-mini");
expect(requests[0]?.params).not.toHaveProperty("modelProvider");
await expect(fs.readFile(`${sessionFile}.codex-app-server.json`, "utf8")).resolves.toContain(
'"authProfileId": "work"',
);
await expect(
fs.readFile(`${sessionFile}.codex-app-server.json`, "utf8"),
).resolves.not.toContain('"modelProvider": "openai"');
const binding = await readCodexAppServerBinding(sessionFile);
expect(binding?.authProfileId).toBe("work");
expect(binding?.modelProvider).toBeUndefined();
});
it("clears the Codex app-server sidecar when a pending bind is denied", async () => {
it("clears the Codex app-server binding when a pending bind is denied", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
const sidecar = `${sessionFile}.codex-app-server.json`;
await fs.writeFile(sidecar, JSON.stringify({ schemaVersion: 1, threadId: "thread-1" }));
await seedCodexBinding(sessionFile, { threadId: "thread-1" });
await handleCodexConversationBindingResolved({
status: "denied",
@@ -186,7 +208,7 @@ describe("codex conversation binding", () => {
},
});
await expect(fs.stat(sidecar)).rejects.toMatchObject({ code: "ENOENT" });
await expect(readCodexAppServerBinding(sessionFile)).resolves.toBeUndefined();
});
it("consumes inbound bound messages when command authorization is absent", async () => {
@@ -231,20 +253,16 @@ describe("codex conversation binding", () => {
},
},
});
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({
schemaVersion: 1,
threadId: "thread-old",
cwd: tempDir,
authProfileId: "work",
model: "gpt-5.4-mini",
modelProvider: "openai",
approvalPolicy: "on-request",
sandbox: "workspace-write",
serviceTier: "fast",
}),
);
await seedCodexBinding(sessionFile, {
threadId: "thread-old",
cwd: tempDir,
authProfileId: "work",
model: "gpt-5.4-mini",
modelProvider: "openai",
approvalPolicy: "on-request",
sandbox: "workspace-write",
serviceTier: "fast",
});
const requests: Array<{ method: string; params: Record<string, unknown> }> = [];
const notificationHandlers: Array<(notification: Record<string, unknown>) => void> = [];
sharedClientMocks.getSharedCodexAppServerClient.mockResolvedValue({
@@ -339,9 +357,7 @@ describe("codex conversation binding", () => {
approvalPolicy: "on-request",
serviceTier: "priority",
});
const savedBinding = JSON.parse(
await fs.readFile(`${sessionFile}.codex-app-server.json`, "utf8"),
);
const savedBinding = await readCodexAppServerBinding(sessionFile);
expect(savedBinding).toMatchObject({
threadId: "thread-new",
authProfileId: "work",
@@ -349,20 +365,16 @@ describe("codex conversation binding", () => {
sandbox: "workspace-write",
serviceTier: "priority",
});
expect(savedBinding).not.toHaveProperty("modelProvider");
expect(savedBinding?.modelProvider).toBeUndefined();
});
it("returns a clean failure reply when app-server turn start rejects", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({
schemaVersion: 1,
threadId: "thread-1",
cwd: tempDir,
authProfileId: "openai-codex:work",
}),
);
await seedCodexBinding(sessionFile, {
threadId: "thread-1",
cwd: tempDir,
authProfileId: "openai-codex:work",
});
const unhandledRejections: unknown[] = [];
const onUnhandledRejection = (reason: unknown) => {
unhandledRejections.push(reason);
@@ -430,14 +442,10 @@ describe("codex conversation binding", () => {
it("falls back to content when the channel body for agent is blank", async () => {
const sessionFile = path.join(tempDir, "session.jsonl");
await fs.writeFile(
`${sessionFile}.codex-app-server.json`,
JSON.stringify({
schemaVersion: 1,
threadId: "thread-1",
cwd: tempDir,
}),
);
await seedCodexBinding(sessionFile, {
threadId: "thread-1",
cwd: tempDir,
});
let notificationHandler: ((notification: unknown) => void) | undefined;
const turnStartParams: Record<string, unknown>[] = [];
sharedClientMocks.getSharedCodexAppServerClient.mockResolvedValue({

View File

@@ -92,9 +92,7 @@ describe("codex conversation controls", () => {
"Codex model set to gpt-5.5.",
);
const raw = await fs.readFile(`${sessionFile}.codex-app-server.json`, "utf8");
const binding = await readCodexAppServerBinding(sessionFile);
expect(raw).not.toContain('"modelProvider": "openai"');
expect(binding).toMatchObject({
threadId: "thread-1",
authProfileId: "work",

View File

@@ -9,6 +9,7 @@ import { resolveGlobalMap } from "openclaw/plugin-sdk/global-singleton";
import * as memoryCoreHostRuntimeCoreModule from "openclaw/plugin-sdk/memory-core-host-runtime-core";
import * as runtimeConfigSnapshotModule from "openclaw/plugin-sdk/runtime-config-snapshot";
import * as sessionStoreRuntimeModule from "openclaw/plugin-sdk/session-store-runtime";
import { saveSessionStore } from "openclaw/plugin-sdk/session-store-runtime";
import { afterEach, describe, expect, it, vi } from "vitest";
import {
appendNarrativeEntry,
@@ -952,34 +953,26 @@ describe("generateAndAppendDreamNarrative", () => {
expect(subagent.deleteSession).toHaveBeenCalled();
});
it("scrubs stale dreaming entries and orphan transcripts after cleanup", async () => {
it("scrubs stale dreaming entries after cleanup", async () => {
const workspaceDir = await createTempWorkspace("openclaw-dreaming-narrative-");
const stateDir = await createTempWorkspace("openclaw-dreaming-state-");
const sessionsDir = path.join(stateDir, "agents", "main", "sessions");
await fs.mkdir(sessionsDir, { recursive: true });
const storePath = path.join(sessionsDir, "sessions.json");
const orphanPath = path.join(sessionsDir, "orphan.jsonl");
const livePath = path.join(sessionsDir, "still-live.jsonl");
await fs.writeFile(
storePath,
`${JSON.stringify({
"agent:main:dreaming-narrative-light-1": {
sessionId: "missing",
},
"agent:main:kept-session": {
sessionId: "still-live",
},
"agent:main:telegram:group:dreaming-narrative-room": {
sessionId: "still-missing-non-dreaming",
},
})}\n`,
"utf-8",
);
await fs.writeFile(orphanPath, '{"runId":"dreaming-narrative-light-123"}\n', "utf-8");
await fs.writeFile(livePath, '{"runId":"dreaming-narrative-light-keep"}\n', "utf-8");
const oldDate = new Date(Date.now() - 600_000);
await fs.utimes(orphanPath, oldDate, oldDate);
await fs.utimes(livePath, oldDate, oldDate);
await saveSessionStore(storePath, {
"agent:main:dreaming-narrative-light-1": {
sessionId: "missing",
updatedAt: Date.now(),
},
"agent:main:kept-session": {
sessionId: "still-live",
updatedAt: Date.now(),
},
"agent:main:telegram:group:dreaming-narrative-room": {
sessionId: "still-missing-non-dreaming",
updatedAt: Date.now(),
},
});
vi.spyOn(runtimeConfigSnapshotModule, "getRuntimeConfig").mockReturnValue({
session: {},
@@ -1003,16 +996,13 @@ describe("generateAndAppendDreamNarrative", () => {
logger,
});
const updatedStore = JSON.parse(await fs.readFile(storePath, "utf-8")) as Record<
const updatedStore = sessionStoreRuntimeModule.loadSessionStore(storePath) as Record<
string,
unknown
>;
expect(updatedStore).not.toHaveProperty("agent:main:dreaming-narrative-light-1");
expect(updatedStore).toHaveProperty("agent:main:kept-session");
expect(updatedStore).toHaveProperty("agent:main:telegram:group:dreaming-narrative-room");
const sessionFiles = await fs.readdir(sessionsDir);
expect(sessionFiles).toContainEqual(expect.stringMatching(/^orphan\.jsonl\.deleted\./));
expect(sessionFiles).toContain("still-live.jsonl");
expectLogIncludes(logger.info, "dreaming cleanup scrubbed");
});

View File

@@ -98,8 +98,6 @@ const NARRATIVE_SYSTEM_PROMPT = [
// comment warned against.
const NARRATIVE_TIMEOUT_MS = 60_000;
const DREAMING_SESSION_KEY_PREFIX = "dreaming-narrative-";
const DREAMING_TRANSCRIPT_RUN_MARKER = '"runId":"dreaming-narrative-';
const DREAMING_ORPHAN_MIN_AGE_MS = 300_000;
const SAFE_SESSION_ID_RE = /^[a-z0-9][a-z0-9._-]{0,127}$/i;
const DREAMS_FILENAMES = ["DREAMS.md", "dreams.md"] as const;
const DIARY_START_MARKER = "<!-- openclaw:dreaming:diary:start -->";
@@ -760,8 +758,6 @@ async function scrubDreamingNarrativeArtifacts(logger: Logger): Promise<void> {
}
let prunedEntries = 0;
let archivedOrphans = 0;
for (const agentEntry of agentEntries) {
if (!agentEntry.isDirectory()) {
continue;
@@ -779,16 +775,12 @@ async function scrubDreamingNarrativeArtifacts(logger: Logger): Promise<void> {
continue;
}
const referencedSessionFiles = new Set<string>();
let needsStoreUpdate = false;
for (const [key, entry] of Object.entries(store)) {
const normalizedSessionFile = await normalizeSessionEntryPathForComparison({
sessionsDir,
entry,
});
if (normalizedSessionFile) {
referencedSessionFiles.add(normalizedSessionFile);
}
if (!isDreamingSessionStoreKey(key)) {
continue;
}
@@ -798,7 +790,6 @@ async function scrubDreamingNarrativeArtifacts(logger: Logger): Promise<void> {
}
if (needsStoreUpdate) {
referencedSessionFiles.clear();
prunedEntries += await updateSessionStore(storePath, async (lockedStore) => {
let prunedForAgent = 0;
for (const [key, entry] of Object.entries(lockedStore)) {
@@ -806,9 +797,6 @@ async function scrubDreamingNarrativeArtifacts(logger: Logger): Promise<void> {
sessionsDir,
entry,
});
if (normalizedSessionFile) {
referencedSessionFiles.add(normalizedSessionFile);
}
if (!isDreamingSessionStoreKey(key)) {
continue;
}
@@ -820,58 +808,11 @@ async function scrubDreamingNarrativeArtifacts(logger: Logger): Promise<void> {
return prunedForAgent;
});
}
let sessionFiles: Dirent[] = [];
try {
sessionFiles = await fs.readdir(sessionsDir, { withFileTypes: true });
} catch {
continue;
}
for (const fileEntry of sessionFiles) {
if (!fileEntry.isFile() || !fileEntry.name.endsWith(".jsonl")) {
continue;
}
const transcriptPath = path.join(sessionsDir, fileEntry.name);
const normalizedTranscriptPath =
(await normalizeSessionFileForComparison({
sessionsDir,
sessionFile: fileEntry.name,
})) ?? normalizeComparablePath(transcriptPath);
if (referencedSessionFiles.has(normalizedTranscriptPath)) {
continue;
}
let stat;
try {
stat = await fs.stat(transcriptPath);
} catch {
continue;
}
if (Date.now() - stat.mtimeMs < DREAMING_ORPHAN_MIN_AGE_MS) {
continue;
}
let content = "";
try {
content = await fs.readFile(transcriptPath, "utf-8");
} catch {
continue;
}
if (!content.includes(DREAMING_TRANSCRIPT_RUN_MARKER)) {
continue;
}
const archivedPath = `${transcriptPath}.deleted.${Date.now()}`;
try {
await fs.rename(transcriptPath, archivedPath);
archivedOrphans += 1;
} catch {
// best-effort scrubber
}
}
}
if (prunedEntries > 0 || archivedOrphans > 0) {
if (prunedEntries > 0) {
logger.info(
`memory-core: dreaming cleanup scrubbed ${prunedEntries} stale session entr${prunedEntries === 1 ? "y" : "ies"} and archived ${archivedOrphans} orphan transcript${archivedOrphans === 1 ? "" : "s"}.`,
`memory-core: dreaming cleanup scrubbed ${prunedEntries} stale session entr${prunedEntries === 1 ? "y" : "ies"}.`,
);
}
}

View File

@@ -1333,35 +1333,13 @@ describe("memory-core dreaming phases", () => {
expect(corpus).toContain("Assistant: Handled internally.");
});
it("drops archive, cron, and heartbeat chatter from fresh session corpus output", async () => {
it("drops checkpoint, cron, and heartbeat chatter from fresh session corpus output", async () => {
const workspaceDir = await createDreamingWorkspace();
vi.stubEnv("OPENCLAW_TEST_FAST", "1");
vi.stubEnv("OPENCLAW_STATE_DIR", path.join(workspaceDir, ".state"));
const sessionsDir = resolveSessionTranscriptsDirForAgent("main");
await fs.mkdir(sessionsDir, { recursive: true });
await fs.writeFile(
path.join(sessionsDir, "archived.jsonl.deleted.2026-04-16T18-06-16.529Z"),
[
JSON.stringify({
type: "message",
message: {
role: "user",
timestamp: "2026-04-16T18:01:00.000Z",
content: "[cron:job-1 Example] Run the nightly sync",
},
}),
JSON.stringify({
type: "message",
message: {
role: "assistant",
timestamp: "2026-04-16T18:02:00.000Z",
content: "Running the nightly sync now.",
},
}),
].join("\n") + "\n",
"utf-8",
);
await fs.writeFile(
path.join(sessionsDir, "ordinary.checkpoint.11111111-1111-4111-8111-111111111111.jsonl"),
JSON.stringify({
@@ -1607,7 +1585,7 @@ describe("memory-core dreaming phases", () => {
}
});
it("dedupes reset/deleted session archives instead of double-ingesting", async () => {
it("dedupes refreshed session corpus instead of double-ingesting", async () => {
const workspaceDir = await createDreamingWorkspace();
vi.stubEnv("OPENCLAW_TEST_FAST", "1");
vi.stubEnv("OPENCLAW_STATE_DIR", path.join(workspaceDir, ".state"));
@@ -1666,11 +1644,6 @@ describe("memory-core dreaming phases", () => {
await triggerLightDreaming(beforeAgentReply, workspaceDir, 5);
});
const resetPath = path.join(
sessionsDir,
"dreaming-main.jsonl.reset.2026-04-06T01-00-00.000Z",
);
await fs.writeFile(resetPath, await fs.readFile(transcriptPath, "utf-8"), "utf-8");
const newMessage = "Keep retention at 365 days.";
await fs.writeFile(
transcriptPath,
@@ -1696,7 +1669,6 @@ describe("memory-core dreaming phases", () => {
);
const dayTwo = new Date("2026-04-06T01:05:00.000Z");
await fs.utimes(transcriptPath, dayTwo, dayTwo);
await fs.utimes(resetPath, dayTwo, dayTwo);
await withDreamingTestClock(async () => {
await triggerLightDreaming(beforeAgentReply, workspaceDir, 910);

View File

@@ -1,171 +0,0 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import type { DatabaseSync } from "node:sqlite";
import type {
OpenClawConfig,
ResolvedMemorySearchConfig,
} from "openclaw/plugin-sdk/memory-core-host-engine-foundation";
import type {
MemorySource,
MemorySyncProgressUpdate,
} from "openclaw/plugin-sdk/memory-core-host-engine-storage";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { MemoryManagerSyncOps } from "./manager-sync-ops.js";
type MemoryIndexEntry = {
path: string;
absPath: string;
mtimeMs: number;
size: number;
hash: string;
content?: string;
};
type SyncParams = {
reason?: string;
force?: boolean;
forceSessions?: boolean;
sessionFile?: string;
progress?: (update: MemorySyncProgressUpdate) => void;
};
class SessionDeltaHarness extends MemoryManagerSyncOps {
protected readonly cfg = {} as OpenClawConfig;
protected readonly agentId = "main";
protected readonly workspaceDir = "/tmp/openclaw-test-workspace";
protected readonly settings = {
sync: {
sessions: {
deltaBytes: 100_000,
deltaMessages: 50,
postCompactionForce: true,
},
},
} as ResolvedMemorySearchConfig;
protected readonly batch = {
enabled: false,
wait: false,
concurrency: 1,
pollIntervalMs: 0,
timeoutMs: 0,
};
protected readonly vector = { enabled: false, available: false };
protected readonly cache = { enabled: false };
protected db = null as unknown as DatabaseSync;
readonly syncCalls: SyncParams[] = [];
addPendingSessionFile(sessionFile: string) {
this.sessionPendingFiles.add(sessionFile);
}
getDirtySessionFiles(): string[] {
return Array.from(this.sessionsDirtyFiles);
}
isSessionsDirty(): boolean {
return this.sessionsDirty;
}
async processPendingSessionDeltas(): Promise<void> {
await (
this as unknown as {
processSessionDeltaBatch: () => Promise<void>;
}
).processSessionDeltaBatch();
}
protected computeProviderKey(): string {
return "test";
}
protected async sync(params?: SyncParams): Promise<void> {
this.syncCalls.push(params ?? {});
}
protected async withTimeout<T>(
promise: Promise<T>,
_timeoutMs: number,
_message: string,
): Promise<T> {
return await promise;
}
protected getIndexConcurrency(): number {
return 1;
}
protected pruneEmbeddingCacheIfNeeded(): void {}
protected async indexFile(
_entry: MemoryIndexEntry,
_options: { source: MemorySource; content?: string },
): Promise<void> {}
}
describe("session archive delta bypass", () => {
let tmpDir = "";
beforeEach(async () => {
tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-archive-delta-"));
});
afterEach(async () => {
await fs.rm(tmpDir, { recursive: true, force: true });
});
async function writeSessionFile(name: string): Promise<string> {
const filePath = path.join(tmpDir, name);
await fs.writeFile(
filePath,
JSON.stringify({
type: "message",
message: { role: "user", content: "short archived session" },
}) + "\n",
"utf-8",
);
return filePath;
}
it.each(["reset", "deleted"] as const)(
"marks below-threshold %s archives dirty immediately",
async (reason) => {
const archivePath = await writeSessionFile(
`session-a.jsonl.${reason}.2026-05-03T05-38-59.000Z`,
);
const harness = new SessionDeltaHarness();
harness.addPendingSessionFile(archivePath);
await harness.processPendingSessionDeltas();
expect(harness.getDirtySessionFiles()).toEqual([archivePath]);
expect(harness.isSessionsDirty()).toBe(true);
expect(harness.syncCalls).toEqual([{ reason: "session-delta" }]);
},
);
it("keeps .jsonl.bak archives on the normal below-threshold delta path", async () => {
const bakPath = await writeSessionFile("session-a.jsonl.bak.2026-05-03T05-38-59.000Z");
const harness = new SessionDeltaHarness();
harness.addPendingSessionFile(bakPath);
await harness.processPendingSessionDeltas();
expect(harness.getDirtySessionFiles()).toStrictEqual([]);
expect(harness.isSessionsDirty()).toBe(false);
expect(harness.syncCalls).toStrictEqual([]);
});
it("keeps live transcripts below the configured thresholds", async () => {
const livePath = await writeSessionFile("session-a.jsonl");
const harness = new SessionDeltaHarness();
harness.addPendingSessionFile(livePath);
await harness.processPendingSessionDeltas();
expect(harness.getDirtySessionFiles()).toStrictEqual([]);
expect(harness.isSessionsDirty()).toBe(false);
expect(harness.syncCalls).toStrictEqual([]);
});
});

View File

@@ -17,8 +17,6 @@ import {
} from "openclaw/plugin-sdk/memory-core-host-engine-foundation";
import {
buildSessionEntry,
isSessionArchiveArtifactName,
isUsageCountedSessionTranscriptFileName,
listSessionFilesForAgent,
sessionPathForFile,
} from "openclaw/plugin-sdk/memory-core-host-engine-qmd";
@@ -508,24 +506,6 @@ export abstract class MemoryManagerSyncOps {
this.sessionPendingFiles.clear();
let shouldSync = false;
for (const sessionFile of pending) {
// Usage-counted session archives (`.jsonl.reset.<iso>` and
// `.jsonl.deleted.<iso>`) are one-shot mutation events: the file is
// written once by the archive rotation and then never touched again.
// They carry no incremental `append` semantics, so the delta-bytes /
// delta-messages thresholds (designed for live transcripts accumulating
// appended messages) cannot gate them correctly — a short archive
// below the threshold would simply never reindex. Mark them dirty
// directly and skip the delta accounting.
const baseName = path.basename(sessionFile);
if (
isSessionArchiveArtifactName(baseName) &&
isUsageCountedSessionTranscriptFileName(baseName)
) {
this.sessionsDirtyFiles.add(sessionFile);
this.sessionsDirty = true;
shouldSync = true;
continue;
}
const delta = await this.updateSessionDelta(sessionFile);
if (!delta) {
continue;

View File

@@ -49,7 +49,7 @@ describe("filterMemorySearchHitsBySessionVisibility", () => {
sandboxed: false,
hits,
});
expect(filtered).toStrictEqual([]);
expect(filtered).toEqual([]);
});
it("keeps non-session hits unchanged", async () => {
@@ -148,59 +148,6 @@ describe("filterMemorySearchHitsBySessionVisibility", () => {
sandboxed: false,
hits: [hit],
});
expect(filtered).toStrictEqual([]);
});
it("keeps same-agent deleted archive hits using owner metadata when the live store entry is gone", async () => {
combinedSessionStore = {};
const hit: MemorySearchResult = {
path: "sessions/main/deleted-stem.jsonl.deleted.2026-02-16T22-27-33.000Z",
source: "sessions",
score: 1,
snippet: "x",
startLine: 1,
endLine: 2,
};
const cfg = asOpenClawConfig({
tools: {
sessions: { visibility: "agent" },
},
});
const filtered = await filterMemorySearchHitsBySessionVisibility({
cfg,
requesterSessionKey: "agent:main:main",
sandboxed: false,
hits: [hit],
});
expect(filtered).toEqual([hit]);
});
it("still denies cross-agent deleted archive hits resolved from owner metadata when a2a is disabled", async () => {
combinedSessionStore = {};
const hit: MemorySearchResult = {
path: "sessions/peer/deleted-stem.jsonl.deleted.2026-02-16T22-27-33.000Z",
source: "sessions",
score: 1,
snippet: "x",
startLine: 1,
endLine: 2,
};
const cfg = asOpenClawConfig({
tools: {
sessions: { visibility: "all" },
agentToAgent: { enabled: false },
},
});
const filtered = await filterMemorySearchHitsBySessionVisibility({
cfg,
requesterSessionKey: "agent:main:main",
sandboxed: false,
hits: [hit],
});
expect(filtered).toStrictEqual([]);
expect(filtered).toEqual([]);
});
});

View File

@@ -49,9 +49,6 @@ export async function filterMemorySearchHitsBySessionVisibility(params: {
const keys = resolveTranscriptStemToSessionKeys({
store: combinedSessionStore,
stem: identity.stem,
...(identity.archived && identity.ownerAgentId
? { archivedOwnerAgentId: identity.ownerAgentId }
: {}),
});
if (keys.length === 0) {
continue;

View File

@@ -13,7 +13,6 @@ export {
type SessionTranscriptClassification,
} from "./host/session-files.js";
export {
isSessionArchiveArtifactName,
isUsageCountedSessionTranscriptFileName,
parseUsageCountedSessionIdFromFileName,
} from "./host/openclaw-runtime-session.js";

View File

@@ -7,7 +7,6 @@ export {
isCronRunSessionKey,
isExecCompletionEvent,
isHeartbeatUserMessage,
isSessionArchiveArtifactName,
isSilentReplyPayloadText,
isUsageCountedSessionTranscriptFileName,
onSessionTranscriptUpdate,

View File

@@ -50,7 +50,6 @@ export type { OpenClawConfig } from "../../../../src/config/config.js";
export { resolveStateDir } from "../../../../src/config/paths.js";
export {
isCompactionCheckpointTranscriptFileName,
isSessionArchiveArtifactName,
isUsageCountedSessionTranscriptFileName,
parseUsageCountedSessionIdFromFileName,
} from "../../../../src/config/sessions/artifacts.js";

View File

@@ -45,25 +45,18 @@ function requireSessionEntry(entry: SessionFileEntry | null): SessionFileEntry {
}
describe("listSessionFilesForAgent", () => {
it("includes reset and deleted transcripts in session file listing", async () => {
it("includes primary transcripts in session file listing", async () => {
const sessionsDir = path.join(tmpDir, "agents", "main", "sessions");
fsSync.mkdirSync(path.join(sessionsDir, "archive"), { recursive: true });
const included = [
"active.jsonl",
"active.jsonl.reset.2026-02-16T22-26-33.000Z",
"active.jsonl.deleted.2026-02-16T22-27-33.000Z",
];
const included = ["active.jsonl"];
const excluded = ["active.jsonl.bak.2026-02-16T22-28-33.000Z", "sessions.json", "notes.md"];
excluded.push("active.checkpoint.11111111-1111-4111-8111-111111111111.jsonl");
for (const fileName of [...included, ...excluded]) {
fsSync.writeFileSync(path.join(sessionsDir, fileName), "");
}
fsSync.writeFileSync(
path.join(sessionsDir, "archive", "nested.jsonl.deleted.2026-02-16T22-29-33.000Z"),
"",
);
fsSync.writeFileSync(path.join(sessionsDir, "archive", "nested.jsonl"), "");
const files = await listSessionFilesForAgent("main");
@@ -75,17 +68,9 @@ describe("listSessionFilesForAgent", () => {
describe("sessionPathForFile", () => {
it("includes the owning agent id when the transcript lives under an agent sessions dir", () => {
const absPath = path.join(
tmpDir,
"agents",
"main",
"sessions",
"deleted-session.jsonl.deleted.2026-02-16T22-27-33.000Z",
);
const absPath = path.join(tmpDir, "agents", "main", "sessions", "active-session.jsonl");
expect(sessionPathForFile(absPath)).toBe(
"sessions/main/deleted-session.jsonl.deleted.2026-02-16T22-27-33.000Z",
);
expect(sessionPathForFile(absPath)).toBe("sessions/main/active-session.jsonl");
});
it("keeps the legacy basename-only path when the agent owner cannot be derived", () => {
@@ -143,13 +128,10 @@ describe("buildSessionEntry", () => {
const entry = requireSessionEntry(await buildSessionEntry(filePath));
expect(entry.content).toBe("");
expect(entry.lineMap).toStrictEqual([]);
expect(entry.lineMap).toEqual([]);
});
it("indexes usage-counted reset/deleted archives but still skips bak and checkpoint artifacts", async () => {
const resetPath = path.join(tmpDir, "ordinary.jsonl.reset.2026-02-16T22-26-33.000Z");
const deletedPath = path.join(tmpDir, "ordinary.jsonl.deleted.2026-02-16T22-27-33.000Z");
const bakPath = path.join(tmpDir, "ordinary.jsonl.bak.2026-02-16T22-28-33.000Z");
it("skips checkpoint artifacts so snapshots do not double-index session content", async () => {
const checkpointPath = path.join(
tmpDir,
"ordinary.checkpoint.11111111-1111-4111-8111-111111111111.jsonl",
@@ -158,29 +140,12 @@ describe("buildSessionEntry", () => {
type: "message",
message: { role: "user", content: "Archived hello" },
});
fsSync.writeFileSync(resetPath, content);
fsSync.writeFileSync(deletedPath, content);
fsSync.writeFileSync(bakPath, content);
fsSync.writeFileSync(checkpointPath, content);
const resetEntry = requireSessionEntry(await buildSessionEntry(resetPath));
const deletedEntry = requireSessionEntry(await buildSessionEntry(deletedPath));
const bakEntry = requireSessionEntry(await buildSessionEntry(bakPath));
const checkpointEntry = requireSessionEntry(await buildSessionEntry(checkpointPath));
// Usage-counted archives (reset, deleted) must surface real content so
// post-reset memory_search can recover prior session history.
expect(resetEntry.content).toContain("User: Archived hello");
expect(resetEntry.lineMap).toEqual([1]);
expect(deletedEntry.content).toContain("User: Archived hello");
expect(deletedEntry.lineMap).toEqual([1]);
// .bak and compaction checkpoints remain opaque pre-archive / snapshot
// artifacts and stay empty so they do not get double-indexed.
expect(bakEntry.content).toBe("");
expect(bakEntry.lineMap).toStrictEqual([]);
expect(checkpointEntry.content).toBe("");
expect(checkpointEntry.lineMap).toStrictEqual([]);
expect(checkpointEntry.lineMap).toEqual([]);
});
it("keeps cron-run deleted archives opaque when the live session store entry is gone", async () => {
@@ -203,7 +168,7 @@ describe("buildSessionEntry", () => {
const entry = requireSessionEntry(await buildSessionEntry(archivePath));
expect(entry.content).toBe("");
expect(entry.lineMap).toStrictEqual([]);
expect(entry.lineMap).toEqual([]);
expect(entry.generatedByCronRun).toBe(true);
});
@@ -224,7 +189,7 @@ describe("buildSessionEntry", () => {
const entry = requireSessionEntry(await buildSessionEntry(archivePath));
expect(entry.content).toBe("");
expect(entry.lineMap).toStrictEqual([]);
expect(entry.lineMap).toEqual([]);
expect(entry.generatedByCronRun).toBe(true);
});

View File

@@ -12,7 +12,6 @@ import {
isCronRunSessionKey,
isExecCompletionEvent,
isHeartbeatUserMessage,
isSessionArchiveArtifactName,
isSilentReplyPayloadText,
isUsageCountedSessionTranscriptFileName,
parseUsageCountedSessionIdFromFileName,
@@ -73,29 +72,9 @@ function shouldSkipTranscriptFileForDreaming(absPath: string): boolean {
if (isCompactionCheckpointTranscriptFileName(fileName)) {
return true;
}
// Legacy backups and `.jsonl.bak.<iso>` rotations are opaque pre-archive
// copies, not a user-facing session artifact; skip them too.
if (
isSessionArchiveArtifactName(fileName) &&
!isUsageCountedSessionTranscriptFileName(fileName)
) {
return true;
}
// Usage-counted archives (`.jsonl.reset.<iso>` / `.jsonl.deleted.<iso>`) are
// the rotated-but-retained copies of real sessions and must stay indexed so
// `memory_search` can surface hits on post-reset / post-delete history.
return false;
}
function isUsageCountedSessionArchiveTranscriptPath(absPath: string): boolean {
const fileName = path.basename(absPath);
return (
isUsageCountedSessionTranscriptFileName(fileName) &&
isSessionArchiveArtifactName(fileName) &&
parseUsageCountedSessionIdFromFileName(fileName) !== null
);
}
function isDreamingNarrativeBootstrapRecord(record: unknown): boolean {
if (!record || typeof record !== "object" || Array.isArray(record)) {
return false;
@@ -280,15 +259,8 @@ function classifySessionTranscriptFromSessionStore(absPath: string): {
} {
const sessionsDir = path.dirname(absPath);
const normalizedAbsPath = normalizeComparablePath(absPath);
const primarySessionId = parseUsageCountedSessionIdFromFileName(path.basename(absPath));
const normalizedPrimaryPath =
primarySessionId && isSessionArchiveArtifactName(path.basename(absPath))
? normalizeComparablePath(path.join(sessionsDir, `${primarySessionId}.jsonl`))
: null;
const classification = loadSessionTranscriptClassificationForSessionsDir(sessionsDir);
const hasClassifiedPath = (paths: ReadonlySet<string>) =>
paths.has(normalizedAbsPath) ||
(normalizedPrimaryPath !== null && paths.has(normalizedPrimaryPath));
const hasClassifiedPath = (paths: ReadonlySet<string>) => paths.has(normalizedAbsPath);
return {
generatedByDreamingNarrative: hasClassifiedPath(
classification.dreamingNarrativeTranscriptPaths,
@@ -632,16 +604,6 @@ export async function buildSessionEntry(
if (rawText === null) {
continue;
}
if (
!generatedByCronRun &&
allowArchiveContentCronClassification &&
isGeneratedCronPromptMessage(normalizeSessionText(rawText), message.role)
) {
generatedByCronRun = true;
collected.length = 0;
lineMap.length = 0;
messageTimestampsMs.length = 0;
}
const text = sanitizeSessionText(rawText, message.role);
if (!text) {
// Assistant-side machinery (silent replies, system wrappers) is already

View File

@@ -725,11 +725,6 @@ export async function loadCompactHooksHarness(): Promise<{
),
}));
vi.doMock("./session-manager-cache.js", () => ({
prewarmSessionFile: vi.fn(async () => {}),
trackSessionManagerAccess: vi.fn(),
}));
vi.doMock("./system-prompt.js", () => ({
applySystemPromptOverrideToSession: vi.fn(),
buildEmbeddedSystemPrompt: vi.fn(() => ""),

View File

@@ -122,8 +122,13 @@ export async function compactEmbeddedPiSession(
// Fire before_compaction / after_compaction hooks here so plugin subscribers
// are notified regardless of which engine is active.
const engineOwnsCompaction = contextEngine.info.ownsCompaction === true;
const { sessionAgentId } = resolveSessionAgentIds({
sessionKey: params.sessionKey,
config: params.config,
});
checkpointSnapshot = engineOwnsCompaction
? await captureCompactionCheckpointSnapshotAsync({
agentId: sessionAgentId,
sessionFile: params.sessionFile,
})
: null;
@@ -131,10 +136,6 @@ export async function compactEmbeddedPiSession(
? asCompactionHookRunner(getGlobalHookRunner())
: null;
const hookSessionKey = params.sessionKey?.trim() || params.sessionId;
const { sessionAgentId } = resolveSessionAgentIds({
sessionKey: params.sessionKey,
config: params.config,
});
const resolvedMessageProvider = params.messageChannel ?? params.messageProvider;
const hookCtx = {
sessionId: params.sessionId,

View File

@@ -144,7 +144,6 @@ import { readPiModelContextTokens } from "./model-context-tokens.js";
import { resolveModelAsync } from "./model.js";
import { sanitizeSessionHistory, validateReplayTurns } from "./replay-history.js";
import { buildEmbeddedSandboxInfo } from "./sandbox-info.js";
import { prewarmSessionFile, trackSessionManagerAccess } from "./session-manager-cache.js";
import { resolveEmbeddedRunSkillEntries } from "./skills-runtime.js";
import {
resolveEmbeddedAgentBaseStreamFn,
@@ -963,7 +962,6 @@ async function compactEmbeddedPiSessionDirectOnce(
debug: (message) => log.debug(message),
warn: (message) => log.warn(message),
});
await prewarmSessionFile(params.sessionFile);
const transcriptPolicy = runtimePlan.transcript.resolvePolicy(runtimePlanModelContext);
const sessionManager = guardSessionManager(
openTranscriptSessionManager({
@@ -987,11 +985,11 @@ async function compactEmbeddedPiSessionDirectOnce(
},
);
checkpointSnapshot = await captureCompactionCheckpointSnapshotAsync({
agentId: sessionAgentId,
sessionManager,
sessionFile: params.sessionFile,
});
compactionSessionManager = sessionManager;
trackSessionManagerAccess(params.sessionFile);
const settingsManager = createPreparedEmbeddedPiSettingsManager({
cwd: effectiveWorkspace,
agentDir,

View File

@@ -429,11 +429,6 @@ vi.mock("../../session-file-repair.js", () => ({
repairSessionFileIfNeeded: async () => {},
}));
vi.mock("../session-manager-cache.js", () => ({
prewarmSessionFile: async () => {},
trackSessionManagerAccess: () => {},
}));
vi.mock("../../session-write-lock.js", () => ({
acquireSessionWriteLock: (params: Parameters<AcquireSessionWriteLockFn>[0]) =>
hoisted.acquireSessionWriteLockMock(params),

View File

@@ -11,6 +11,7 @@ import {
runQuotaSuspensionMaintenance,
updateSessionStoreEntry,
} from "../../../config/sessions/store.js";
import { hasSqliteSessionTranscriptEvents } from "../../../config/sessions/transcript-store.sqlite.js";
import { resolveContextEngineOwnerPluginId } from "../../../context-engine/registry.js";
import type { AssembleResult } from "../../../context-engine/types.js";
import { emitTrustedDiagnosticEvent } from "../../../infra/diagnostic-events.js";
@@ -230,7 +231,6 @@ import {
updateActiveEmbeddedRunSnapshot,
} from "../runs.js";
import { buildEmbeddedSandboxInfo } from "../sandbox-info.js";
import { prewarmSessionFile, trackSessionManagerAccess } from "../session-manager-cache.js";
import { resolveEmbeddedRunSkillEntries } from "../skills-runtime.js";
import {
describeEmbeddedAgentStreamStrategy,
@@ -1621,10 +1621,10 @@ export async function runEmbeddedAttempt(
debug: (message) => log.debug(message),
warn: (message) => log.warn(message),
});
const hadSessionFile = await fs
.stat(params.sessionFile)
.then(() => true)
.catch(() => false);
const hadSessionFile = hasSqliteSessionTranscriptEvents({
agentId: sessionAgentId,
sessionId: params.sessionId,
});
const transcriptPolicy = resolveAttemptTranscriptPolicy({
runtimePlan: params.runtimePlan,
@@ -1635,7 +1635,6 @@ export async function runEmbeddedAttempt(
env: process.env,
});
await prewarmSessionFile(params.sessionFile);
sessionManager = guardSessionManager(
openTranscriptSessionManager({
sessionFile: params.sessionFile,
@@ -1662,8 +1661,6 @@ export async function runEmbeddedAttempt(
},
},
);
trackSessionManagerAccess(params.sessionFile);
await runAttemptContextEngineBootstrap({
hadSessionFile,
contextEngine: activeContextEngine,
@@ -2470,7 +2467,7 @@ export async function runEmbeddedAttempt(
agentId: sessionAgentId,
});
await runQuotaSuspensionMaintenance({ storePath });
const store = loadSessionStore(storePath, { skipCache: true });
const store = loadSessionStore(storePath);
const sessionEntry = store[params.sessionKey];
const suspension = sessionEntry?.quotaSuspension;
if (suspension?.state === "resuming") {

View File

@@ -1,31 +0,0 @@
import { describe, expect, it } from "vitest";
import { createSessionManagerCache } from "./session-manager-cache.js";
describe("session manager cache", () => {
it("prunes expired entries during later cache activity even without revisiting them", () => {
let now = 1_000;
const cache = createSessionManagerCache({
clock: () => now,
ttlMs: 5_000,
});
cache.trackSessionManagerAccess("/tmp/stale-session.jsonl");
expect(cache.keys()).toEqual(["/tmp/stale-session.jsonl"]);
now = 7_000;
cache.trackSessionManagerAccess("/tmp/fresh-session.jsonl");
expect(cache.keys()).toEqual(["/tmp/fresh-session.jsonl"]);
});
it("can disable caching via the injected TTL resolver", () => {
const cache = createSessionManagerCache({
ttlMs: 0,
});
cache.trackSessionManagerAccess("/tmp/session.jsonl");
expect(cache.isSessionManagerCached("/tmp/session.jsonl")).toBe(false);
expect(cache.keys()).toStrictEqual([]);
});
});

View File

@@ -1,93 +0,0 @@
import { Buffer } from "node:buffer";
import fs from "node:fs/promises";
import {
createExpiringMapCache,
isCacheEnabled,
resolveCacheTtlMs,
} from "../../config/cache-utils.js";
const DEFAULT_SESSION_MANAGER_TTL_MS = 45_000; // 45 seconds
const MIN_SESSION_MANAGER_CACHE_PRUNE_INTERVAL_MS = 1_000;
const MAX_SESSION_MANAGER_CACHE_PRUNE_INTERVAL_MS = 30_000;
function getSessionManagerTtl(): number {
return resolveCacheTtlMs({
envValue: process.env.OPENCLAW_SESSION_MANAGER_CACHE_TTL_MS,
defaultTtlMs: DEFAULT_SESSION_MANAGER_TTL_MS,
});
}
function resolveSessionManagerCachePruneInterval(ttlMs: number): number {
return Math.min(
Math.max(ttlMs, MIN_SESSION_MANAGER_CACHE_PRUNE_INTERVAL_MS),
MAX_SESSION_MANAGER_CACHE_PRUNE_INTERVAL_MS,
);
}
export type SessionManagerCache = {
clear: () => void;
isSessionManagerCached: (sessionFile: string) => boolean;
keys: () => string[];
prewarmSessionFile: (sessionFile: string) => Promise<void>;
trackSessionManagerAccess: (sessionFile: string) => void;
};
export function createSessionManagerCache(options?: {
clock?: () => number;
fsModule?: Pick<typeof fs, "open">;
ttlMs?: number | (() => number);
}): SessionManagerCache {
const getTtlMs = () =>
typeof options?.ttlMs === "function"
? options.ttlMs()
: (options?.ttlMs ?? getSessionManagerTtl());
const cache = createExpiringMapCache<string, true>({
ttlMs: getTtlMs,
pruneIntervalMs: resolveSessionManagerCachePruneInterval,
clock: options?.clock,
});
const fsModule = options?.fsModule ?? fs;
return {
clear: () => {
cache.clear();
},
isSessionManagerCached: (sessionFile) => cache.get(sessionFile) === true,
keys: () => cache.keys(),
prewarmSessionFile: async (sessionFile) => {
if (!isCacheEnabled(getTtlMs())) {
return;
}
if (cache.get(sessionFile) === true) {
return;
}
try {
// Read a small chunk to encourage OS page cache warmup.
const handle = await fsModule.open(sessionFile, "r");
try {
const buffer = Buffer.alloc(4096);
await handle.read(buffer, 0, buffer.length, 0);
} finally {
await handle.close();
}
cache.set(sessionFile, true);
} catch {
// File doesn't exist yet, SessionManager will create it
}
},
trackSessionManagerAccess: (sessionFile) => {
cache.set(sessionFile, true);
},
};
}
const sessionManagerCache = createSessionManagerCache();
export function trackSessionManagerAccess(sessionFile: string): void {
sessionManagerCache.trackSessionManagerAccess(sessionFile);
}
export async function prewarmSessionFile(sessionFile: string): Promise<void> {
await sessionManagerCache.prewarmSessionFile(sessionFile);
}

View File

@@ -1,4 +1,3 @@
import fs from "node:fs/promises";
import {
hasConfiguredModelFallbacks,
resolveAgentConfig,
@@ -22,8 +21,11 @@ import {
type SessionEntry,
updateSessionStoreEntry,
} from "../../config/sessions.js";
import {
hasSqliteSessionTranscriptEvents,
loadSqliteSessionTranscriptEvents,
} from "../../config/sessions/transcript-store.sqlite.js";
import type { TypingMode } from "../../config/types.js";
import { resolveSessionTranscriptCandidates } from "../../gateway/session-utils.fs.js";
import { logVerbose } from "../../globals.js";
import { emitAgentEvent } from "../../infra/agent-events.js";
import { emitTrustedDiagnosticEvent, isDiagnosticsEnabled } from "../../infra/diagnostic-events.js";
@@ -517,9 +519,8 @@ function formatContextManagementTraceBlock(
}
async function accumulateSessionUsageFromTranscript(params: {
agentId?: string;
sessionId?: string;
storePath?: string;
sessionFile?: string;
}): Promise<
| {
input?: number;
@@ -535,30 +536,20 @@ async function accumulateSessionUsageFromTranscript(params: {
return undefined;
}
try {
const candidates = resolveSessionTranscriptCandidates(
sessionId,
params.storePath,
params.sessionFile,
);
let transcriptText: string | undefined;
for (const candidate of candidates) {
try {
transcriptText = await fs.readFile(candidate, "utf-8");
break;
} catch {
continue;
}
}
if (!transcriptText) {
const agentId = normalizeOptionalString(params.agentId);
if (!agentId || !hasSqliteSessionTranscriptEvents({ agentId, sessionId })) {
return undefined;
}
const transcriptLines = loadSqliteSessionTranscriptEvents({ agentId, sessionId }).map((entry) =>
JSON.stringify(entry.event),
);
let input = 0;
let output = 0;
let cacheRead = 0;
let cacheWrite = 0;
let sawUsage = false;
for (const line of transcriptText.split(/\r?\n/)) {
for (const line of transcriptLines) {
if (!line.trim()) {
continue;
}
@@ -1838,9 +1829,8 @@ export async function runReplyAgent(params: {
const sessionUsage =
traceAuthorized && activeSessionEntry?.traceLevel === "raw"
? await accumulateSessionUsageFromTranscript({
agentId: followupRun.run.agentId,
sessionId: runResult.meta?.agentMeta?.sessionId ?? followupRun.run.sessionId,
storePath,
sessionFile: followupRun.run.sessionFile,
})
: undefined;
const traceEnabledForSender =

View File

@@ -2,11 +2,6 @@ import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { HookRunner } from "../../plugins/hooks.js";
import type { HandleCommandsParams } from "./commands-types.js";
const fsMocks = vi.hoisted(() => ({
readFile: vi.fn(),
readdir: vi.fn(),
}));
const hookRunnerMocks = vi.hoisted(() => ({
hasHooks: vi.fn<HookRunner["hasHooks"]>(),
runBeforeReset: vi.fn<HookRunner["runBeforeReset"]>(),
@@ -17,20 +12,6 @@ const sqliteTranscriptMocks = vi.hoisted(() => ({
hasSqliteSessionTranscriptEvents: vi.fn(() => false),
}));
vi.mock("node:fs/promises", async () => {
const actual = await vi.importActual<typeof import("node:fs/promises")>("node:fs/promises");
return {
...actual,
default: {
...actual,
readFile: fsMocks.readFile,
readdir: fsMocks.readdir,
},
readFile: fsMocks.readFile,
readdir: fsMocks.readdir,
};
});
vi.mock("../../config/sessions/transcript-store.sqlite.js", () => ({
exportSqliteSessionTranscriptJsonl: sqliteTranscriptMocks.exportSqliteSessionTranscriptJsonl,
hasSqliteSessionTranscriptEvents: sqliteTranscriptMocks.hasSqliteSessionTranscriptEvents,
@@ -75,14 +56,10 @@ describe("emitResetCommandHooks", () => {
}
beforeEach(() => {
fsMocks.readFile.mockReset();
fsMocks.readdir.mockReset();
hookRunnerMocks.hasHooks.mockReset();
hookRunnerMocks.runBeforeReset.mockReset();
hookRunnerMocks.hasHooks.mockImplementation((hookName) => hookName === "before_reset");
hookRunnerMocks.runBeforeReset.mockResolvedValue(undefined);
fsMocks.readFile.mockResolvedValue("");
fsMocks.readdir.mockResolvedValue([]);
sqliteTranscriptMocks.exportSqliteSessionTranscriptJsonl.mockReturnValue("");
sqliteTranscriptMocks.hasSqliteSessionTranscriptEvents.mockReturnValue(false);
});
@@ -121,16 +98,7 @@ describe("emitResetCommandHooks", () => {
});
});
it("recovers the archived transcript when the original reset transcript path is gone", async () => {
fsMocks.readFile.mockRejectedValueOnce(Object.assign(new Error("ENOENT"), { code: "ENOENT" }));
fsMocks.readdir.mockResolvedValueOnce(["prev-session.jsonl.reset.2026-02-16T22-26-33.000Z"]);
fsMocks.readFile.mockResolvedValueOnce(
`${JSON.stringify({
type: "message",
id: "m1",
message: { role: "user", content: "Recovered from archive" },
})}\n`,
);
it("fires before_reset with empty messages when no scoped SQLite transcript exists", async () => {
const command = {
surface: "telegram",
senderId: "vac",
@@ -156,8 +124,8 @@ describe("emitResetCommandHooks", () => {
await vi.waitFor(() => expect(hookRunnerMocks.runBeforeReset).toHaveBeenCalledTimes(1));
expect(hookRunnerMocks.runBeforeReset).toHaveBeenCalledWith(
expect.objectContaining({
sessionFile: "/tmp/prev-session.jsonl.reset.2026-02-16T22-26-33.000Z",
messages: [{ role: "user", content: "Recovered from archive" }],
sessionFile: "/tmp/prev-session.jsonl",
messages: [],
reason: "new",
}),
expect.objectContaining({
@@ -210,7 +178,6 @@ describe("emitResetCommandHooks", () => {
agentId: "target",
sessionId: "prev-session",
});
expect(fsMocks.readFile).not.toHaveBeenCalled();
expect(hookRunnerMocks.runBeforeReset).toHaveBeenCalledWith(
expect.objectContaining({
sessionFile: "/tmp/prev-session.jsonl",

View File

@@ -1,5 +1,3 @@
import fs from "node:fs/promises";
import path from "node:path";
import {
exportSqliteSessionTranscriptJsonl,
hasSqliteSessionTranscriptEvents,
@@ -37,21 +35,6 @@ function parseTranscriptMessages(content: string): unknown[] {
return messages;
}
/**
 * Locate the most recent archived copy of a reset transcript.
 *
 * Archives sit beside the original file as `<basename>.reset.<timestamp>`;
 * because the suffix is a timestamp, plain lexicographic order doubles as
 * chronological order, so the last sorted entry is the newest archive.
 *
 * @param sessionFile absolute path of the original transcript file
 * @returns full path of the newest archive, or undefined when no archive
 *          exists or the containing directory cannot be read
 */
async function findLatestArchivedTranscript(sessionFile: string): Promise<string | undefined> {
  const dir = path.dirname(sessionFile);
  const prefix = `${path.basename(sessionFile)}.reset.`;
  try {
    const candidates = (await fs.readdir(dir)).filter((name) => name.startsWith(prefix));
    candidates.sort();
    const newest = candidates[candidates.length - 1];
    return newest === undefined ? undefined : path.join(dir, newest);
  } catch {
    // Directory missing/unreadable: treat as "no archive" rather than failing.
    return undefined;
  }
}
type BeforeResetTranscriptScope = {
agentId?: string;
sessionFile?: string;
@@ -105,45 +88,10 @@ async function loadBeforeResetTranscript(params: {
return scopedTranscript;
}
const sessionFile = params.sessionFile;
if (!sessionFile) {
logVerbose("before_reset: no session file available, firing hook with empty messages");
return { sessionFile, messages: [] };
}
try {
return {
sessionFile,
messages: parseTranscriptMessages(await fs.readFile(sessionFile, "utf-8")),
};
} catch (err: unknown) {
if ((err as { code?: unknown })?.code !== "ENOENT") {
logVerbose(
`before_reset: failed to read session file ${sessionFile}; firing hook with empty messages (${String(err)})`,
);
return { sessionFile, messages: [] };
}
}
const archivedSessionFile = await findLatestArchivedTranscript(sessionFile);
if (!archivedSessionFile) {
logVerbose(
`before_reset: failed to find archived transcript for ${sessionFile}; firing hook with empty messages`,
);
return { sessionFile, messages: [] };
}
try {
return {
sessionFile: archivedSessionFile,
messages: parseTranscriptMessages(await fs.readFile(archivedSessionFile, "utf-8")),
};
} catch (err: unknown) {
logVerbose(
`before_reset: failed to read archived session file ${archivedSessionFile}; firing hook with empty messages (${String(err)})`,
);
return { sessionFile: archivedSessionFile, messages: [] };
}
logVerbose(
"before_reset: no scoped SQLite transcript available, firing hook with empty messages",
);
return { sessionFile: params.sessionFile, messages: [] };
}
export async function emitResetCommandHooks(params: {

View File

@@ -3,7 +3,8 @@ import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { OpenClawConfig } from "../../config/config.js";
import type { SessionEntry } from "../../config/sessions.js";
import { saveSessionStore, type SessionEntry } from "../../config/sessions.js";
import { replaceSqliteSessionTranscriptEvents } from "../../config/sessions/transcript-store.sqlite.js";
import type { HookRunner } from "../../plugins/hooks.js";
import { initSessionState } from "./session.js";
@@ -69,8 +70,7 @@ async function writeStore(
storePath: string,
store: Record<string, SessionEntry | Record<string, unknown>>,
): Promise<void> {
await fs.mkdir(path.dirname(storePath), { recursive: true });
await fs.writeFile(storePath, JSON.stringify(store), "utf-8");
await saveSessionStore(storePath, store as Record<string, SessionEntry>);
}
async function writeTranscript(
@@ -79,15 +79,18 @@ async function writeTranscript(
text = "hello",
): Promise<string> {
const transcriptPath = path.join(path.dirname(storePath), `${sessionId}.jsonl`);
await fs.writeFile(
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId,
transcriptPath,
`${JSON.stringify({
type: "message",
id: `${sessionId}-m1`,
message: { role: "user", content: text },
})}\n`,
"utf-8",
);
events: [
{
type: "message",
id: `${sessionId}-m1`,
message: { role: "user", content: text },
},
],
});
return transcriptPath;
}
@@ -183,7 +186,7 @@ describe("session hook context wiring", () => {
it("passes sessionKey to session_end hook context on reset", async () => {
const sessionKey = "agent:main:telegram:direct:123";
const { storePath } = await createStoredSession({
const { storePath, transcriptPath } = await createStoredSession({
prefix: "openclaw-session-hook-end",
sessionKey,
sessionId: "old-session",
@@ -202,10 +205,9 @@ describe("session hook context wiring", () => {
expectFields(event, {
sessionKey,
reason: "new",
transcriptArchived: true,
});
expectFields(context, { sessionKey, agentId: "main", sessionId: event?.sessionId });
expect(event?.sessionFile).toContain(".jsonl.reset.");
expect(event?.sessionFile).toBe(transcriptPath);
const [startEvent, startContext] = hookRunnerMocks.runSessionStart.mock.calls[0] ?? [];
expectFields(startEvent, { resumedFrom: "old-session" });
@@ -258,7 +260,7 @@ describe("session hook context wiring", () => {
expectFields(event, { reason: "new" });
});
it("marks daily stale rollovers and exposes the archived transcript path", async () => {
it("marks daily stale rollovers and exposes the stable transcript path", async () => {
vi.useFakeTimers();
try {
vi.setSystemTime(new Date(2026, 0, 18, 5, 0, 0));
@@ -275,9 +277,8 @@ describe("session hook context wiring", () => {
const [startEvent] = hookRunnerMocks.runSessionStart.mock.calls[0] ?? [];
expectFields(event, {
reason: "daily",
transcriptArchived: true,
});
expect(event?.sessionFile).toContain(".jsonl.reset.");
expect(event?.sessionFile).toContain("daily-session.jsonl");
expect(event?.nextSessionId).toBe(startEvent?.sessionId);
} finally {
vi.useRealTimers();

View File

@@ -55,7 +55,6 @@ export function buildSessionEndHookPayload(params: {
durationMs?: number;
reason?: PluginHookSessionEndReason;
sessionFile?: string;
transcriptArchived?: boolean;
nextSessionId?: string;
nextSessionKey?: string;
}): {
@@ -70,7 +69,6 @@ export function buildSessionEndHookPayload(params: {
durationMs: params.durationMs,
reason: params.reason,
sessionFile: params.sessionFile,
transcriptArchived: params.transcriptArchived,
nextSessionId: params.nextSessionId,
nextSessionKey: params.nextSessionKey,
},

View File

@@ -87,9 +87,8 @@ describe("session-updates lifecycle hooks", () => {
sessionId: "s1",
sessionKey,
reason: "compaction",
transcriptArchived: false,
});
expect(endEvent?.sessionFile).toBe(await fs.realpath(transcriptPath));
expect(endEvent?.sessionFile).toBe(path.resolve(transcriptPath));
expect(endContext).toMatchObject({
sessionId: "s1",
sessionKey,

View File

@@ -78,7 +78,6 @@ function emitCompactionSessionLifecycleHooks(params: {
cfg: params.cfg,
reason: "compaction",
sessionFile: transcript.sessionFile,
transcriptArchived: transcript.transcriptArchived,
nextSessionId: params.nextEntry.sessionId,
});
void hookRunner.runSessionEnd(payload.event, payload.context).catch((err) => {

View File

@@ -8,7 +8,11 @@ import {
getOrCreateSessionMcpRuntime,
} from "../../agents/pi-bundle-mcp-tools.js";
import type { OpenClawConfig } from "../../config/config.js";
import { saveSessionStore, type SessionEntry } from "../../config/sessions.js";
import { loadSessionStore, saveSessionStore, type SessionEntry } from "../../config/sessions.js";
import {
loadSqliteSessionTranscriptEvents,
replaceSqliteSessionTranscriptEvents,
} from "../../config/sessions/transcript-store.sqlite.js";
import { formatZonedTimestamp } from "../../infra/format-time/format-datetime.ts";
import {
__testing as sessionBindingTesting,
@@ -154,8 +158,11 @@ async function writeSessionStoreFast(
storePath: string,
store: Record<string, SessionEntry | Record<string, unknown>>,
): Promise<void> {
await fs.mkdir(path.dirname(storePath), { recursive: true });
await fs.writeFile(storePath, JSON.stringify(store), "utf-8");
await saveSessionStore(storePath, store as Record<string, SessionEntry>);
}
function readSessionStoreFast(storePath: string): Record<string, SessionEntry> {
return loadSessionStore(storePath);
}
function setMinimalCurrentConversationBindingRegistryForTests(): void {
@@ -2312,7 +2319,7 @@ describe("initSessionState preserves behavior overrides across /new and /reset",
expect(result.sessionEntry.cliSessionBindings).toBeUndefined();
expect(result.sessionEntry.claudeCliSessionId).toBeUndefined();
const stored = JSON.parse(await fs.readFile(storePath, "utf-8"));
const stored = readSessionStoreFast(storePath);
expect(stored[sessionKey].cliSessionIds).toBeUndefined();
expect(stored[sessionKey].cliSessionBindings).toBeUndefined();
expect(stored[sessionKey].claudeCliSessionId).toBeUndefined();
@@ -2548,7 +2555,7 @@ describe("initSessionState preserves behavior overrides across /new and /reset",
expect(result.sessionId).toBe(existingSessionId);
});
it("archives the old session store entry on /new", async () => {
it("deletes the old SQLite transcript on /new", async () => {
const storePath = await createStorePath("openclaw-archive-old-");
const sessionKey = "agent:main:telegram:dm:user-archive";
const existingSessionId = "existing-session-archive";
@@ -2557,9 +2564,14 @@ describe("initSessionState preserves behavior overrides across /new and /reset",
storePath,
sessionKey,
sessionId: existingSessionId,
overrides: { verboseLevel: "on" },
overrides: { sessionFile: transcriptPath, verboseLevel: "on" },
});
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: existingSessionId,
transcriptPath,
events: [{ type: "message" }],
});
await fs.writeFile(transcriptPath, '{"type":"message"}\n', "utf8");
const cfg = {
session: { store: storePath, idleMinutes: 999 },
@@ -2583,14 +2595,12 @@ describe("initSessionState preserves behavior overrides across /new and /reset",
expect(result.isNewSession).toBe(true);
expect(result.resetTriggered).toBe(true);
expect(await fs.stat(transcriptPath).catch(() => null)).toBeNull();
const archived = (await fs.readdir(path.dirname(storePath))).filter((entry) =>
entry.startsWith(`${existingSessionId}.jsonl.reset.`),
);
expect(archived).toHaveLength(1);
expect(
loadSqliteSessionTranscriptEvents({ agentId: "main", sessionId: existingSessionId }),
).toEqual([]);
});
it("archives the old session transcript on daily/scheduled reset (stale session)", async () => {
it("deletes the old SQLite transcript on daily/scheduled reset (stale session)", async () => {
// Daily resets occur when the session becomes stale (not via /new or /reset command).
// Previously, previousSessionEntry was only set when resetTriggered=true, leaving
// old transcript files orphaned on disk. Refs #35481.
@@ -2600,16 +2610,22 @@ describe("initSessionState preserves behavior overrides across /new and /reset",
vi.setSystemTime(new Date(2026, 0, 18, 5, 0, 0));
const storePath = await createStorePath("openclaw-stale-archive-");
const sessionKey = "agent:main:telegram:dm:archive-stale-user";
const existingSessionId = "stale-session-to-be-archived";
const existingSessionId = "stale-session-to-delete";
const transcriptPath = path.join(path.dirname(storePath), `${existingSessionId}.jsonl`);
await writeSessionStoreFast(storePath, {
[sessionKey]: {
sessionId: existingSessionId,
sessionFile: transcriptPath,
updatedAt: new Date(2026, 0, 18, 3, 0, 0).getTime(),
},
});
await fs.writeFile(transcriptPath, '{"type":"message"}\n', "utf8");
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: existingSessionId,
transcriptPath,
events: [{ type: "message" }],
});
const cfg = { session: { store: storePath } } as OpenClawConfig;
const result = await initSessionState({
@@ -2631,11 +2647,9 @@ describe("initSessionState preserves behavior overrides across /new and /reset",
expect(result.isNewSession).toBe(true);
expect(result.resetTriggered).toBe(false);
expect(result.sessionId).not.toBe(existingSessionId);
expect(await fs.stat(transcriptPath).catch(() => null)).toBeNull();
const archived = (await fs.readdir(path.dirname(storePath))).filter((entry) =>
entry.startsWith(`${existingSessionId}.jsonl.reset.`),
);
expect(archived).toHaveLength(1);
expect(
loadSqliteSessionTranscriptEvents({ agentId: "main", sessionId: existingSessionId }),
).toEqual([]);
} finally {
vi.useRealTimers();
}
@@ -2658,6 +2672,7 @@ describe("initSessionState preserves behavior overrides across /new and /reset",
await writeSessionStoreFast(storePath, {
[sessionKey]: {
sessionId: existingSessionId,
sessionFile: transcriptPath,
updatedAt: new Date(2026, 0, 18, 3, 0, 0).getTime(),
modelProvider: "claude-cli",
model: "claude-opus-4-6",
@@ -2670,7 +2685,12 @@ describe("initSessionState preserves behavior overrides across /new and /reset",
claudeCliSessionId: cliBinding.sessionId,
},
});
await fs.writeFile(transcriptPath, '{"type":"message"}\n', "utf8");
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: existingSessionId,
transcriptPath,
events: [{ type: "message" }],
});
const cfg = { session: { store: storePath } } as OpenClawConfig;
const result = await initSessionState({
@@ -2692,14 +2712,9 @@ describe("initSessionState preserves behavior overrides across /new and /reset",
expect(result.isNewSession).toBe(false);
expect(result.sessionId).toBe(existingSessionId);
expect(result.sessionEntry.cliSessionBindings?.["claude-cli"]).toEqual(cliBinding);
const transcriptStat = await fs.stat(transcriptPath).catch(() => null);
if (!transcriptStat) {
throw new Error("expected transcript file to remain after stale reset");
}
const archived = (await fs.readdir(path.dirname(storePath))).filter((entry) =>
entry.startsWith(`${existingSessionId}.jsonl.reset.`),
);
expect(archived).toHaveLength(0);
expect(
loadSqliteSessionTranscriptEvents({ agentId: "main", sessionId: existingSessionId }),
).toHaveLength(1);
} finally {
vi.useRealTimers();
}
@@ -2877,12 +2892,9 @@ describe("persistSessionUsageUpdate", () => {
sessionKey: string;
entry: Record<string, unknown>;
}) {
await fs.mkdir(path.dirname(params.storePath), { recursive: true });
await fs.writeFile(
params.storePath,
JSON.stringify({ [params.sessionKey]: params.entry }, null, 2),
"utf-8",
);
await writeSessionStoreFast(params.storePath, {
[params.sessionKey]: params.entry,
});
}
it("uses lastCallUsage for totalTokens when provided", async () => {
@@ -2905,7 +2917,7 @@ describe("persistSessionUsageUpdate", () => {
contextTokensUsed: 200_000,
});
const stored = JSON.parse(await fs.readFile(storePath, "utf-8"));
const stored = readSessionStoreFast(storePath);
expect(stored[sessionKey].totalTokens).toBe(12_000);
expect(stored[sessionKey].totalTokensFresh).toBe(true);
expect(stored[sessionKey].inputTokens).toBe(180_000);
@@ -2939,7 +2951,7 @@ describe("persistSessionUsageUpdate", () => {
contextTokensUsed: 200_000,
});
const stored = JSON.parse(await fs.readFile(storePath, "utf-8"));
const stored = readSessionStoreFast(storePath);
expect(stored[sessionKey].inputTokens).toBe(100_000);
expect(stored[sessionKey].outputTokens).toBe(8_000);
expect(stored[sessionKey].cacheRead).toBe(18_000);
@@ -2962,7 +2974,7 @@ describe("persistSessionUsageUpdate", () => {
contextTokensUsed: 200_000,
});
const stored = JSON.parse(await fs.readFile(storePath, "utf-8"));
const stored = readSessionStoreFast(storePath);
expect(stored[sessionKey].totalTokens).toBeUndefined();
expect(stored[sessionKey].totalTokensFresh).toBe(false);
});
@@ -2984,7 +2996,7 @@ describe("persistSessionUsageUpdate", () => {
contextTokensUsed: 200_000,
});
const stored = JSON.parse(await fs.readFile(storePath, "utf-8"));
const stored = readSessionStoreFast(storePath);
expect(stored[sessionKey].totalTokens).toBe(42_000);
expect(stored[sessionKey].totalTokensFresh).toBe(true);
});
@@ -3013,7 +3025,7 @@ describe("persistSessionUsageUpdate", () => {
contextTokensUsed: 200_000,
});
const stored = JSON.parse(await fs.readFile(storePath, "utf-8"));
const stored = readSessionStoreFast(storePath);
expect(stored[sessionKey].totalTokens).toBe(32_000);
expect(stored[sessionKey].totalTokensFresh).toBe(true);
expect(stored[sessionKey].cliSessionIds?.["claude-cli"]).toBe("cli-session-1");
@@ -3047,7 +3059,7 @@ describe("persistSessionUsageUpdate", () => {
contextTokensUsed: 200_000,
});
const stored = JSON.parse(await fs.readFile(storePath, "utf-8"));
const stored = readSessionStoreFast(storePath);
expect(stored[sessionKey].totalTokens).toBe(39_000);
expect(stored[sessionKey].totalTokensFresh).toBe(true);
expect(stored[sessionKey].inputTokens).toBe(1_234);
@@ -3071,7 +3083,7 @@ describe("persistSessionUsageUpdate", () => {
contextTokensUsed: 200_000,
});
const stored = JSON.parse(await fs.readFile(storePath, "utf-8"));
const stored = readSessionStoreFast(storePath);
expect(stored[sessionKey].totalTokens).toBe(250_000);
expect(stored[sessionKey].totalTokensFresh).toBe(true);
});
@@ -3122,7 +3134,7 @@ describe("persistSessionUsageUpdate", () => {
contextTokensUsed: 200_000,
});
const stored1 = JSON.parse(await fs.readFile(storePath, "utf-8"));
const stored1 = readSessionStoreFast(storePath);
expect(stored1[sessionKey].estimatedCostUsd).toBeCloseTo(0.007725, 8);
// Second persist with SAME cumulative usage (e.g., heartbeat or redundant persist)
@@ -3139,7 +3151,7 @@ describe("persistSessionUsageUpdate", () => {
contextTokensUsed: 200_000,
});
const stored2 = JSON.parse(await fs.readFile(storePath, "utf-8"));
const stored2 = readSessionStoreFast(storePath);
// Cost should still be $0.007725, NOT $0.01545
expect(stored2[sessionKey].estimatedCostUsd).toBeCloseTo(0.007725, 8);
});
@@ -3186,7 +3198,7 @@ describe("persistSessionUsageUpdate", () => {
contextTokensUsed: 200_000,
});
const stored = JSON.parse(await fs.readFile(storePath, "utf-8"));
const stored = readSessionStoreFast(storePath);
expect(stored[sessionKey].estimatedCostUsd).toBe(0);
});
});
@@ -3285,10 +3297,7 @@ describe("initSessionState dmScope delivery migration", () => {
});
expect(result.sessionKey).toBe("agent:main:telegram:direct:6101296751");
const persisted = JSON.parse(await fs.readFile(storePath, "utf-8")) as Record<
string,
SessionEntry
>;
const persisted = readSessionStoreFast(storePath);
expect(persisted["agent:main:main"]?.sessionId).toBe("legacy-main");
expect(persisted["agent:main:main"]?.deliveryContext).toBeUndefined();
expect(persisted["agent:main:main"]?.lastChannel).toBeUndefined();
@@ -3330,10 +3339,7 @@ describe("initSessionState dmScope delivery migration", () => {
commandAuthorized: true,
});
const persisted = JSON.parse(await fs.readFile(storePath, "utf-8")) as Record<
string,
SessionEntry
>;
const persisted = readSessionStoreFast(storePath);
expect(persisted["agent:main:main"]?.deliveryContext).toEqual({
channel: "telegram",
to: "1111",
@@ -3397,10 +3403,7 @@ describe("initSessionState internal channel routing preservation", () => {
accountId: "default",
});
const persisted = JSON.parse(await fs.readFile(storePath, "utf-8")) as Record<
string,
SessionEntry
>;
const persisted = readSessionStoreFast(storePath);
expect(persisted[sessionKey]?.lastThreadId).toBeUndefined();
expect(persisted[sessionKey]?.deliveryContext).toEqual({
channel: "mattermost",

View File

@@ -24,6 +24,7 @@ import { resolveAndPersistSessionFile } from "../../config/sessions/session-file
import { resolveSessionKey } from "../../config/sessions/session-key.js";
import { loadSessionStore, updateSessionStore } from "../../config/sessions/store.js";
import { parseSessionThreadInfoFast } from "../../config/sessions/thread-info.js";
import { deleteSqliteSessionTranscript } from "../../config/sessions/transcript-store.sqlite.js";
import {
DEFAULT_RESET_TRIGGERS,
type GroupKeyResolution,
@@ -32,6 +33,7 @@ import {
} from "../../config/sessions/types.js";
import type { OpenClawConfig } from "../../config/types.openclaw.js";
import type { TtsAutoMode } from "../../config/types.tts.js";
import { resolveStableSessionEndTranscript } from "../../gateway/session-transcript-files.fs.js";
import { getSessionBindingService } from "../../infra/outbound/session-binding-service.js";
import { createSubsystemLogger } from "../../logging/subsystem.js";
import { closeTrackedBrowserTabsForSessions } from "../../plugin-sdk/browser-maintenance.js";
@@ -39,7 +41,6 @@ import { getGlobalHookRunner } from "../../plugins/hook-runner-global.js";
import type { PluginHookSessionEndReason } from "../../plugins/hook-types.js";
import { isAcpSessionKey, normalizeMainKey } from "../../routing/session-key.js";
import { isInterSessionInputProvenance } from "../../sessions/input-provenance.js";
import { createLazyImportLoader } from "../../shared/lazy-promise.js";
import {
normalizeLowercaseStringOrEmpty,
normalizeOptionalLowercaseString,
@@ -64,13 +65,6 @@ import { buildSessionEndHookPayload, buildSessionStartHookPayload } from "./sess
import { clearSessionResetRuntimeState } from "./session-reset-cleanup.js";
const log = createSubsystemLogger("session-init");
const sessionArchiveRuntimeLoader = createLazyImportLoader(
() => import("../../gateway/session-archive.runtime.js"),
);
function loadSessionArchiveRuntime() {
return sessionArchiveRuntimeLoader.load();
}
function stripThreadIdFromDeliveryContext(
context: SessionEntry["deliveryContext"],
@@ -457,10 +451,8 @@ export async function initSessionState(params: {
(isSystemEvent && canReuseExistingEntry) ||
(entryFreshness?.fresh ?? false) ||
(softResetAllowed && canReuseExistingEntry);
// Capture the current session entry before any reset so its transcript can be
// archived afterward. We need to do this for both explicit resets (/new, /reset)
// and for scheduled/daily resets where the session has become stale (!freshEntry).
// Without this, daily-reset transcripts are left as orphaned files on disk (#35481).
// Capture the current session entry before any reset so hooks and cleanup can
// reference it. This covers explicit resets and scheduled/daily stale rollovers.
const previousSessionEntry = (resetTriggered || !freshEntry) && entry ? { ...entry } : undefined;
const previousSessionEndReason = resetTriggered
? resolveExplicitSessionEndReason(matchedResetTriggerLower)
@@ -785,27 +777,16 @@ export async function initSessionState(params: {
}
});
// Archive old transcript so it doesn't accumulate on disk (#14869).
// Resolve the previous transcript before rotating session metadata.
let previousSessionTranscript: {
sessionFile?: string;
transcriptArchived?: boolean;
} = {};
if (previousSessionEntry?.sessionId) {
const { archiveSessionTranscriptsDetailed, resolveStableSessionEndTranscript } =
await loadSessionArchiveRuntime();
const archivedTranscripts = archiveSessionTranscriptsDetailed({
sessionId: previousSessionEntry.sessionId,
storePath,
sessionFile: previousSessionEntry.sessionFile,
agentId,
reason: "reset",
});
previousSessionTranscript = resolveStableSessionEndTranscript({
sessionId: previousSessionEntry.sessionId,
storePath,
sessionFile: previousSessionEntry.sessionFile,
agentId,
archivedTranscripts,
});
await retireSessionMcpRuntime({
sessionId: previousSessionEntry.sessionId,
@@ -861,7 +842,6 @@ export async function initSessionState(params: {
cfg,
reason: previousSessionEndReason,
sessionFile: previousSessionTranscript.sessionFile,
transcriptArchived: previousSessionTranscript.transcriptArchived,
nextSessionId: effectiveSessionId,
});
void hookRunner.runSessionEnd(payload.event, payload.context).catch(() => {});
@@ -880,6 +860,19 @@ export async function initSessionState(params: {
}
}
if (
previousSessionEntry?.sessionId &&
previousSessionEntry.sessionId !== sessionId &&
!Object.values(loadSessionStore(storePath)).some(
(candidate) => candidate.sessionId === previousSessionEntry.sessionId,
)
) {
deleteSqliteSessionTranscript({
agentId,
sessionId: previousSessionEntry.sessionId,
});
}
return {
sessionCtx,
sessionEntry,

View File

@@ -6,7 +6,7 @@ import {
resolveOAuthDir,
resolveStateDir,
} from "../config/config.js";
import { formatSessionArchiveTimestamp } from "../config/sessions/artifacts.js";
import { formatFilesystemTimestamp } from "../config/sessions/artifacts.js";
import { pathExists, shortenHomePath } from "../utils.js";
import { buildCleanupPlan, isPathWithin } from "./cleanup-utils.js";
@@ -59,7 +59,7 @@ function backupAssetPriority(kind: BackupAssetKind): number {
}
export function buildBackupArchiveRoot(nowMs = Date.now()): string {
return `${formatSessionArchiveTimestamp(nowMs)}-openclaw-backup`;
return `${formatFilesystemTimestamp(nowMs)}-openclaw-backup`;
}
export function buildBackupArchiveBasename(nowMs = Date.now()): string {

View File

@@ -1,6 +1,6 @@
import fs from "node:fs";
import { isHeartbeatOkResponse, isHeartbeatUserMessage } from "../auto-reply/heartbeat-filter.js";
import { formatSessionArchiveTimestamp } from "../config/sessions/artifacts.js";
import { formatFilesystemTimestamp } from "../config/sessions/artifacts.js";
import { resolveMainSessionKey } from "../config/sessions/main-session.js";
import {
resolveSessionFilePath,
@@ -156,7 +156,7 @@ function resolveHeartbeatMainRecoveryKey(params: {
if (!parsed) {
return null;
}
const stamp = formatSessionArchiveTimestamp(params.nowMs).toLowerCase();
const stamp = formatFilesystemTimestamp(params.nowMs).toLowerCase();
const base = `agent:${parsed.agentId}:heartbeat-recovered-${stamp}`;
if (!params.store[base]) {
return base;
@@ -285,7 +285,7 @@ export async function repairHeartbeatPoisonedMainSession(params: {
entry: currentEntry,
transcriptPath,
});
if (!currentCandidate) {
if (!currentCandidate && currentEntry?.sessionId !== mainEntry.sessionId) {
return;
}
if (moveHeartbeatMainSessionEntry({ store: currentStore, mainKey, recoveredKey })) {

View File

@@ -10,6 +10,7 @@ vi.mock("../terminal/note.js", () => ({
}));
import { loadSqliteSessionTranscriptEvents } from "../config/sessions/transcript-store.sqlite.js";
import { readOpenClawStateKvJson } from "../state/openclaw-state-kv.js";
import {
noteSessionTranscriptHealth,
repairBrokenSessionTranscriptFile,
@@ -178,6 +179,36 @@ describe("doctor session transcript repair", () => {
expect(message).toContain("Imported 1 transcript file into SQLite");
});
// Repair mode (shouldRepair: true) should migrate a legacy
// `<session>.jsonl.codex-app-server.json` sidecar into the state KV under the
// "codex_app_server_thread_bindings" scope, delete the sidecar file, and
// report the import count in the "Session transcripts" doctor note.
it("imports legacy Codex app-server binding sidecars during repair mode", async () => {
const sessionsDir = path.join(root, "agents", "main", "sessions");
await fs.mkdir(sessionsDir, { recursive: true });
const sessionFile = path.join(sessionsDir, "session.jsonl");
const sidecarPath = `${sessionFile}.codex-app-server.json`;
// Minimal valid sidecar payload: schemaVersion 1 plus a non-blank threadId.
await fs.writeFile(
sidecarPath,
JSON.stringify({
schemaVersion: 1,
threadId: "thread-123",
cwd: root,
model: "gpt-5.5",
}),
);
await noteSessionTranscriptHealth({ shouldRepair: true, sessionDirs: [sessionsDir] });
// Sidecar file must be removed from disk after the import...
await expect(fs.access(sidecarPath)).rejects.toThrow();
// ...and its normalized payload must now live in the KV, keyed by sessionFile.
expect(readOpenClawStateKvJson("codex_app_server_thread_bindings", sessionFile)).toMatchObject({
schemaVersion: 1,
threadId: "thread-123",
sessionFile,
cwd: root,
model: "gpt-5.5",
});
const [message, title] = note.mock.calls[0] as [string, string];
expect(title).toBe("Session transcripts");
expect(message).toContain("Imported 1 Codex app-server binding sidecar into SQLite");
});
it("ignores ordinary branch history without internal runtime context", async () => {
const filePath = await writeTranscript([
{ type: "session", version: 3, id: "session-1", timestamp: "2026-04-25T00:00:00Z" },

View File

@@ -9,9 +9,16 @@ import { resolveAgentSessionDirs } from "../agents/session-dirs.js";
import { resolveStateDir } from "../config/paths.js";
import { replaceSqliteSessionTranscriptEvents } from "../config/sessions/transcript-store.sqlite.js";
import { DEFAULT_AGENT_ID, normalizeAgentId } from "../routing/session-key.js";
import {
writeOpenClawStateKvJson,
type OpenClawStateJsonValue,
} from "../state/openclaw-state-kv.js";
import { note } from "../terminal/note.js";
import { shortenHomePath } from "../utils.js";
const CODEX_APP_SERVER_BINDING_SIDECAR_SUFFIX = ".codex-app-server.json";
const CODEX_APP_SERVER_BINDING_KV_SCOPE = "codex_app_server_thread_bindings";
type TranscriptEntry = Record<string, unknown> & {
id?: unknown;
parentId?: unknown;
@@ -35,6 +42,14 @@ type TranscriptMigrationResult = TranscriptRepairResult & {
sessionId?: string;
};
/** Outcome of migrating one Codex app-server binding sidecar file. */
type CodexAppServerBindingMigrationResult = {
// Path of the `.codex-app-server.json` sidecar that was processed.
filePath: string;
// Owning transcript path (sidecar path with the sidecar suffix stripped).
sessionFile: string;
// True when the payload was written into the state KV.
imported: boolean;
// True when the sidecar file was deleted after a successful import.
removedSource: boolean;
// Failure description (read/parse error or invalid payload); absent on
// success and on check-only runs.
reason?: string;
};
function parseTranscriptEntries(raw: string): TranscriptEntry[] {
const entries: TranscriptEntry[] = [];
for (const line of raw.split(/\r?\n/)) {
@@ -345,6 +360,107 @@ async function listSessionTranscriptFiles(sessionDirs: string[]): Promise<string
return files.toSorted((a, b) => a.localeCompare(b));
}
/**
 * Collect Codex app-server binding sidecar files (`*.codex-app-server.json`)
 * across the given session directories.
 *
 * Directories that cannot be read are skipped silently; only regular files
 * with the sidecar suffix are included.
 *
 * @param sessionDirs absolute session directory paths to scan
 * @returns matching sidecar paths, sorted with localeCompare for stable output
 */
async function listCodexAppServerBindingSidecars(sessionDirs: string[]): Promise<string[]> {
  const sidecars: string[] = [];
  for (const dir of sessionDirs) {
    let dirEntries: Dirent[];
    try {
      dirEntries = await fs.readdir(dir, { withFileTypes: true });
    } catch {
      continue;
    }
    const matches = dirEntries
      .filter(
        (entry) => entry.isFile() && entry.name.endsWith(CODEX_APP_SERVER_BINDING_SIDECAR_SUFFIX),
      )
      .map((entry) => path.join(dir, entry.name));
    sidecars.push(...matches);
  }
  return sidecars.toSorted((a, b) => a.localeCompare(b));
}
/** Derive the owning transcript path by stripping the sidecar suffix. */
function resolveCodexAppServerBindingSessionFile(sidecarPath: string): string {
  const suffixLength = CODEX_APP_SERVER_BINDING_SIDECAR_SUFFIX.length;
  return sidecarPath.slice(0, sidecarPath.length - suffixLength);
}
/**
 * Validate and coerce a parsed sidecar JSON payload into the KV record shape.
 *
 * Only schemaVersion 1 payloads with a non-blank string threadId are accepted;
 * anything else yields undefined so the caller can report an invalid binding
 * payload. Optional string fields are copied through when present; `cwd`
 * defaults to "" and missing timestamps are replaced with the current time.
 *
 * @param sessionFile transcript path the binding belongs to (embedded in payload)
 * @param value result of JSON.parse on the sidecar file
 * @returns normalized payload, or undefined when the payload is unusable
 */
function normalizeCodexAppServerBindingPayload(
  sessionFile: string,
  value: unknown,
): OpenClawStateJsonValue | undefined {
  if (typeof value !== "object" || value === null || Array.isArray(value)) {
    return undefined;
  }
  const raw = value as Record<string, unknown>;
  const threadId = raw.threadId;
  if (raw.schemaVersion !== 1 || typeof threadId !== "string" || threadId.trim() === "") {
    return undefined;
  }
  // Copy a field only when it is a string; otherwise leave it undefined.
  const str = (field: unknown): string | undefined =>
    typeof field === "string" ? field : undefined;
  const nowIso = () => new Date().toISOString();
  return {
    schemaVersion: 1,
    sessionFile,
    threadId,
    cwd: str(raw.cwd) ?? "",
    authProfileId: str(raw.authProfileId),
    model: str(raw.model),
    modelProvider: str(raw.modelProvider),
    approvalPolicy: str(raw.approvalPolicy),
    sandbox: str(raw.sandbox),
    serviceTier: str(raw.serviceTier),
    dynamicToolsFingerprint: str(raw.dynamicToolsFingerprint),
    createdAt: str(raw.createdAt) ?? nowIso(),
    updatedAt: str(raw.updatedAt) ?? nowIso(),
  } as OpenClawStateJsonValue;
}
/**
 * Import one legacy `.codex-app-server.json` sidecar into the state KV and
 * delete the file afterwards.
 *
 * In check-only mode (shouldRepair=false) a valid sidecar is left untouched
 * and reported as not imported. All failures — unreadable file, bad JSON,
 * invalid payload — are surfaced via `reason` instead of being thrown.
 *
 * @param params.filePath path of the sidecar file to migrate
 * @param params.shouldRepair when true, actually write the KV entry and remove
 *        the sidecar; when false, only validate
 * @returns migration outcome for reporting in the doctor note
 */
async function migrateCodexAppServerBindingSidecar(params: {
  filePath: string;
  shouldRepair: boolean;
}): Promise<CodexAppServerBindingMigrationResult> {
  const { filePath, shouldRepair } = params;
  const sessionFile = resolveCodexAppServerBindingSessionFile(filePath);
  // Shared result shape; every exit path is a variation on this base.
  const outcome = (
    overrides: Partial<CodexAppServerBindingMigrationResult>,
  ): CodexAppServerBindingMigrationResult => ({
    filePath,
    sessionFile,
    imported: false,
    removedSource: false,
    ...overrides,
  });
  try {
    const raw = await fs.readFile(filePath, "utf-8");
    const payload = normalizeCodexAppServerBindingPayload(sessionFile, JSON.parse(raw));
    if (!payload) {
      return outcome({ reason: "invalid binding payload" });
    }
    if (!shouldRepair) {
      // Dry run: payload is valid, but nothing is written or deleted.
      return outcome({});
    }
    writeOpenClawStateKvJson(CODEX_APP_SERVER_BINDING_KV_SCOPE, sessionFile, payload);
    await fs.rm(filePath, { force: true });
    return outcome({ imported: true, removedSource: true });
  } catch (error) {
    return outcome({ reason: String(error) });
  }
}
export async function noteSessionTranscriptHealth(params?: {
shouldRepair?: boolean;
sessionDirs?: string[];
@@ -359,7 +475,8 @@ export async function noteSessionTranscriptHealth(params?: {
}
const files = await listSessionTranscriptFiles(sessionDirs);
if (files.length === 0) {
const codexBindingSidecars = await listCodexAppServerBindingSidecars(sessionDirs);
if (files.length === 0 && codexBindingSidecars.length === 0) {
return;
}
@@ -367,31 +484,59 @@ export async function noteSessionTranscriptHealth(params?: {
for (const filePath of files) {
results.push(await migrateSessionTranscriptFileToSqlite({ filePath, shouldRepair }));
}
const codexBindingResults: CodexAppServerBindingMigrationResult[] = [];
for (const filePath of codexBindingSidecars) {
codexBindingResults.push(await migrateCodexAppServerBindingSidecar({ filePath, shouldRepair }));
}
const broken = results.filter((result) => result.broken);
const imported = results.filter((result) => result.imported);
const failed = results.filter((result) => result.reason && !result.imported);
const importedCodexBindings = codexBindingResults.filter((result) => result.imported);
const failedCodexBindings = codexBindingResults.filter(
(result) => result.reason && !result.imported,
);
const repairedCount = broken.filter((result) => result.repaired).length;
const legacyCount = results.length;
const lines = [
`- Found ${legacyCount} legacy transcript JSONL file${legacyCount === 1 ? "" : "s"} outside the SQLite session database.`,
...results.slice(0, 20).map((result) => {
const status = result.imported
? result.repaired
? "imported with active-branch repair"
: "imported"
: result.broken
? "needs import + repair"
: "needs import";
const reason = result.reason ? ` reason=${result.reason}` : "";
return `- ${shortenHomePath(result.filePath)} ${status} entries=${result.originalEntries}${reason}`;
}),
];
const lines: string[] = [];
if (legacyCount > 0) {
lines.push(
`- Found ${legacyCount} legacy transcript JSONL file${legacyCount === 1 ? "" : "s"} outside the SQLite session database.`,
);
lines.push(
...results.slice(0, 20).map((result) => {
const status = result.imported
? result.repaired
? "imported with active-branch repair"
: "imported"
: result.broken
? "needs import + repair"
: "needs import";
const reason = result.reason ? ` reason=${result.reason}` : "";
return `- ${shortenHomePath(result.filePath)} ${status} entries=${result.originalEntries}${reason}`;
}),
);
}
if (results.length > 20) {
lines.push(`- ...and ${results.length - 20} more.`);
}
if (codexBindingResults.length > 0) {
lines.push(
`- Found ${codexBindingResults.length} legacy Codex app-server binding sidecar${codexBindingResults.length === 1 ? "" : "s"} outside the SQLite state database.`,
);
lines.push(
...codexBindingResults.slice(0, 20).map((result) => {
const status = result.imported ? "imported" : "needs import";
const reason = result.reason ? ` reason=${result.reason}` : "";
return `- ${shortenHomePath(result.filePath)} ${status}${reason}`;
}),
);
if (codexBindingResults.length > 20) {
lines.push(`- ...and ${codexBindingResults.length - 20} more.`);
}
}
if (!shouldRepair) {
lines.push('- Run "openclaw doctor --fix" to import legacy transcripts into SQLite.');
lines.push('- Run "openclaw doctor --fix" to import legacy session files into SQLite.');
} else if (imported.length > 0) {
lines.push(
`- Imported ${imported.length} transcript file${imported.length === 1 ? "" : "s"} into SQLite and removed the JSONL source${imported.length === 1 ? "" : "s"}.`,
@@ -402,11 +547,21 @@ export async function noteSessionTranscriptHealth(params?: {
);
}
}
if (shouldRepair && importedCodexBindings.length > 0) {
lines.push(
`- Imported ${importedCodexBindings.length} Codex app-server binding sidecar${importedCodexBindings.length === 1 ? "" : "s"} into SQLite and removed the JSON source${importedCodexBindings.length === 1 ? "" : "s"}.`,
);
}
if (failed.length > 0) {
lines.push(
`- Could not import ${failed.length} transcript file${failed.length === 1 ? "" : "s"}; left source file${failed.length === 1 ? "" : "s"} in place.`,
);
}
if (failedCodexBindings.length > 0) {
lines.push(
`- Could not import ${failedCodexBindings.length} Codex app-server binding sidecar${failedCodexBindings.length === 1 ? "" : "s"}; left source file${failedCodexBindings.length === 1 ? "" : "s"} in place.`,
);
}
note(lines.join("\n"), "Session transcripts");
}

View File

@@ -8,7 +8,8 @@ import {
resolveStorePath,
resolveSessionTranscriptsDirForAgent,
} from "../config/sessions/paths.js";
import { loadSessionStore } from "../config/sessions/store.js";
import { loadSessionStore, saveSessionStore } from "../config/sessions/store.js";
import { replaceSqliteSessionTranscriptEvents } from "../config/sessions/transcript-store.sqlite.js";
import type { SessionEntry } from "../config/sessions/types.js";
import { closeOpenClawStateDatabaseForTest } from "../state/openclaw-state-db.js";
import {
@@ -120,13 +121,13 @@ async function runStateIntegrity(cfg: OpenClawConfig) {
return confirmRuntimeRepair;
}
function writeSessionStore(
async function writeSessionStore(
cfg: OpenClawConfig,
sessions: Record<string, { sessionId: string; updatedAt: number } & Record<string, unknown>>,
) {
setupSessionState(cfg, process.env, process.env.HOME ?? "");
const storePath = resolveStorePath(cfg.session?.store, { agentId: "main" });
fs.writeFileSync(storePath, JSON.stringify(sessions, null, 2));
await saveSessionStore(storePath, sessions as Record<string, SessionEntry>);
}
async function runStateIntegrityText(cfg: OpenClawConfig): Promise<string> {
@@ -294,7 +295,7 @@ describe("doctor state integrity oauth dir checks", () => {
it("warns about tombstoned subagent restart recovery sessions", async () => {
const cfg: OpenClawConfig = {};
writeSessionStore(cfg, {
await writeSessionStore(cfg, {
"agent:main:subagent:wedged-child": {
sessionId: "session-wedged-child",
updatedAt: Date.now(),
@@ -324,7 +325,7 @@ describe("doctor state integrity oauth dir checks", () => {
it("clears stale aborted recovery flags for tombstoned subagent sessions when approved", async () => {
const cfg: OpenClawConfig = {};
const sessionKey = "agent:main:subagent:wedged-child";
writeSessionStore(cfg, {
await writeSessionStore(cfg, {
[sessionKey]: {
sessionId: "session-wedged-child",
updatedAt: 0,
@@ -415,31 +416,28 @@ describe("doctor state integrity oauth dir checks", () => {
}
});
it("detects orphan transcripts and offers archival remediation", async () => {
it("detects orphan transcripts and offers delete remediation", async () => {
const cfg: OpenClawConfig = {};
setupSessionState(cfg, process.env, process.env.HOME ?? "");
const sessionsDir = resolveSessionTranscriptsDirForAgent("main", process.env, () => tempHome);
fs.writeFileSync(path.join(sessionsDir, "orphan-session.jsonl"), '{"type":"session"}\n');
const confirmRuntimeRepair = vi.fn(async (params: { message: string }) =>
params.message.includes("This only renames them to *.deleted.<timestamp>."),
params.message.includes("Delete 1 orphan transcript file"),
);
await noteStateIntegrity(cfg, { confirmRuntimeRepair, note: noteMock });
expect(stateIntegrityText()).toContain(
"These .jsonl files are no longer referenced by sessions.json",
);
expect(stateIntegrityText()).toContain("Examples: orphan-session.jsonl");
const archivePrompt = repairPromptCalls(confirmRuntimeRepair).find((prompt) =>
prompt.message?.includes("This only renames them to *.deleted.<timestamp>."),
const deletePrompt = repairPromptCalls(confirmRuntimeRepair).find((prompt) =>
prompt.message?.includes("Delete 1 orphan transcript file"),
);
expect(archivePrompt?.requiresInteractiveConfirmation).toBe(true);
expect(deletePrompt?.requiresInteractiveConfirmation).toBe(true);
const files = fs.readdirSync(sessionsDir);
const archivedOrphanTranscripts = files.filter((name) =>
name.startsWith("orphan-session.jsonl.deleted."),
);
expect(archivedOrphanTranscripts.length).toBeGreaterThan(0);
expect(files).not.toContain("orphan-session.jsonl");
});
it("does not auto-archive orphan transcripts from non-interactive repair mode", async () => {
it("does not auto-delete orphan transcripts from non-interactive repair mode", async () => {
const cfg: OpenClawConfig = {};
setupSessionState(cfg, process.env, process.env.HOME ?? "");
const sessionsDir = resolveSessionTranscriptsDirForAgent("main", process.env, () => tempHome);
@@ -456,10 +454,6 @@ describe("doctor state integrity oauth dir checks", () => {
expect(archivePrompt?.initialValue).toBe(false);
const files = fs.readdirSync(sessionsDir);
expect(files).toContain("orphan-session.jsonl");
const archivedOrphanTranscripts = files.filter((name) =>
name.startsWith("orphan-session.jsonl.deleted."),
);
expect(archivedOrphanTranscripts).toStrictEqual([]);
});
it.skipIf(process.platform === "win32")(
@@ -485,7 +479,7 @@ describe("doctor state integrity oauth dir checks", () => {
);
const transcriptPath = path.join(sessionsDir, "linked-session.jsonl");
fs.writeFileSync(transcriptPath, '{"type":"session"}\n');
writeSessionStore(cfg, {
await writeSessionStore(cfg, {
"agent:main:main": {
sessionId: "linked-session",
updatedAt: Date.now(),
@@ -493,7 +487,7 @@ describe("doctor state integrity oauth dir checks", () => {
});
const confirmRuntimeRepair = vi.fn(async (params: { message: string }) =>
params.message.includes("This only renames them to *.deleted.<timestamp>."),
params.message.includes("Delete 1 orphan transcript file"),
);
await noteStateIntegrity(cfg, { confirmRuntimeRepair, note: noteMock });
@@ -526,7 +520,7 @@ describe("doctor state integrity oauth dir checks", () => {
it("prints openclaw-only verification hints when recent sessions are missing transcripts", async () => {
const cfg: OpenClawConfig = {};
writeSessionStore(cfg, {
await writeSessionStore(cfg, {
"agent:main:main": {
sessionId: "missing-transcript",
updatedAt: Date.now(),
@@ -534,11 +528,9 @@ describe("doctor state integrity oauth dir checks", () => {
});
const text = await runStateIntegrityText(cfg);
expect(text).toContain("recent sessions are missing transcripts");
expect(text).toMatch(/openclaw sessions --store ".*sessions\.json"/);
expect(text).toMatch(/openclaw sessions cleanup --store ".*sessions\.json" --dry-run/);
expect(text).toMatch(
/openclaw sessions cleanup --store ".*sessions\.json" --enforce --fix-missing/,
);
expect(text).toContain("openclaw doctor --fix");
expect(text).toContain("openclaw sessions cleanup --dry-run");
expect(text).toContain("openclaw sessions cleanup --enforce --fix-missing");
expect(text).not.toContain("--active");
expect(text).not.toContain(" ls ");
});
@@ -547,17 +539,20 @@ describe("doctor state integrity oauth dir checks", () => {
const cfg: OpenClawConfig = {};
setupSessionState(cfg, process.env, tempHome);
const sessionsDir = resolveSessionTranscriptsDirForAgent("main", process.env, () => tempHome);
fs.writeFileSync(
path.join(sessionsDir, "heartbeat-session.jsonl"),
[
JSON.stringify({ message: { role: "user", content: HEARTBEAT_TRANSCRIPT_PROMPT } }),
JSON.stringify({ message: { role: "assistant", content: "HEARTBEAT_OK" } }),
"",
].join("\n"),
);
writeSessionStore(cfg, {
const heartbeatTranscriptPath = path.join(sessionsDir, "heartbeat-session.jsonl");
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "heartbeat-session",
transcriptPath: heartbeatTranscriptPath,
events: [
{ message: { role: "user", content: HEARTBEAT_TRANSCRIPT_PROMPT } },
{ message: { role: "assistant", content: "HEARTBEAT_OK" } },
],
});
await writeSessionStore(cfg, {
"agent:main:main": {
sessionId: "heartbeat-session",
sessionFile: heartbeatTranscriptPath,
updatedAt: Date.now(),
},
});
@@ -590,17 +585,10 @@ describe("doctor state integrity oauth dir checks", () => {
key.startsWith("agent:main:heartbeat-recovered-"),
);
expect(store["agent:main:main"]).toBeUndefined();
if (recoveredKey === undefined) {
throw new Error("expected recovered heartbeat session key");
}
expect(store[recoveredKey]?.sessionId).toBe("heartbeat-session");
expect(recoveredKey).toBeDefined();
expect(store[recoveredKey ?? ""]?.sessionId).toBe("heartbeat-session");
const tuiStore = JSON.parse(fs.readFileSync(tuiLastSessionPath, "utf8")) as Record<
string,
{ sessionKey?: string }
>;
expect(tuiStore.default).toBeUndefined();
expect(tuiStore.telegram?.sessionKey).toBe("agent:main:telegram:thread");
expect(fs.existsSync(tuiLastSessionPath)).toBe(false);
expect(doctorChangesText()).toContain("Moved heartbeat-owned main session agent:main:main");
expect(doctorChangesText()).toContain("Cleared 1 stale TUI last-session pointer");
});
@@ -609,16 +597,18 @@ describe("doctor state integrity oauth dir checks", () => {
const cfg: OpenClawConfig = {};
setupSessionState(cfg, process.env, tempHome);
const sessionsDir = resolveSessionTranscriptsDirForAgent("main", process.env, () => tempHome);
fs.writeFileSync(
path.join(sessionsDir, "mixed-session.jsonl"),
[
JSON.stringify({ message: { role: "user", content: HEARTBEAT_TRANSCRIPT_PROMPT } }),
JSON.stringify({ message: { role: "assistant", content: "HEARTBEAT_OK" } }),
JSON.stringify({ message: { role: "user", content: "hello from telegram" } }),
"",
].join("\n"),
);
writeSessionStore(cfg, {
const mixedTranscriptPath = path.join(sessionsDir, "mixed-session.jsonl");
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "mixed-session",
transcriptPath: mixedTranscriptPath,
events: [
{ message: { role: "user", content: HEARTBEAT_TRANSCRIPT_PROMPT } },
{ message: { role: "assistant", content: "HEARTBEAT_OK" } },
{ message: { role: "user", content: "hello from telegram" } },
],
});
await writeSessionStore(cfg, {
"agent:main:main": {
sessionId: "mixed-session",
updatedAt: Date.now(),
@@ -629,7 +619,7 @@ describe("doctor state integrity oauth dir checks", () => {
await noteStateIntegrity(cfg, { confirmRuntimeRepair, note: noteMock });
const storePath = resolveStorePath(cfg.session?.store, { agentId: "main" });
const store = JSON.parse(fs.readFileSync(storePath, "utf8")) as Record<string, SessionEntry>;
const store = loadSessionStore(storePath);
expect(store["agent:main:main"]?.sessionId).toBe("mixed-session");
expect(Object.keys(store).some((key) => key.includes("heartbeat-recovered"))).toBe(false);
expect(hasRepairPromptMessage(confirmRuntimeRepair, "Move heartbeat-owned main session")).toBe(
@@ -670,14 +660,15 @@ describe("doctor state integrity oauth dir checks", () => {
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-heartbeat-main-mixed-"));
try {
const transcriptPath = path.join(tempDir, "session.jsonl");
fs.writeFileSync(
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "session",
transcriptPath,
[
JSON.stringify({ message: { role: "user", content: HEARTBEAT_TRANSCRIPT_PROMPT } }),
JSON.stringify({ message: { role: "user", content: "real follow-up" } }),
"",
].join("\n"),
);
events: [
{ message: { role: "user", content: HEARTBEAT_TRANSCRIPT_PROMPT } },
{ message: { role: "user", content: "real follow-up" } },
],
});
const entry: SessionEntry = {
sessionId: "session",
updatedAt: 1,
@@ -693,14 +684,15 @@ describe("doctor state integrity oauth dir checks", () => {
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-heartbeat-main-route-"));
try {
const transcriptPath = path.join(tempDir, "session.jsonl");
fs.writeFileSync(
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "session",
transcriptPath,
[
JSON.stringify({ message: { role: "user", content: HEARTBEAT_TRANSCRIPT_PROMPT } }),
JSON.stringify({ message: { role: "user", content: "real follow-up" } }),
"",
].join("\n"),
);
events: [
{ message: { role: "user", content: HEARTBEAT_TRANSCRIPT_PROMPT } },
{ message: { role: "user", content: "real follow-up" } },
],
});
const entry = {
sessionId: "session",
updatedAt: 1,
@@ -718,17 +710,17 @@ describe("doctor state integrity oauth dir checks", () => {
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-heartbeat-main-cap-"));
try {
const transcriptPath = path.join(tempDir, "session.jsonl");
const heartbeatMessages = Array.from({ length: 400 }, () =>
JSON.stringify({ message: { role: "user", content: HEARTBEAT_TRANSCRIPT_PROMPT } }),
);
fs.writeFileSync(
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "session",
transcriptPath,
[
...heartbeatMessages,
JSON.stringify({ message: { role: "user", content: "real follow-up" } }),
"",
].join("\n"),
);
events: [
...Array.from({ length: 400 }, () => ({
message: { role: "user", content: HEARTBEAT_TRANSCRIPT_PROMPT },
})),
{ message: { role: "user", content: "real follow-up" } },
],
});
const entry: SessionEntry = { sessionId: "session", updatedAt: 1 };
expect(resolveHeartbeatMainSessionRepairCandidate({ entry, transcriptPath })).toBeNull();
} finally {
@@ -740,14 +732,15 @@ describe("doctor state integrity oauth dir checks", () => {
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-heartbeat-main-helper-"));
try {
const transcriptPath = path.join(tempDir, "session.jsonl");
fs.writeFileSync(
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "session",
transcriptPath,
[
JSON.stringify({ message: { role: "user", content: HEARTBEAT_TRANSCRIPT_PROMPT } }),
JSON.stringify({ message: { role: "assistant", content: "HEARTBEAT_OK" } }),
"",
].join("\n"),
);
events: [
{ message: { role: "user", content: HEARTBEAT_TRANSCRIPT_PROMPT } },
{ message: { role: "assistant", content: "HEARTBEAT_OK" } },
],
});
const entry: SessionEntry = { sessionId: "session", updatedAt: 1 };
expect(resolveHeartbeatMainSessionRepairCandidate({ entry, transcriptPath })?.reason).toBe(
"transcript",
@@ -804,7 +797,7 @@ describe("doctor state integrity oauth dir checks", () => {
it("ignores slash-routing sessions for recent missing transcript warnings", async () => {
const cfg: OpenClawConfig = {};
writeSessionStore(cfg, {
await writeSessionStore(cfg, {
"agent:main:telegram:slash:6790081233": {
sessionId: "missing-slash-transcript",
updatedAt: Date.now(),

View File

@@ -9,10 +9,7 @@ import {
} from "../agents/subagent-recovery-state.js";
import { formatCliCommand } from "../cli/command-format.js";
import { resolveOAuthDir, resolveStateDir } from "../config/paths.js";
import {
formatSessionArchiveTimestamp,
isPrimarySessionTranscriptFileName,
} from "../config/sessions/artifacts.js";
import { isPrimarySessionTranscriptFileName } from "../config/sessions/artifacts.js";
import { resolveMainSessionKey } from "../config/sessions/main-session.js";
import {
resolveSessionFilePath,
@@ -1052,32 +1049,30 @@ export async function noteStateIntegrity(
[
`- Found ${orphanCount} in ${displaySessionsDir}.`,
" These .jsonl files are no longer referenced by sessions.json, so they are not part of any active session history.",
" Doctor can archive them safely by renaming each file to *.deleted.<timestamp>.",
" Doctor can delete them after the session transcript migration/import has run.",
` Examples: ${orphanPreview}`,
].join("\n"),
);
const archiveOrphans = await prompter.confirmRuntimeRepair({
message: `Archive ${orphanCount} in ${displaySessionsDir}? This only renames them to *.deleted.<timestamp>.`,
const deleteOrphans = await prompter.confirmRuntimeRepair({
message: `Delete ${orphanCount} in ${displaySessionsDir}?`,
initialValue: false,
requiresInteractiveConfirmation: true,
});
if (archiveOrphans) {
let archived = 0;
const archivedAt = formatSessionArchiveTimestamp();
if (deleteOrphans) {
let deleted = 0;
for (const orphanPath of orphanTranscriptPaths) {
const archivedPath = `${orphanPath}.deleted.${archivedAt}`;
try {
fs.renameSync(orphanPath, archivedPath);
archived += 1;
fs.rmSync(orphanPath, { force: true });
deleted += 1;
} catch (err) {
warnings.push(
`- Failed to archive orphan transcript ${shortenHomePath(orphanPath)}: ${String(err)}`,
`- Failed to delete orphan transcript ${shortenHomePath(orphanPath)}: ${String(err)}`,
);
}
}
if (archived > 0) {
if (deleted > 0) {
changes.push(
`- Archived ${countLabel(archived, "orphan transcript file")} in ${displaySessionsDir} as .deleted timestamped backups.`,
`- Deleted ${countLabel(deleted, "orphan transcript file")} in ${displaySessionsDir}.`,
);
}
}

View File

@@ -46,6 +46,28 @@ describe("legacy session maintenance migrate", () => {
});
expect(res.changes).toContain("Removed deprecated session.maintenance.rotateBytes.");
});
it("removes legacy session.maintenance.resetArchiveRetention", () => {
const res = migrateLegacyConfigForTest({
session: {
maintenance: {
mode: "enforce",
pruneAfter: "30d",
maxEntries: 500,
resetArchiveRetention: "14d",
},
},
});
expect(res.config?.session?.maintenance).toEqual({
mode: "enforce",
pruneAfter: "30d",
maxEntries: 500,
});
expect(res.changes).toContain(
"Removed session.maintenance.resetArchiveRetention; reset transcript archives are no longer used.",
);
});
});
describe("legacy session parent fork migrate", () => {

View File

@@ -10,6 +10,13 @@ function hasLegacyRotateBytes(value: unknown): boolean {
return Boolean(maintenance && Object.prototype.hasOwnProperty.call(maintenance, "rotateBytes"));
}
/**
 * True when the value (expected to be the `session.maintenance` record) still
 * carries the removed `resetArchiveRetention` key.
 */
function hasLegacyResetArchiveRetention(value: unknown): boolean {
  const maintenance = getRecord(value);
  if (!maintenance) {
    return false;
  }
  return Object.prototype.hasOwnProperty.call(maintenance, "resetArchiveRetention");
}
function hasLegacyParentForkMaxTokens(value: unknown): boolean {
const session = getRecord(value);
return Boolean(session && Object.prototype.hasOwnProperty.call(session, "parentForkMaxTokens"));
@@ -22,6 +29,13 @@ const LEGACY_SESSION_MAINTENANCE_ROTATE_BYTES_RULE: LegacyConfigRule = {
match: hasLegacyRotateBytes,
};
// Flags configs that still set session.maintenance.resetArchiveRetention,
// which was removed together with reset transcript archives; the matching
// migration below deletes the key on "openclaw doctor --fix".
const LEGACY_SESSION_MAINTENANCE_RESET_ARCHIVE_RETENTION_RULE: LegacyConfigRule = {
  path: ["session", "maintenance"],
  message:
    'session.maintenance.resetArchiveRetention was removed with reset transcript archives; run "openclaw doctor --fix" to remove it.',
  match: hasLegacyResetArchiveRetention,
};
const LEGACY_SESSION_PARENT_FORK_MAX_TOKENS_RULE: LegacyConfigRule = {
path: ["session"],
message:
@@ -43,6 +57,24 @@ export const LEGACY_CONFIG_MIGRATIONS_RUNTIME_SESSION: LegacyConfigMigrationSpec
changes.push("Removed deprecated session.maintenance.rotateBytes.");
},
}),
defineLegacyConfigMigration({
  id: "session.maintenance.resetArchiveRetention",
  describe: "Remove legacy session.maintenance.resetArchiveRetention",
  legacyRules: [LEGACY_SESSION_MAINTENANCE_RESET_ARCHIVE_RETENTION_RULE],
  apply: (raw, changes) => {
    // Resolve session.maintenance as a plain record; nothing to do otherwise.
    const maintenance = getRecord(getRecord(raw.session)?.maintenance);
    if (!maintenance) {
      return;
    }
    // Only report a change when the legacy key is actually present.
    if (!Object.prototype.hasOwnProperty.call(maintenance, "resetArchiveRetention")) {
      return;
    }
    delete maintenance.resetArchiveRetention;
    changes.push(
      "Removed session.maintenance.resetArchiveRetention; reset transcript archives are no longer used.",
    );
  },
}),
defineLegacyConfigMigration({
id: "session.parentForkMaxTokens",
describe: "Remove legacy session.parentForkMaxTokens",

View File

@@ -91,7 +91,6 @@ describe("sessionsCleanupCommand", () => {
mode: "warn",
pruneAfterMs: 7 * 24 * 60 * 60 * 1000,
maxEntries: 500,
resetArchiveRetentionMs: 7 * 24 * 60 * 60 * 1000,
maxDiskBytes: null,
highWaterBytes: null,
});

View File

@@ -182,7 +182,6 @@ const TARGET_KEYS = [
"session.maintenance.pruneDays",
"session.maintenance.maxEntries",
"session.maintenance.rotateBytes",
"session.maintenance.resetArchiveRetention",
"session.maintenance.maxDiskBytes",
"session.maintenance.highWaterBytes",
"approvals",
@@ -719,10 +718,6 @@ describe("config help copy quality", () => {
expect(/deprecated/i.test(deprecated)).toBe(true);
expect(deprecated.includes("session.maintenance.pruneAfter")).toBe(true);
const resetRetention = FIELD_HELP["session.maintenance.resetArchiveRetention"];
expect(resetRetention.includes(".reset.")).toBe(true);
expect(/false/i.test(resetRetention)).toBe(true);
const maxDisk = FIELD_HELP["session.maintenance.maxDiskBytes"];
expect(maxDisk.includes("500mb")).toBe(true);

View File

@@ -1551,7 +1551,7 @@ export const FIELD_HELP: Record<string, string> = {
"session.threadBindings.defaultSpawnContext":
'Default native subagent context for thread-bound spawns. Use "fork" to start from the requester transcript or "isolated" for a clean child. Default: "fork".',
"session.maintenance":
"Automatic session-store maintenance controls for pruning age, entry caps, reset archive retention, and disk budget cleanup. Start in warn mode to observe impact, then enforce once thresholds are tuned.",
"Automatic session-store maintenance controls for pruning age, entry caps, and disk budget cleanup. Start in warn mode to observe impact, then enforce once thresholds are tuned.",
"session.maintenance.mode":
'Determines whether maintenance policies are only reported ("warn") or actively applied ("enforce"). Keep "warn" during rollout and switch to "enforce" after validating safe thresholds.',
"session.maintenance.pruneAfter":
@@ -1562,8 +1562,6 @@ export const FIELD_HELP: Record<string, string> = {
"Caps total session entry count retained in the store to prevent unbounded growth over time. Use lower limits for constrained environments, or higher limits when longer history is required.",
"session.maintenance.rotateBytes":
'Deprecated and ignored. Do not use for `sessions.json` growth control; OpenClaw no longer creates automatic rotation backups, and "openclaw doctor --fix" removes this key.',
"session.maintenance.resetArchiveRetention":
"Retention for reset transcript archives (`*.reset.<timestamp>`). Accepts a duration (for example `30d`), or `false` to disable cleanup. Defaults to pruneAfter so reset artifacts do not grow forever.",
"session.maintenance.maxDiskBytes":
"Optional per-agent sessions-directory disk budget (for example `500mb`). Use this to cap session storage per agent; when exceeded, warn mode reports pressure and enforce mode performs oldest-first cleanup.",
"session.maintenance.highWaterBytes":

View File

@@ -760,7 +760,6 @@ export const FIELD_LABELS: Record<string, string> = {
"session.maintenance.pruneDays": "Session Prune Days (Deprecated)",
"session.maintenance.maxEntries": "Session Max Entries",
"session.maintenance.rotateBytes": "Deprecated Session Rotate Size",
"session.maintenance.resetArchiveRetention": "Session Reset Archive Retention",
"session.maintenance.maxDiskBytes": "Session Max Disk Budget",
"session.maintenance.highWaterBytes": "Session Disk High-water Target",
cron: "Cron",

View File

@@ -1,28 +1,17 @@
import { describe, expect, it } from "vitest";
import {
formatSessionArchiveTimestamp,
formatFilesystemTimestamp,
isCompactionCheckpointTranscriptFileName,
isPrimarySessionTranscriptFileName,
isSessionArchiveArtifactName,
isTrajectoryPointerArtifactName,
isTrajectoryRuntimeArtifactName,
isTrajectorySessionArtifactName,
isUsageCountedSessionTranscriptFileName,
parseCompactionCheckpointTranscriptFileName,
parseUsageCountedSessionIdFromFileName,
parseSessionArchiveTimestamp,
} from "./artifacts.js";
describe("session artifact helpers", () => {
it("classifies archived artifact file names", () => {
expect(isSessionArchiveArtifactName("abc.jsonl.deleted.2026-01-01T00-00-00.000Z")).toBe(true);
expect(isSessionArchiveArtifactName("abc.jsonl.reset.2026-01-01T00-00-00.000Z")).toBe(true);
expect(isSessionArchiveArtifactName("abc.jsonl.bak.2026-01-01T00-00-00.000Z")).toBe(true);
expect(isSessionArchiveArtifactName("sessions.json.bak.1737420882")).toBe(true);
expect(isSessionArchiveArtifactName("keep.deleted.keep.jsonl")).toBe(false);
expect(isSessionArchiveArtifactName("abc.jsonl")).toBe(false);
});
it("classifies primary transcript files", () => {
expect(isPrimarySessionTranscriptFileName("abc.jsonl")).toBe(true);
expect(isPrimarySessionTranscriptFileName("keep.deleted.keep.jsonl")).toBe(true);
@@ -31,9 +20,6 @@ describe("session artifact helpers", () => {
"abc.checkpoint.11111111-1111-4111-8111-111111111111.jsonl",
),
).toBe(false);
expect(isPrimarySessionTranscriptFileName("abc.jsonl.deleted.2026-01-01T00-00-00.000Z")).toBe(
false,
);
expect(isPrimarySessionTranscriptFileName("abc.trajectory.jsonl")).toBe(false);
expect(isPrimarySessionTranscriptFileName("sessions.json")).toBe(false);
});
@@ -48,15 +34,6 @@ describe("session artifact helpers", () => {
it("classifies usage-counted transcript files", () => {
expect(isUsageCountedSessionTranscriptFileName("abc.jsonl")).toBe(true);
expect(
isUsageCountedSessionTranscriptFileName("abc.jsonl.reset.2026-01-01T00-00-00.000Z"),
).toBe(true);
expect(
isUsageCountedSessionTranscriptFileName("abc.jsonl.deleted.2026-01-01T00-00-00.000Z"),
).toBe(true);
expect(isUsageCountedSessionTranscriptFileName("abc.jsonl.bak.2026-01-01T00-00-00.000Z")).toBe(
false,
);
expect(
isUsageCountedSessionTranscriptFileName(
"abc.checkpoint.11111111-1111-4111-8111-111111111111.jsonl",
@@ -67,15 +44,6 @@ describe("session artifact helpers", () => {
it("parses usage-counted session ids from file names", () => {
expect(parseUsageCountedSessionIdFromFileName("abc.jsonl")).toBe("abc");
expect(parseUsageCountedSessionIdFromFileName("abc.jsonl.reset.2026-01-01T00-00-00.000Z")).toBe(
"abc",
);
expect(
parseUsageCountedSessionIdFromFileName("abc.jsonl.deleted.2026-01-01T00-00-00.000Z"),
).toBe("abc");
expect(parseUsageCountedSessionIdFromFileName("abc.jsonl.bak.2026-01-01T00-00-00.000Z")).toBe(
null,
);
expect(
parseUsageCountedSessionIdFromFileName(
"abc.checkpoint.11111111-1111-4111-8111-111111111111.jsonl",
@@ -94,21 +62,10 @@ describe("session artifact helpers", () => {
checkpointId: "11111111-1111-4111-8111-111111111111",
});
expect(isCompactionCheckpointTranscriptFileName("abc.checkpoint.not-a-uuid.jsonl")).toBe(false);
expect(
isCompactionCheckpointTranscriptFileName(
"abc.checkpoint.11111111-1111-4111-8111-111111111111.jsonl.deleted.2026-01-01T00-00-00.000Z",
),
).toBe(false);
});
it("formats and parses archive timestamps", () => {
it("formats filesystem timestamps", () => {
const now = Date.parse("2026-02-23T12:34:56.000Z");
const stamp = formatSessionArchiveTimestamp(now);
expect(stamp).toBe("2026-02-23T12-34-56.000Z");
const file = `abc.jsonl.deleted.${stamp}`;
expect(parseSessionArchiveTimestamp(file, "deleted")).toBe(now);
expect(parseSessionArchiveTimestamp(file, "reset")).toBeNull();
expect(parseSessionArchiveTimestamp("keep.deleted.keep.jsonl", "deleted")).toBeNull();
expect(formatFilesystemTimestamp(now)).toBe("2026-02-23T12-34-56.000Z");
});
});

View File

@@ -1,31 +1,6 @@
// Suffix marker kinds appended to archived session artifacts,
// e.g. `abc.jsonl.deleted.<timestamp>`.
export type SessionArchiveReason = "bak" | "reset" | "deleted";
// ISO-8601 UTC timestamp with ':' flattened to '-' so it is filesystem-safe;
// the millisecond part is optional.
const ARCHIVE_TIMESTAMP_RE = /^\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2}(?:\.\d{3})?Z$/;
// Legacy whole-store backups: `sessions.json.bak.<digits>`.
const LEGACY_STORE_BACKUP_RE = /^sessions\.json\.bak\.\d+$/;
// Compaction checkpoint transcripts: `<sessionId>.checkpoint.<uuid>.jsonl`.
const COMPACTION_CHECKPOINT_TRANSCRIPT_RE =
  /^(.+)\.checkpoint\.([0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12})\.jsonl$/i;
/**
 * True when `fileName` ends with `.<reason>.<timestamp>` and the trailing
 * timestamp matches ARCHIVE_TIMESTAMP_RE. Uses the LAST occurrence of the
 * marker so session IDs containing marker-like text do not false-positive.
 */
function hasArchiveSuffix(fileName: string, reason: SessionArchiveReason): boolean {
  const marker = `.${reason}.`;
  const markerStart = fileName.lastIndexOf(marker);
  if (markerStart === -1) {
    return false;
  }
  const timestampText = fileName.slice(markerStart + marker.length);
  return ARCHIVE_TIMESTAMP_RE.test(timestampText);
}
/**
 * Classifies file names that are archived session artifacts: legacy
 * `sessions.json.bak.<n>` store backups, or any transcript carrying a
 * timestamped `.deleted.` / `.reset.` / `.bak.` suffix.
 */
export function isSessionArchiveArtifactName(fileName: string): boolean {
  if (LEGACY_STORE_BACKUP_RE.test(fileName)) {
    return true;
  }
  return (["deleted", "reset", "bak"] as const).some((reason) =>
    hasArchiveSuffix(fileName, reason),
  );
}
export function parseCompactionCheckpointTranscriptFileName(fileName: string): {
sessionId: string;
checkpointId: string;
@@ -65,58 +40,20 @@ export function isPrimarySessionTranscriptFileName(fileName: string): boolean {
if (isCompactionCheckpointTranscriptFileName(fileName)) {
return false;
}
return !isSessionArchiveArtifactName(fileName);
return true;
}
export function isUsageCountedSessionTranscriptFileName(fileName: string): boolean {
if (isPrimarySessionTranscriptFileName(fileName)) {
return true;
}
return hasArchiveSuffix(fileName, "reset") || hasArchiveSuffix(fileName, "deleted");
return isPrimarySessionTranscriptFileName(fileName);
}
export function parseUsageCountedSessionIdFromFileName(fileName: string): string | null {
if (isPrimarySessionTranscriptFileName(fileName)) {
return fileName.slice(0, -".jsonl".length);
}
for (const reason of ["reset", "deleted"] as const) {
const marker = `.jsonl.${reason}.`;
const index = fileName.lastIndexOf(marker);
if (index > 0 && hasArchiveSuffix(fileName, reason)) {
return fileName.slice(0, index);
}
}
return null;
}
export function formatSessionArchiveTimestamp(nowMs = Date.now()): string {
export function formatFilesystemTimestamp(nowMs = Date.now()): string {
return new Date(nowMs).toISOString().replaceAll(":", "-");
}
/**
 * Reverses the archive-timestamp encoding: restores ":" separators in the
 * time portion of an ISO-like timestamp whose ":" were written as "-".
 * Returns the input unchanged when it does not split into date and time.
 */
function restoreSessionArchiveTimestamp(raw: string): string {
  const pieces = raw.split("T");
  if (pieces.length < 2 || !pieces[0] || !pieces[1]) {
    return raw;
  }
  // Only the time part had its ":" replaced; the date keeps its "-".
  return pieces[0] + "T" + pieces[1].split("-").join(":");
}
/**
 * Parses the epoch-ms timestamp out of an archived transcript file name for
 * the given archive reason. Returns null when the `.<reason>.<timestamp>`
 * suffix is absent, malformed, or does not parse as a date.
 */
export function parseSessionArchiveTimestamp(
  fileName: string,
  reason: SessionArchiveReason,
): number | null {
  const marker = `.${reason}.`;
  const markerIndex = fileName.lastIndexOf(marker);
  if (markerIndex === -1) {
    return null;
  }
  const raw = fileName.slice(markerIndex + marker.length);
  if (!raw || !ARCHIVE_TIMESTAMP_RE.test(raw)) {
    return null;
  }
  const parsedMs = Date.parse(restoreSessionArchiveTimestamp(raw));
  return Number.isNaN(parsedMs) ? null : parsedMs;
}

View File

@@ -6,155 +6,10 @@ import {
resolveTrajectoryFilePath,
resolveTrajectoryPointerFilePath,
} from "../../trajectory/paths.js";
import { formatSessionArchiveTimestamp } from "./artifacts.js";
import { enforceSessionDiskBudget } from "./disk-budget.js";
import type { SessionEntry } from "./types.js";
/** Asserts that the given path exists on disk (fs.access resolves). */
async function expectPathExists(targetPath: string): Promise<void> {
  await expect(fs.access(targetPath)).resolves.toBeUndefined();
}
/** Asserts that the given path is absent (fs.stat rejects with ENOENT). */
async function expectPathMissing(targetPath: string): Promise<void> {
  await expect(fs.stat(targetPath)).rejects.toMatchObject({ code: "ENOENT" });
}
describe("enforceSessionDiskBudget", () => {
// Regression guard: a *referenced* transcript whose session id merely looks
// like an archive marker ("keep.deleted.keep") must not be mistaken for a
// removable archived artifact by the disk-budget enforcer.
it("does not treat referenced transcripts with marker-like session IDs as archived artifacts", async () => {
  await withTempDir({ prefix: "openclaw-disk-budget-" }, async (dir) => {
    const storePath = path.join(dir, "sessions.json");
    const sessionId = "keep.deleted.keep";
    const activeKey = "agent:main:main";
    const transcriptPath = path.join(dir, `${sessionId}.jsonl`);
    const store: Record<string, SessionEntry> = {
      [activeKey]: {
        sessionId,
        updatedAt: Date.now(),
      },
    };
    await fs.writeFile(storePath, JSON.stringify(store, null, 2), "utf-8");
    // 256 bytes pushes the directory past maxDiskBytes, so cleanup runs.
    await fs.writeFile(transcriptPath, "x".repeat(256), "utf-8");
    const result = await enforceSessionDiskBudget({
      store,
      storePath,
      activeSessionKey: activeKey,
      maintenance: {
        maxDiskBytes: 150,
        highWaterBytes: 100,
      },
      warnOnly: false,
    });
    // The referenced transcript must survive even under budget pressure.
    await expectPathExists(transcriptPath);
    expect(result).toEqual(
      expect.objectContaining({
        removedFiles: 0,
      }),
    );
  });
});
// Archived artifacts (`*.jsonl.deleted.<timestamp>`) are eligible for
// disk-budget cleanup, while transcripts still referenced by the store
// must be preserved.
it("removes true archived transcript artifacts while preserving referenced primary transcripts", async () => {
  await withTempDir({ prefix: "openclaw-disk-budget-" }, async (dir) => {
    const storePath = path.join(dir, "sessions.json");
    const sessionId = "keep";
    const transcriptPath = path.join(dir, `${sessionId}.jsonl`);
    // A day-old archive carrying a valid archive-timestamp suffix.
    const archivePath = path.join(
      dir,
      `old-session.jsonl.deleted.${formatSessionArchiveTimestamp(Date.now() - 24 * 60 * 60 * 1000)}`,
    );
    const store: Record<string, SessionEntry> = {
      "agent:main:main": {
        sessionId,
        updatedAt: Date.now(),
      },
    };
    await fs.writeFile(storePath, JSON.stringify(store, null, 2), "utf-8");
    await fs.writeFile(transcriptPath, "k".repeat(80), "utf-8");
    // The archive alone pushes the directory past maxDiskBytes.
    await fs.writeFile(archivePath, "a".repeat(260), "utf-8");
    const result = await enforceSessionDiskBudget({
      store,
      storePath,
      maintenance: {
        maxDiskBytes: 300,
        highWaterBytes: 220,
      },
      warnOnly: false,
    });
    await expectPathExists(transcriptPath);
    await expectPathMissing(archivePath);
    expect(result).toEqual(
      expect.objectContaining({
        removedFiles: 1,
        removedEntries: 0,
      }),
    );
  });
});
// Checkpoint transcripts referenced from a store entry's
// `compactionCheckpoints[].preCompaction.sessionFile` must survive cleanup;
// orphaned checkpoint transcripts may be deleted under disk pressure.
it("removes unreferenced compaction checkpoint artifacts under pressure", async () => {
  await withTempDir({ prefix: "openclaw-disk-budget-" }, async (dir) => {
    const storePath = path.join(dir, "sessions.json");
    const sessionId = "keep";
    const transcriptPath = path.join(dir, `${sessionId}.jsonl`);
    // Orphan: no store entry references this checkpoint file.
    const checkpointPath = path.join(
      dir,
      "keep.checkpoint.11111111-1111-4111-8111-111111111111.jsonl",
    );
    // Referenced below via compactionCheckpoints[].preCompaction.sessionFile.
    const referencedCheckpointPath = path.join(
      dir,
      "keep.checkpoint.22222222-2222-4222-8222-222222222222.jsonl",
    );
    const store: Record<string, SessionEntry> = {
      "agent:main:main": {
        sessionId,
        updatedAt: Date.now(),
        compactionCheckpoints: [
          {
            checkpointId: "referenced",
            sessionKey: "agent:main:main",
            sessionId,
            createdAt: Date.now(),
            reason: "manual",
            preCompaction: {
              sessionId,
              sessionFile: referencedCheckpointPath,
              leafId: "leaf",
            },
            postCompaction: { sessionId },
          },
        ],
      },
    };
    await fs.writeFile(storePath, JSON.stringify(store, null, 2), "utf-8");
    await fs.writeFile(transcriptPath, "k".repeat(80), "utf-8");
    // The 5000-byte orphan is what pushes usage past the 4000-byte budget.
    await fs.writeFile(checkpointPath, "c".repeat(5000), "utf-8");
    await fs.writeFile(referencedCheckpointPath, "r".repeat(260), "utf-8");
    const result = await enforceSessionDiskBudget({
      store,
      storePath,
      maintenance: {
        maxDiskBytes: 4000,
        highWaterBytes: 3000,
      },
      warnOnly: false,
    });
    await expectPathExists(transcriptPath);
    await expectPathMissing(checkpointPath);
    await expectPathExists(referencedCheckpointPath);
    expect(result).toEqual(
      expect.objectContaining({
        removedFiles: 1,
        removedEntries: 0,
      }),
    );
  });
});
it("removes unreferenced trajectory sidecars while preserving referenced ones", async () => {
await withTempDir({ prefix: "openclaw-disk-budget-" }, async (dir) => {
const storePath = path.join(dir, "sessions.json");
@@ -171,11 +26,10 @@ describe("enforceSessionDiskBudget", () => {
const store: Record<string, SessionEntry> = {
"agent:main:main": {
sessionId,
sessionFile: transcriptPath,
updatedAt: Date.now(),
},
};
await fs.writeFile(storePath, JSON.stringify(store, null, 2), "utf-8");
await fs.writeFile(transcriptPath, "k".repeat(80), "utf-8");
await fs.writeFile(referencedRuntime, "r".repeat(80), "utf-8");
await fs.writeFile(referencedPointer, "p".repeat(80), "utf-8");
await fs.writeFile(orphanRuntime, "o".repeat(5000), "utf-8");
@@ -191,11 +45,10 @@ describe("enforceSessionDiskBudget", () => {
warnOnly: false,
});
await expectPathExists(transcriptPath);
await expectPathExists(referencedRuntime);
await expectPathExists(referencedPointer);
await expectPathMissing(orphanRuntime);
await expectPathMissing(orphanPointer);
await expect(fs.stat(referencedRuntime)).resolves.toBeDefined();
await expect(fs.stat(referencedPointer)).resolves.toBeDefined();
await expect(fs.stat(orphanRuntime)).rejects.toThrow();
await expect(fs.stat(orphanPointer)).rejects.toThrow();
expect(result).toEqual(
expect.objectContaining({
removedFiles: 2,
@@ -211,6 +64,12 @@ describe("enforceSessionDiskBudget", () => {
const protectedKey = "agent:main:slack:channel:C123:thread:1710000000.000100";
const removableKey = "agent:main:subagent:old-worker";
const activeKey = "agent:main:main";
const removableSessionFile = path.join(dir, "removable-worker.jsonl");
const removableRuntime = resolveTrajectoryFilePath({
env: {},
sessionFile: removableSessionFile,
sessionId: "removable-worker",
});
const store: Record<string, SessionEntry> = {
[protectedKey]: {
sessionId: "protected-thread",
@@ -219,6 +78,7 @@ describe("enforceSessionDiskBudget", () => {
},
[removableKey]: {
sessionId: "removable-worker",
sessionFile: removableSessionFile,
updatedAt: 2,
displayName: "r".repeat(2000),
},
@@ -227,8 +87,7 @@ describe("enforceSessionDiskBudget", () => {
updatedAt: 3,
},
};
await fs.writeFile(storePath, JSON.stringify(store, null, 2), "utf-8");
await fs.writeFile(path.join(dir, "removable-worker.jsonl"), "w".repeat(800), "utf-8");
await fs.writeFile(removableRuntime, "w".repeat(800), "utf-8");
const result = await enforceSessionDiskBudget({
store,
@@ -241,9 +100,9 @@ describe("enforceSessionDiskBudget", () => {
warnOnly: false,
});
expect(store).toHaveProperty(protectedKey);
expect(store[protectedKey]).toBeDefined();
expect(store[removableKey]).toBeUndefined();
expect(store).toHaveProperty(activeKey);
expect(store[activeKey]).toBeDefined();
expect(result).toEqual(
expect.objectContaining({
removedEntries: 1,

View File

@@ -8,12 +8,7 @@ import {
resolveTrajectoryFilePath,
resolveTrajectoryPointerFilePath,
} from "../../trajectory/paths.js";
import {
isCompactionCheckpointTranscriptFileName,
isPrimarySessionTranscriptFileName,
isSessionArchiveArtifactName,
isTrajectorySessionArtifactName,
} from "./artifacts.js";
import { isTrajectorySessionArtifactName } from "./artifacts.js";
import { resolveSessionFilePath } from "./paths.js";
import { isProtectedSessionMaintenanceEntry } from "./store-maintenance.js";
import type { SessionEntry } from "./types.js";
@@ -119,7 +114,7 @@ function resolveSessionArtifactPathsForEntry(params: {
if (!transcriptPath) {
return [];
}
const paths = [transcriptPath];
const paths: string[] = [];
if (params.entry.sessionId) {
paths.push(resolveTrajectoryPointerFilePath(transcriptPath));
paths.push(
@@ -145,7 +140,6 @@ function resolveReferencedSessionArtifactPaths(params: {
store: Record<string, SessionEntry>;
}): Set<string> {
const referenced = new Set<string>();
const resolvedSessionsDir = canonicalizePathForComparison(params.sessionsDir);
for (const entry of Object.values(params.store)) {
for (const resolved of resolveSessionArtifactCanonicalPathsForEntry({
sessionsDir: params.sessionsDir,
@@ -153,17 +147,6 @@ function resolveReferencedSessionArtifactPaths(params: {
})) {
referenced.add(resolved);
}
for (const checkpoint of entry.compactionCheckpoints ?? []) {
const checkpointFile = checkpoint.preCompaction.sessionFile?.trim();
if (!checkpointFile) {
continue;
}
const resolvedCheckpointPath = canonicalizePathForComparison(checkpointFile);
const relative = path.relative(resolvedSessionsDir, resolvedCheckpointPath);
if (relative && !relative.startsWith("..") && !path.isAbsolute(relative)) {
referenced.add(resolvedCheckpointPath);
}
}
}
return referenced;
}
@@ -200,21 +183,14 @@ function isUnreferencedSessionArtifactFile(
if (referencedPaths.has(file.canonicalPath)) {
return false;
}
return (
isCompactionCheckpointTranscriptFileName(file.name) ||
isTrajectorySessionArtifactName(file.name) ||
isPrimarySessionTranscriptFileName(file.name)
);
return isTrajectorySessionArtifactName(file.name);
}
function isDiskBudgetRemovableSessionFile(
file: Pick<SessionsDirFileStat, "canonicalPath" | "name">,
referencedPaths: ReadonlySet<string>,
): boolean {
return (
isSessionArchiveArtifactName(file.name) ||
isUnreferencedSessionArtifactFile(file, referencedPaths)
);
return isUnreferencedSessionArtifactFile(file, referencedPaths);
}
async function removeFileIfExists(filePath: string): Promise<number> {

View File

@@ -39,7 +39,6 @@ export type ResolvedSessionMaintenanceConfig = {
mode: SessionMaintenanceMode;
pruneAfterMs: number;
maxEntries: number;
resetArchiveRetentionMs: number | null;
maxDiskBytes: number | null;
highWaterBytes: number | null;
};
@@ -57,25 +56,6 @@ function resolvePruneAfterMs(maintenance?: SessionMaintenanceConfig): number {
}
}
/**
 * Resolves the retention window (ms) for archived reset transcripts.
 * `false` disables cleanup entirely (returns null); an unset or invalid
 * duration falls back to the prune cutoff (`pruneAfterMs`).
 */
function resolveResetArchiveRetentionMs(
  maintenance: SessionMaintenanceConfig | undefined,
  pruneAfterMs: number,
): number | null {
  const configured = maintenance?.resetArchiveRetention;
  // `false` is the explicit opt-out for reset-archive cleanup.
  if (configured === false) {
    return null;
  }
  const durationText = normalizeStringifiedOptionalString(configured);
  if (durationText) {
    try {
      // Bare numbers are interpreted as days.
      return parseDurationMs(durationText, { defaultUnit: "d" });
    } catch {
      // Invalid duration strings fall through to the prune cutoff.
    }
  }
  return pruneAfterMs;
}
function resolveMaxDiskBytes(maintenance?: SessionMaintenanceConfig): number | null {
const raw = maintenance?.maxDiskBytes;
const normalized = normalizeStringifiedOptionalString(raw);
@@ -137,7 +117,6 @@ export function resolveMaintenanceConfigFromInput(
mode: maintenance?.mode ?? DEFAULT_SESSION_MAINTENANCE_MODE,
pruneAfterMs,
maxEntries: maintenance?.maxEntries ?? DEFAULT_SESSION_MAX_ENTRIES,
resetArchiveRetentionMs: resolveResetArchiveRetentionMs(maintenance, pruneAfterMs),
maxDiskBytes,
highWaterBytes: resolveHighWaterBytes(maintenance, maxDiskBytes),
};

View File

@@ -20,6 +20,7 @@ import {
} from "./store-maintenance.js";
import { normalizeSessionStore } from "./store-normalize.js";
import { runExclusiveSessionStoreWrite } from "./store-writer.js";
import { deleteSqliteSessionTranscript } from "./transcript-store.sqlite.js";
import {
mergeSessionEntry,
mergeSessionEntryPreserveActivity,
@@ -35,15 +36,6 @@ export { withSessionStoreWriterForTest } from "./store-writer.js";
export { loadSessionStore } from "./store-load.js";
export { normalizeStoreSessionKey, resolveSessionStoreEntry } from "./store-entry.js";
// Memoized in-flight import; loaded lazily so this module does not take a
// hard (potentially cyclic) dependency on the gateway runtime at load time.
let sessionArchiveRuntimePromise: Promise<
  typeof import("../../gateway/session-archive.runtime.js")
> | null = null;

/** Loads the session-archive runtime once, reusing the in-flight promise. */
function loadSessionArchiveRuntime() {
  if (sessionArchiveRuntimePromise === null) {
    sessionArchiveRuntimePromise = import("../../gateway/session-archive.runtime.js");
  }
  return sessionArchiveRuntimePromise;
}
function removeThreadFromDeliveryContext(context?: DeliveryContext): DeliveryContext | undefined {
if (!context || context.threadId == null) {
return context;
@@ -199,31 +191,26 @@ export async function runQuotaSuspensionMaintenance(params: {
);
}
export async function archiveRemovedSessionTranscripts(params: {
export async function deleteRemovedSessionTranscripts(params: {
removedSessionFiles: Iterable<[string, string | undefined]>;
referencedSessionIds: ReadonlySet<string>;
storePath: string;
reason: "deleted" | "reset";
restrictToStoreDir?: boolean;
}): Promise<Set<string>> {
const { archiveSessionTranscripts } = await loadSessionArchiveRuntime();
const archivedDirs = new Set<string>();
for (const [sessionId, sessionFile] of params.removedSessionFiles) {
const sqliteOptions = resolveSqliteSessionStoreOptionsForPath(params.storePath);
if (!sqliteOptions) {
return new Set();
}
for (const [sessionId] of params.removedSessionFiles) {
if (params.referencedSessionIds.has(sessionId)) {
continue;
}
const archived = archiveSessionTranscripts({
deleteSqliteSessionTranscript({
...sqliteOptions,
sessionId,
storePath: params.storePath,
sessionFile,
reason: params.reason,
restrictToStoreDir: params.restrictToStoreDir,
});
for (const archivedPath of archived) {
archivedDirs.add(path.dirname(archivedPath));
}
}
return archivedDirs;
return new Set();
}
async function persistResolvedSessionEntry(params: {

View File

@@ -48,6 +48,12 @@ export type SqliteSessionTranscriptFile = SqliteSessionTranscriptScope & {
updatedAt: number;
};
/** Aggregated per-session transcript summary returned by listSqliteSessionTranscripts. */
export type SqliteSessionTranscript = SqliteSessionTranscriptScope & {
  // Most recently imported/exported on-disk transcript path for this scope, when recorded.
  path?: string;
  // Epoch ms of the newest transcript event in this scope.
  updatedAt: number;
  // Total number of transcript events stored for this scope.
  eventCount: number;
};
function normalizeSessionId(value: string): string {
const sessionId = value.trim();
if (!sessionId) {
@@ -141,6 +147,51 @@ export function resolveSqliteSessionTranscriptScopeForPath(
};
}
/**
 * Resolves the (agentId, sessionId) scope identifying a transcript in the
 * sqlite store, trying the cheapest signal first:
 *  1. an explicit agentId on the options,
 *  2. the transcript path (via resolveSqliteSessionTranscriptScopeForPath),
 *     accepted only when it maps back to the same sessionId,
 *  3. a database lookup picking the most recently active agent for the id.
 * Returns undefined when no agent owns events for the session.
 */
export function resolveSqliteSessionTranscriptScope(
  options: OpenClawStateDatabaseOptions & {
    agentId?: string;
    sessionId: string;
    transcriptPath?: string;
  },
): SqliteSessionTranscriptScope | undefined {
  const sessionId = normalizeSessionId(options.sessionId);
  // Fast path: the caller already knows the owning agent.
  if (options.agentId?.trim()) {
    return {
      agentId: normalizeAgentId(options.agentId),
      sessionId,
    };
  }
  // Next: derive the scope from the transcript path, but only trust it when
  // it resolves to the same session id we were asked about.
  if (options.transcriptPath?.trim()) {
    const byPath = resolveSqliteSessionTranscriptScopeForPath({
      ...options,
      transcriptPath: options.transcriptPath,
    });
    if (byPath?.sessionId === sessionId) {
      return byPath;
    }
  }
  // Fallback: ask the database. If several agents ever wrote events for this
  // session id, prefer the most recently active one (ties broken by agent id).
  const database = openOpenClawStateDatabase(options);
  const row = database.db
    .prepare(
      `
      SELECT agent_id, session_id
      FROM transcript_events
      WHERE session_id = ?
      GROUP BY agent_id, session_id
      ORDER BY MAX(created_at) DESC, agent_id ASC
      LIMIT 1
    `,
    )
    .get(sessionId) as { agent_id?: unknown; session_id?: unknown } | undefined;
  if (typeof row?.agent_id !== "string" || typeof row.session_id !== "string") {
    return undefined;
  }
  return {
    agentId: normalizeAgentId(row.agent_id),
    sessionId: normalizeSessionId(row.session_id),
  };
}
export function listSqliteSessionTranscriptFiles(
options: OpenClawStateDatabaseOptions = {},
): SqliteSessionTranscriptFile[] {
@@ -195,6 +246,66 @@ export function listSqliteSessionTranscriptFiles(
});
}
/**
 * Lists transcript summaries stored in sqlite, newest-first, optionally
 * filtered to a single agent. Each row aggregates transcript_events and
 * attaches the most recently imported/exported file path for the scope
 * when one exists.
 */
export function listSqliteSessionTranscripts(
  options: OpenClawStateDatabaseOptions & { agentId?: string } = {},
): SqliteSessionTranscript[] {
  const agentId = options.agentId ? normalizeAgentId(options.agentId) : undefined;
  const database = openOpenClawStateDatabase(options);
  return database.db
    .prepare(
      `
      SELECT
        events.agent_id,
        events.session_id,
        MAX(events.created_at) AS updated_at,
        COUNT(*) AS event_count,
        (
          SELECT files.path
          FROM transcript_files files
          WHERE files.agent_id = events.agent_id
            AND files.session_id = events.session_id
          ORDER BY COALESCE(files.imported_at, files.exported_at, 0) DESC, files.path ASC
          LIMIT 1
        ) AS path
      FROM transcript_events events
      WHERE (? IS NULL OR events.agent_id = ?)
      GROUP BY events.agent_id, events.session_id
      ORDER BY updated_at DESC, events.session_id ASC
    `,
    )
    .all(agentId ?? null, agentId ?? null)
    .flatMap((row) => {
      // flatMap lets malformed rows drop out by returning [].
      const record = row as {
        agent_id?: unknown;
        session_id?: unknown;
        path?: unknown;
        updated_at?: unknown;
        event_count?: unknown;
      };
      if (typeof record.agent_id !== "string" || typeof record.session_id !== "string") {
        return [];
      }
      // Aggregates may come back as bigint depending on the sqlite driver.
      const updatedAt =
        typeof record.updated_at === "bigint"
          ? Number(record.updated_at)
          : Number(record.updated_at ?? 0);
      const eventCount =
        typeof record.event_count === "bigint"
          ? Number(record.event_count)
          : Number(record.event_count ?? 0);
      const path = typeof record.path === "string" ? record.path : undefined;
      return [
        {
          agentId: normalizeAgentId(record.agent_id),
          sessionId: normalizeSessionId(record.session_id),
          path,
          updatedAt: Number.isFinite(updatedAt) ? updatedAt : 0,
          eventCount: Number.isFinite(eventCount) ? eventCount : 0,
        },
      ];
    });
}
export function appendSqliteSessionTranscriptEvent(
options: AppendSqliteSessionTranscriptEventOptions,
): { seq: number } {
@@ -316,6 +427,21 @@ export function hasSqliteSessionTranscriptEvents(
return row?.found !== undefined;
}
/**
 * Removes all sqlite transcript rows (events and file mappings) for one
 * agent/session scope inside a single write transaction.
 * Returns true when at least one event row was deleted.
 */
export function deleteSqliteSessionTranscript(
  options: SqliteSessionTranscriptStoreOptions,
): boolean {
  const scope = normalizeTranscriptScope(options);
  return runOpenClawStateWriteTransaction((database) => {
    const deletedEvents = database.db
      .prepare("DELETE FROM transcript_events WHERE agent_id = ? AND session_id = ?")
      .run(scope.agentId, scope.sessionId);
    // File mappings are purged too, but only event deletions count as "found".
    database.db
      .prepare("DELETE FROM transcript_files WHERE agent_id = ? AND session_id = ?")
      .run(scope.agentId, scope.sessionId);
    return Number(deletedEvents.changes ?? 0) > 0;
  }, options);
}
export function exportSqliteSessionTranscriptJsonl(
options: ExportSqliteTranscriptJsonlOptions,
): string {

View File

@@ -232,11 +232,6 @@ export type SessionMaintenanceConfig = {
maxEntries?: number;
/** @deprecated Ignored. Run `openclaw doctor --fix` to remove. */
rotateBytes?: number | string;
/**
* Retention for archived reset transcripts (`*.reset.<timestamp>`).
* Set `false` to disable reset-archive cleanup. Default: same as `pruneAfter` (30d).
*/
resetArchiveRetention?: string | number | false;
/**
* Optional per-agent sessions-directory disk budget (e.g. "500mb").
* When exceeded, warn (mode=warn) or enforce oldest-first cleanup (mode=enforce).

View File

@@ -26,7 +26,6 @@ describe("SessionSchema maintenance extensions", () => {
expect(
SessionSchema.safeParse({
maintenance: {
resetArchiveRetention: "14d",
maxDiskBytes: "500mb",
highWaterBytes: "350mb",
},
@@ -34,25 +33,7 @@ describe("SessionSchema maintenance extensions", () => {
).toMatchObject({ success: true });
});
it("accepts disabling reset archive cleanup", () => {
expect(
SessionSchema.safeParse({
maintenance: {
resetArchiveRetention: false,
},
}),
).toMatchObject({ success: true });
});
it("rejects invalid maintenance extension values", () => {
expect(() =>
SessionSchema.parse({
maintenance: {
resetArchiveRetention: "never",
},
}),
).toThrow(/resetArchiveRetention|duration/i);
expect(() =>
SessionSchema.parse({
maintenance: {

View File

@@ -86,7 +86,6 @@ export const SessionSchema = z
pruneDays: z.number().int().positive().optional(),
maxEntries: z.number().int().positive().optional(),
rotateBytes: z.union([z.string(), z.number()]).optional(),
resetArchiveRetention: z.union([z.string(), z.number(), z.literal(false)]).optional(),
maxDiskBytes: z.union([z.string(), z.number()]).optional(),
highWaterBytes: z.union([z.string(), z.number()]).optional(),
})
@@ -105,19 +104,6 @@ export const SessionSchema = z
});
}
}
if (val.resetArchiveRetention !== undefined && val.resetArchiveRetention !== false) {
try {
parseDurationMs(normalizeStringifiedOptionalString(val.resetArchiveRetention) ?? "", {
defaultUnit: "d",
});
} catch {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["resetArchiveRetention"],
message: "invalid duration (use ms, s, m, h, d)",
});
}
}
if (val.maxDiskBytes !== undefined) {
try {
parseByteSize(normalizeStringifiedOptionalString(val.maxDiskBytes) ?? "", {

View File

@@ -1,4 +1,5 @@
import { createHash } from "node:crypto";
import { resolveAgentIdFromSessionKey } from "../../routing/session-key.js";
import { getTaskSessionLookupByIdForStatus } from "../../tasks/task-status-access.js";
import {
ErrorCodes,
@@ -328,6 +329,7 @@ async function loadArtifacts(
});
},
{
agentId: resolveAgentIdFromSessionKey(sessionKey),
mode: "full",
reason: "artifact query transcript scan",
},

View File

@@ -1,5 +1,4 @@
export {
archiveSessionTranscriptsForSessionDetailed,
cleanupSessionBeforeMutation,
emitGatewayBeforeResetPluginHook,
emitGatewaySessionEndPluginHook,

View File

@@ -1,5 +1,4 @@
import { randomUUID } from "node:crypto";
import fs from "node:fs";
import path from "node:path";
import { resolveModelAgentRuntimeMetadata } from "../../agents/agent-runtime-metadata.js";
import {
@@ -29,6 +28,7 @@ import {
import { resolveAgentMainSessionKey } from "../../config/sessions/main-session.js";
import {
appendSqliteSessionTranscriptEvent,
deleteSqliteSessionTranscript,
hasSqliteSessionTranscriptEvents,
replaceSqliteSessionTranscriptEvents,
} from "../../config/sessions/transcript-store.sqlite.js";
@@ -91,7 +91,6 @@ import {
} from "../session-compaction-checkpoints.js";
import { reactivateCompletedSubagentSession } from "../session-subagent-reactivation.js";
import {
archiveFileOnDisk,
buildGatewaySessionRow,
listSessionsFromStoreAsync,
loadCombinedSessionStoreForGateway,
@@ -631,7 +630,12 @@ async function handleSessionSend(params: {
}
const messageSeq =
(await readSessionMessageCountAsync(entry.sessionId, storePath, entry.sessionFile)) + 1;
(await readSessionMessageCountAsync(
entry.sessionId,
storePath,
entry.sessionFile,
resolveAgentIdFromSessionKey(canonicalKey),
)) + 1;
let sendAcked = false;
let sendPayload: unknown;
let sendCached = false;
@@ -1256,6 +1260,7 @@ export const sessionsHandlers: GatewayRequestHandlers = {
createdEntry.sessionId,
target.storePath,
createdEntry.sessionFile,
target.agentId,
)) + 1
: undefined;
@@ -1373,7 +1378,7 @@ export const sessionsHandlers: GatewayRequestHandlers = {
return;
}
const checkpoint = getSessionCompactionCheckpoint({ entry, checkpointId });
if (!checkpoint?.preCompaction.sessionFile) {
if (!checkpoint?.preCompaction.sessionId) {
respond(
false,
undefined,
@@ -1382,8 +1387,10 @@ export const sessionsHandlers: GatewayRequestHandlers = {
return;
}
const branchedSession = await forkCompactionCheckpointTranscriptAsync({
agentId: target.agentId,
sourceFile: checkpoint.preCompaction.sessionFile,
sessionDir: path.dirname(checkpoint.preCompaction.sessionFile),
sourceSessionId: checkpoint.preCompaction.sessionId,
sessionDir: entry.sessionFile ? path.dirname(entry.sessionFile) : undefined,
});
if (!branchedSession?.sessionFile) {
respond(
@@ -1472,7 +1479,7 @@ export const sessionsHandlers: GatewayRequestHandlers = {
return;
}
const checkpoint = getSessionCompactionCheckpoint({ entry, checkpointId });
if (!checkpoint?.preCompaction.sessionFile) {
if (!checkpoint?.preCompaction.sessionId) {
respond(
false,
undefined,
@@ -1494,9 +1501,12 @@ export const sessionsHandlers: GatewayRequestHandlers = {
return;
}
const target = resolveGatewaySessionStoreTarget({ cfg: loaded.cfg, key: canonicalKey });
const restoredSession = await forkCompactionCheckpointTranscriptAsync({
agentId: target.agentId,
sourceFile: checkpoint.preCompaction.sessionFile,
sessionDir: path.dirname(checkpoint.preCompaction.sessionFile),
sourceSessionId: checkpoint.preCompaction.sessionId,
sessionDir: entry.sessionFile ? path.dirname(entry.sessionFile) : undefined,
});
if (!restoredSession?.sessionFile) {
respond(
@@ -1872,7 +1882,6 @@ export const sessionsHandlers: GatewayRequestHandlers = {
const deleteTranscript = typeof p.deleteTranscript === "boolean" ? p.deleteTranscript : true;
const {
archiveSessionTranscriptsForSessionDetailed,
cleanupSessionBeforeMutation,
emitGatewaySessionEndPluginHook,
emitSessionUnboundLifecycleEvent,
@@ -1905,17 +1914,12 @@ export const sessionsHandlers: GatewayRequestHandlers = {
return hadEntry;
});
const archivedTranscripts =
deleted && deleteTranscript
? archiveSessionTranscriptsForSessionDetailed({
sessionId,
storePath,
sessionFile: entry?.sessionFile,
agentId: target.agentId,
reason: "deleted",
})
: [];
const archived = archivedTranscripts.map((entry) => entry.archivedPath);
if (deleted && deleteTranscript && sessionId) {
deleteSqliteSessionTranscript({
agentId: target.agentId,
sessionId,
});
}
if (deleted) {
emitGatewaySessionEndPluginHook({
cfg,
@@ -1925,7 +1929,6 @@ export const sessionsHandlers: GatewayRequestHandlers = {
sessionFile: entry?.sessionFile,
agentId: target.agentId,
reason: "deleted",
archivedTranscripts,
});
const emitLifecycleHooks = p.emitLifecycleHooks !== false;
await emitSessionUnboundLifecycleEvent({
@@ -1935,7 +1938,7 @@ export const sessionsHandlers: GatewayRequestHandlers = {
});
}
respond(true, { ok: true, key: target.canonicalKey, deleted, archived }, undefined);
respond(true, { ok: true, key: target.canonicalKey, deleted, archived: [] }, undefined);
if (deleted) {
emitSessionsChanged(context, {
sessionKey: target.canonicalKey,
@@ -2019,13 +2022,19 @@ export const sessionsHandlers: GatewayRequestHandlers = {
return;
}
const filePath = resolveSessionTranscriptCandidates(
const transcriptPath = resolveSessionTranscriptCandidates(
sessionId,
storePath,
entry?.sessionFile,
target.agentId,
).find((candidate) => fs.existsSync(candidate));
if (!filePath) {
)[0];
if (
!transcriptPath ||
!hasSqliteSessionTranscriptEvents({
agentId: target.agentId,
sessionId,
})
) {
respond(
true,
{
@@ -2062,7 +2071,7 @@ export const sessionsHandlers: GatewayRequestHandlers = {
sessionId,
sessionKey: target.canonicalKey,
allowGatewaySubagentBinding: true,
sessionFile: filePath,
sessionFile: transcriptPath,
workspaceDir,
config: cfg,
provider: resolvedModel.provider,
@@ -2152,11 +2161,10 @@ export const sessionsHandlers: GatewayRequestHandlers = {
return;
}
const archived = fs.existsSync(filePath) ? archiveFileOnDisk(filePath, "bak") : undefined;
replaceSqliteSessionTranscriptEvents({
agentId: target.agentId,
sessionId,
transcriptPath: filePath,
transcriptPath,
events: lines.map((line) => JSON.parse(line) as unknown),
});
@@ -2179,7 +2187,6 @@ export const sessionsHandlers: GatewayRequestHandlers = {
ok: true,
key: target.canonicalKey,
compacted: true,
archived,
kept: lines.length,
},
undefined,

View File

@@ -1,3 +1,4 @@
import { resolveAgentIdFromSessionKey } from "../routing/session-key.js";
import type { SessionLifecycleEvent } from "../sessions/session-lifecycle-events.js";
import type { SessionTranscriptUpdate } from "../sessions/transcript-events.js";
import { projectChatDisplayMessage } from "./chat-display-projection.js";
@@ -119,8 +120,9 @@ async function handleTranscriptUpdateBroadcast(
return;
}
const { entry, storePath } = loadSessionEntry(sessionKey);
const agentId = resolveAgentIdFromSessionKey(sessionKey);
const messageSeq = entry?.sessionId
? await readSessionMessageCountAsync(entry.sessionId, storePath, entry.sessionFile)
? await readSessionMessageCountAsync(entry.sessionId, storePath, entry.sessionFile, agentId)
: undefined;
const sessionSnapshot = buildGatewaySessionSnapshot({
sessionRow: loadGatewaySessionRow(sessionKey, { transcriptUsageMaxBytes: 64 * 1024 }),

View File

@@ -2,6 +2,9 @@ import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { expect, test, vi } from "vitest";
import { loadSessionStore, saveSessionStore } from "../config/sessions.js";
import { replaceSqliteSessionTranscriptEvents } from "../config/sessions/transcript-store.sqlite.js";
import { DEFAULT_AGENT_ID } from "../routing/session-key.js";
import { withEnvAsync } from "../test-utils/env.js";
import {
embeddedRunMock,
@@ -25,36 +28,34 @@ test("sessions.compaction.* lists checkpoints and branches or restores from pre-
const fixture = await createCheckpointFixture(dir);
const checkpointCreatedAt = Date.now();
const { SessionManager } = await getSessionManagerModule();
await writeSessionStore({
entries: {
main: sessionStoreEntry(fixture.sessionId, {
sessionFile: fixture.sessionFile,
compactionCheckpoints: [
{
checkpointId: "checkpoint-1",
sessionKey: "agent:main:main",
sessionId: fixture.sessionId,
createdAt: checkpointCreatedAt,
reason: "manual",
tokensBefore: 123,
tokensAfter: 45,
summary: "checkpoint summary",
firstKeptEntryId: fixture.preCompactionLeafId,
preCompaction: {
sessionId: fixture.preCompactionSession.getSessionId(),
sessionFile: fixture.preCompactionSessionFile,
leafId: fixture.preCompactionLeafId,
},
postCompaction: {
sessionId: fixture.sessionId,
sessionFile: fixture.sessionFile,
leafId: fixture.postCompactionLeafId,
entryId: fixture.postCompactionLeafId,
},
await saveSessionStore(storePath, {
"agent:main:main": sessionStoreEntry(fixture.sessionId, {
sessionFile: fixture.sessionFile,
compactionCheckpoints: [
{
checkpointId: "checkpoint-1",
sessionKey: "agent:main:main",
sessionId: fixture.sessionId,
createdAt: checkpointCreatedAt,
reason: "manual",
tokensBefore: 123,
tokensAfter: 45,
summary: "checkpoint summary",
firstKeptEntryId: fixture.preCompactionLeafId,
preCompaction: {
sessionId: fixture.preCompactionSession.getSessionId(),
sessionFile: fixture.preCompactionSessionFile,
leafId: fixture.preCompactionLeafId,
},
],
}),
},
postCompaction: {
sessionId: fixture.sessionId,
sessionFile: fixture.sessionFile,
leafId: fixture.postCompactionLeafId,
entryId: fixture.postCompactionLeafId,
},
},
],
}),
});
const { ws } = await openClient();
@@ -152,7 +153,7 @@ test("sessions.compaction.* lists checkpoints and branches or restores from pre-
fixture.preCompactionSession.getEntries().length,
);
const storeAfterBranch = JSON.parse(await fs.readFile(storePath, "utf-8")) as Record<
const storeAfterBranch = loadSessionStore(storePath) as Record<
string,
{
parentSessionKey?: string;
@@ -205,7 +206,7 @@ test("sessions.compaction.* lists checkpoints and branches or restores from pre-
fixture.preCompactionSession.getEntries().length,
);
const storeAfterRestore = JSON.parse(await fs.readFile(storePath, "utf-8")) as Record<
const storeAfterRestore = loadSessionStore(storePath) as Record<
string,
{ compactionCheckpoints?: unknown[]; sessionId?: string }
>;
@@ -217,18 +218,32 @@ test("sessions.compaction.* lists checkpoints and branches or restores from pre-
test("sessions.compact without maxLines runs embedded manual compaction for checkpoint-capable flows", async () => {
const { dir, storePath } = await createSessionStoreDir();
await fs.writeFile(
path.join(dir, "sess-main.jsonl"),
`${JSON.stringify({ role: "user", content: "hello" })}\n`,
"utf-8",
);
await writeSessionStore({
entries: {
main: sessionStoreEntry("sess-main", {
thinkingLevel: "medium",
reasoningLevel: "stream",
}),
},
replaceSqliteSessionTranscriptEvents({
agentId: DEFAULT_AGENT_ID,
sessionId: "sess-main",
transcriptPath: path.join(dir, "sess-main.jsonl"),
events: [
{
type: "session",
id: "sess-main",
timestamp: new Date().toISOString(),
cwd: dir,
},
{
type: "message",
id: "user-1",
parentId: null,
timestamp: new Date().toISOString(),
message: { role: "user", content: "hello", timestamp: Date.now() },
},
],
});
await saveSessionStore(storePath, {
"agent:main:main": sessionStoreEntry("sess-main", {
sessionFile: path.join(dir, "sess-main.jsonl"),
thinkingLevel: "medium",
reasoningLevel: "stream",
}),
});
const { ws } = await openClient();
@@ -259,7 +274,7 @@ test("sessions.compact without maxLines runs embedded manual compaction for chec
}),
);
const store = JSON.parse(await fs.readFile(storePath, "utf-8")) as Record<
const store = loadSessionStore(storePath) as Record<
string,
{ compactionCount?: number; totalTokens?: number; totalTokensFresh?: boolean }
>;

View File

@@ -1,6 +1,8 @@
import fs from "node:fs/promises";
import path from "node:path";
import { expect, test } from "vitest";
import { loadSessionStore } from "../config/sessions.js";
import { replaceSqliteSessionTranscriptEvents } from "../config/sessions/transcript-store.sqlite.js";
import { embeddedRunMock, rpcReq, writeSessionStore } from "./test-helpers.js";
import {
setupGatewaySessionsTestHarness,
@@ -171,15 +173,18 @@ test("sessions.delete emits session_end with deleted reason and no replacement",
const { dir } = await createSessionStoreDir();
await writeSingleLineSession(dir, "sess-main", "hello");
const transcriptPath = path.join(dir, "sess-delete.jsonl");
await fs.writeFile(
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "sess-delete",
transcriptPath,
`${JSON.stringify({
type: "message",
id: "m-delete",
message: { role: "user", content: "delete me" },
})}\n`,
"utf-8",
);
events: [
{
type: "message",
id: "m-delete",
message: { role: "user", content: "delete me" },
},
],
});
await writeSessionStore({
entries: {
@@ -205,9 +210,8 @@ test("sessions.delete emits session_end with deleted reason and no replacement",
sessionId: "sess-delete",
sessionKey: "agent:main:discord:group:delete",
reason: "deleted",
transcriptArchived: true,
});
expect((event as { sessionFile?: string } | undefined)?.sessionFile).toContain(".jsonl.deleted.");
expect((event as { sessionFile?: string } | undefined)?.sessionFile).toBe(transcriptPath);
expect((event as { nextSessionId?: string } | undefined)?.nextSessionId).toBeUndefined();
expect(context).toMatchObject({
sessionId: "sess-delete",
@@ -339,15 +343,7 @@ test("sessions.delete returns unavailable when active run does not stop", async
);
expect(browserSessionTabMocks.closeTrackedBrowserTabsForSessions).not.toHaveBeenCalled();
const store = JSON.parse(await fs.readFile(storePath, "utf-8")) as Record<
string,
{ sessionId?: string }
>;
const store = loadSessionStore(storePath);
expect(store["agent:main:discord:group:dev"]?.sessionId).toBe("sess-active");
const filesAfterDeleteAttempt = await fs.readdir(dir);
expect(filesAfterDeleteAttempt).not.toContainEqual(
expect.stringMatching(/^sess-active\.jsonl\.deleted\./),
);
ws.close();
});

View File

@@ -3,6 +3,7 @@ import os from "node:os";
import path from "node:path";
import { expect, test, vi } from "vitest";
import { WebSocket } from "ws";
import { loadSessionStore, saveSessionStore } from "../config/sessions.js";
import { isSessionPatchEvent } from "../hooks/internal-hooks.js";
import { GATEWAY_CLIENT_IDS, GATEWAY_CLIENT_MODES } from "./protocol/client-info.js";
import {
@@ -122,12 +123,10 @@ test("session:patch hook fires with correct context", async () => {
const storePath = path.join(dir, "sessions.json");
testState.sessionStorePath = storePath;
await writeSessionStore({
entries: {
main: sessionStoreEntry("sess-hook-test", {
label: "original-label",
}),
},
await saveSessionStore(storePath, {
"agent:main:main": sessionStoreEntry("sess-hook-test", {
label: "original-label",
}),
});
sessionHookMocks.triggerInternalHook.mockClear();
@@ -307,11 +306,9 @@ test("control-ui client can delete sessions even in webchat mode", async () => {
const storePath = path.join(dir, "sessions.json");
testState.sessionStorePath = storePath;
await writeSessionStore({
entries: {
main: sessionStoreEntry("sess-main"),
"discord:group:dev": sessionStoreEntry("sess-group"),
},
await saveSessionStore(storePath, {
"agent:main:main": sessionStoreEntry("sess-main"),
"agent:main:discord:group:dev": sessionStoreEntry("sess-group"),
});
const ws = new WebSocket(`ws://127.0.0.1:${getHarness().port}`, {
@@ -335,10 +332,7 @@ test("control-ui client can delete sessions even in webchat mode", async () => {
expect(deleted.ok).toBe(true);
expect(deleted.payload?.deleted).toBe(true);
const store = JSON.parse(await fs.readFile(storePath, "utf-8")) as Record<
string,
{ sessionId?: string }
>;
const store = loadSessionStore(storePath) as Record<string, { sessionId?: string }>;
expect(store["agent:main:discord:group:dev"]).toBeUndefined();
ws.close();

View File

@@ -1,7 +1,11 @@
import fs from "node:fs/promises";
import path from "node:path";
import { expect, test } from "vitest";
import { appendSqliteSessionTranscriptEvent } from "../config/sessions/transcript-store.sqlite.js";
import { loadSessionStore } from "../config/sessions.js";
import {
appendSqliteSessionTranscriptEvent,
replaceSqliteSessionTranscriptEvents,
} from "../config/sessions/transcript-store.sqlite.js";
import { embeddedRunMock, testState, writeSessionStore } from "./test-helpers.js";
import {
setupGatewaySessionsTestHarness,
@@ -101,15 +105,18 @@ test("sessions.reset emits internal command hook with reason", async () => {
test("sessions.reset emits before_reset hook with transcript context", async () => {
const { dir } = await createSessionStoreDir();
const transcriptPath = path.join(dir, "sess-main.jsonl");
await fs.writeFile(
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "sess-main",
transcriptPath,
`${JSON.stringify({
type: "message",
id: "m1",
message: { role: "user", content: "hello from transcript" },
})}\n`,
"utf-8",
);
events: [
{
type: "message",
id: "m1",
message: { role: "user", content: "hello from transcript" },
},
],
});
await writeSessionStore({
entries: {
@@ -141,14 +148,17 @@ test("sessions.reset emits before_reset hook with transcript context", async ()
test("sessions.reset emits before_reset hook with scoped SQLite transcript context", async () => {
const { dir } = await createSessionStoreDir();
const transcriptPath = path.join(dir, "missing-sess-main.jsonl");
appendSqliteSessionTranscriptEvent({
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "sess-main-sqlite",
event: {
type: "message",
id: "m1",
message: { role: "user", content: "hello from sqlite transcript" },
},
transcriptPath,
events: [
{
type: "message",
id: "m1",
message: { role: "user", content: "hello from sqlite transcript" },
},
],
});
await writeSessionStore({
@@ -192,15 +202,18 @@ test("sessions.reset emits before_reset hook with scoped SQLite transcript conte
test("sessions.reset emits enriched session_end and session_start hooks", async () => {
const { dir } = await createSessionStoreDir();
const transcriptPath = path.join(dir, "sess-main.jsonl");
await fs.writeFile(
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "sess-main",
transcriptPath,
`${JSON.stringify({
type: "message",
id: "m1",
message: { role: "user", content: "hello from transcript" },
})}\n`,
"utf-8",
);
events: [
{
type: "message",
id: "m1",
message: { role: "user", content: "hello from transcript" },
},
],
});
await writeSessionStore({
entries: {
@@ -223,18 +236,29 @@ test("sessions.reset emits enriched session_end and session_start hooks", async
const [endEvent, endContext] = firstHookCall(sessionLifecycleHookMocks.runSessionEnd);
const [startEvent, startContext] = firstHookCall(sessionLifecycleHookMocks.runSessionStart);
expect(endEvent.sessionId).toBe("sess-main");
expect(endEvent.sessionKey).toBe("agent:main:main");
expect(endEvent.reason).toBe("new");
expect(endEvent.transcriptArchived).toBe(true);
expect(endEvent.sessionFile).toEqual(expect.stringContaining(".jsonl.reset."));
expect(endEvent.nextSessionId).toBe(startEvent.sessionId);
expectMainHookContext(endContext, "sess-main");
expect(startEvent.sessionKey).toBe("agent:main:main");
expect(startEvent.resumedFrom).toBe("sess-main");
expect(startContext.sessionId).toBe(startEvent.sessionId);
expect(startContext.sessionKey).toBe("agent:main:main");
expect(startContext.agentId).toBe("main");
expect(endEvent).toMatchObject({
sessionId: "sess-main",
sessionKey: "agent:main:main",
reason: "new",
});
expect((endEvent as { sessionFile?: string } | undefined)?.sessionFile).toBe(transcriptPath);
expect((endEvent as { nextSessionId?: string } | undefined)?.nextSessionId).toBe(
(startEvent as { sessionId?: string } | undefined)?.sessionId,
);
expect(endContext).toMatchObject({
sessionId: "sess-main",
sessionKey: "agent:main:main",
agentId: "main",
});
expect(startEvent).toMatchObject({
sessionKey: "agent:main:main",
resumedFrom: "sess-main",
});
expect(startContext).toMatchObject({
sessionId: (startEvent as { sessionId?: string } | undefined)?.sessionId,
sessionKey: "agent:main:main",
agentId: "main",
});
});
test("sessions.reset returns unavailable when active run does not stop", async () => {
@@ -259,39 +283,38 @@ test("sessions.reset returns unavailable when active run does not stop", async (
expect(waitCallCountAtSnapshotClear).toEqual([1]);
expect(browserSessionTabMocks.closeTrackedBrowserTabsForSessions).not.toHaveBeenCalled();
const store = JSON.parse(await fs.readFile(storePath, "utf-8")) as Record<
string,
{ sessionId?: string }
>;
const store = loadSessionStore(storePath);
expect(store["agent:main:main"]?.sessionId).toBe("sess-main");
const filesAfterResetAttempt = await fs.readdir(dir);
expect(filesAfterResetAttempt).not.toContainEqual(
expect.stringMatching(/^sess-main\.jsonl\.reset\./),
);
});
test("sessions.reset emits before_reset for the entry actually reset in the writer slot", async () => {
const { dir } = await createSessionStoreDir();
const oldTranscriptPath = path.join(dir, "sess-old.jsonl");
const newTranscriptPath = path.join(dir, "sess-new.jsonl");
await fs.writeFile(
oldTranscriptPath,
`${JSON.stringify({
type: "message",
id: "m-old",
message: { role: "user", content: "old transcript" },
})}\n`,
"utf-8",
);
await fs.writeFile(
newTranscriptPath,
`${JSON.stringify({
type: "message",
id: "m-new",
message: { role: "user", content: "new transcript" },
})}\n`,
"utf-8",
);
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "sess-old",
transcriptPath: oldTranscriptPath,
events: [
{
type: "message",
id: "m-old",
message: { role: "user", content: "old transcript" },
},
],
});
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "sess-new",
transcriptPath: newTranscriptPath,
events: [
{
type: "message",
id: "m-new",
message: { role: "user", content: "new transcript" },
},
],
});
await writeSessionStore({
entries: {
@@ -374,15 +397,18 @@ test("sessions.create with emitCommandHooks=true fires command:new hook against
test("sessions.create with emitCommandHooks=true emits reset lifecycle hooks against parent (#76957)", async () => {
const { dir } = await createSessionStoreDir();
const transcriptPath = path.join(dir, "sess-parent-hooks.jsonl");
await fs.writeFile(
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "sess-parent-hooks",
transcriptPath,
`${JSON.stringify({
type: "message",
id: "m1",
message: { role: "user", content: "remember this before new" },
})}\n`,
"utf-8",
);
events: [
{
type: "message",
id: "m1",
message: { role: "user", content: "remember this before new" },
},
],
});
await writeSessionStore({
entries: {

View File

@@ -2,7 +2,12 @@ import fsSync from "node:fs";
import fs from "node:fs/promises";
import path from "node:path";
import { expect, test, vi } from "vitest";
import { piSdkMock, rpcReq, testState, writeSessionStore } from "./test-helpers.js";
import { loadSessionStore } from "../config/sessions.js";
import {
loadSqliteSessionTranscriptEvents,
replaceSqliteSessionTranscriptEvents,
} from "../config/sessions/transcript-store.sqlite.js";
import { piSdkMock, rpcReq, writeSessionStore } from "./test-helpers.js";
import {
directSessionReq as directSessionHandlerReq,
setupGatewaySessionsTestHarness,
@@ -28,18 +33,18 @@ test("lists and patches session store via sessions.* RPC", async () => {
const recent = now - 30_000;
const stale = now - 15 * 60_000;
await fs.writeFile(
path.join(dir, "sess-main.jsonl"),
`${Array.from({ length: 10 })
.map((_, idx) => JSON.stringify({ role: "user", content: `line ${idx}` }))
.join("\n")}\n`,
"utf-8",
);
await fs.writeFile(
path.join(dir, "sess-group.jsonl"),
`${JSON.stringify({ role: "user", content: "group line 0" })}\n`,
"utf-8",
);
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "sess-main",
transcriptPath: path.join(dir, "sess-main.jsonl"),
events: Array.from({ length: 10 }, (_, idx) => ({ role: "user", content: `line ${idx}` })),
});
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: "sess-group",
transcriptPath: path.join(dir, "sess-group.jsonl"),
events: [{ role: "user", content: "group line 0" }],
});
await writeSessionStore({
entries: {
@@ -389,12 +394,9 @@ test("lists and patches session store via sessions.* RPC", async () => {
});
expect(compacted.ok).toBe(true);
expect(compacted.payload?.compacted).toBe(true);
const compactedLines = collectNonEmptyLines(
await fs.readFile(path.join(dir, "sess-main.jsonl"), "utf-8"),
);
expect(compactedLines).toHaveLength(3);
const filesAfterCompact = await fs.readdir(dir);
expect(filesAfterCompact).toContainEqual(expect.stringMatching(/^sess-main\.jsonl\.bak\./));
expect(
loadSqliteSessionTranscriptEvents({ agentId: "main", sessionId: "sess-main" }),
).toHaveLength(3);
const deleted = await directSessionReq<{ ok: true; deleted: boolean }>("sessions.delete", {
key: "agent:main:discord:group:dev",
@@ -405,11 +407,12 @@ test("lists and patches session store via sessions.* RPC", async () => {
sessions: Array<{ key: string }>;
}>("sessions.list", {});
expect(listAfterDelete.ok).toBe(true);
expect(listAfterDelete.payload?.sessions.map((session) => session.key)).not.toContain(
"agent:main:discord:group:dev",
expect(
listAfterDelete.payload?.sessions.some((s) => s.key === "agent:main:discord:group:dev"),
).toBe(false);
expect(loadSqliteSessionTranscriptEvents({ agentId: "main", sessionId: "sess-group" })).toEqual(
[],
);
const filesAfterDelete = await fs.readdir(dir);
expect(filesAfterDelete).toContainEqual(expect.stringMatching(/^sess-group\.jsonl\.deleted\./));
const reset = await directSessionReq<{
ok: true;
@@ -429,14 +432,12 @@ test("lists and patches session store via sessions.* RPC", async () => {
expect(reset.payload?.entry.model).toBe("gpt-test-a");
expect(reset.payload?.entry.lastAccountId).toBe("work");
expect(reset.payload?.entry.lastThreadId).toBe("1737500000.123456");
const storeAfterReset = JSON.parse(await fs.readFile(storePath, "utf-8")) as Record<
string,
{ lastAccountId?: string; lastThreadId?: string | number }
>;
const storeAfterReset = loadSessionStore(storePath);
expect(storeAfterReset["agent:main:main"]?.lastAccountId).toBe("work");
expect(storeAfterReset["agent:main:main"]?.lastThreadId).toBe("1737500000.123456");
const filesAfterReset = await fs.readdir(dir);
expect(filesAfterReset).toContainEqual(expect.stringMatching(/^sess-main\.jsonl\.reset\./));
expect(loadSqliteSessionTranscriptEvents({ agentId: "main", sessionId: "sess-main" })).toEqual(
[],
);
const badThinking = await directSessionReq("sessions.patch", {
key: "agent:main:main",

View File

@@ -1,7 +0,0 @@
export {
archiveFileOnDisk,
archiveSessionTranscriptsDetailed,
archiveSessionTranscripts,
cleanupArchivedSessionTranscripts,
resolveStableSessionEndTranscript,
} from "./session-transcript-files.fs.js";

View File

@@ -1,35 +0,0 @@
import { importFreshModule } from "openclaw/plugin-sdk/test-fixtures";
import { describe, expect, it, vi } from "vitest";
describe("session archive runtime import guards", () => {
it.each([
{
label: "reply session module",
importPath: "../auto-reply/reply/session.js",
scope: "reply-session",
},
{
label: "session store module",
importPath: "../config/sessions/store.js",
scope: "session-store",
},
])("does not load archive runtime on module import for $label", async ({ importPath, scope }) => {
const archiveRuntimeLoads = vi.fn();
vi.doMock("./session-archive.runtime.js", async () => {
archiveRuntimeLoads();
return await vi.importActual<typeof import("./session-archive.runtime.js")>(
"./session-archive.runtime.js",
);
});
try {
await importFreshModule<typeof import("./session-archive.runtime.js")>(
import.meta.url,
`${importPath}?scope=no-archive-runtime-on-import-${scope}`,
);
expect(archiveRuntimeLoads).not.toHaveBeenCalled();
} finally {
vi.doUnmock("./session-archive.runtime.js");
}
});
});

View File

@@ -1,6 +0,0 @@
export {
archiveSessionTranscriptsDetailed,
archiveSessionTranscripts,
cleanupArchivedSessionTranscripts,
resolveStableSessionEndTranscript,
} from "./session-archive.fs.js";

View File

@@ -1,14 +1,18 @@
import fsSync from "node:fs";
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, describe, expect, test, vi } from "vitest";
import type { AssistantMessage } from "../agents/pi-ai-contract.js";
import { SessionManager } from "../agents/transcript/session-transcript-contract.js";
import { loadSessionStore, saveSessionStore } from "../config/sessions.js";
import {
CURRENT_SESSION_VERSION,
SessionManager,
} from "../agents/transcript/session-transcript-contract.js";
exportSqliteSessionTranscriptJsonl,
hasSqliteSessionTranscriptEvents,
loadSqliteSessionTranscriptEvents,
replaceSqliteSessionTranscriptEvents,
} from "../config/sessions/transcript-store.sqlite.js";
import type { OpenClawConfig } from "../config/types.openclaw.js";
import { DEFAULT_AGENT_ID } from "../routing/session-key.js";
import {
captureCompactionCheckpointSnapshotAsync,
cleanupCompactionCheckpointSnapshot,
@@ -20,32 +24,11 @@ import {
const tempDirs: string[] = [];
function requireNonEmptyString(value: string | null | undefined, message: string): string {
if (!value) {
throw new Error(message);
}
return value;
}
function requireRecord(value: unknown, message: string): Record<string, unknown> {
if (!value || typeof value !== "object") {
throw new Error(message);
}
return value as Record<string, unknown>;
}
function expectRecordFields(value: unknown, expected: Record<string, unknown>): void {
const record = requireRecord(value, "expected record");
for (const [key, expectedValue] of Object.entries(expected)) {
expect(record[key]).toEqual(expectedValue);
}
}
function expectNonEmptyStringField(value: unknown, message: string): string {
if (typeof value !== "string" || value.length === 0) {
throw new Error(message);
}
return value;
function readSqliteTranscriptEvents(sessionId: string): Record<string, unknown>[] {
return loadSqliteSessionTranscriptEvents({
agentId: DEFAULT_AGENT_ID,
sessionId,
}).map((entry) => entry.event as Record<string, unknown>);
}
afterEach(async () => {
@@ -53,7 +36,7 @@ afterEach(async () => {
});
describe("session-compaction-checkpoints", () => {
test("async capture stores the copied pre-compaction transcript without sync copy", async () => {
test("async capture stores the pre-compaction transcript in SQLite", async () => {
const dir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-checkpoint-async-"));
tempDirs.push(dir);
@@ -72,40 +55,59 @@ describe("session-compaction-checkpoints", () => {
timestamp: Date.now(),
} as AssistantMessage);
const sessionFile = requireNonEmptyString(session.getSessionFile(), "session file missing");
const leafId = requireNonEmptyString(session.getLeafId(), "session leaf id missing");
const sessionFile = session.getSessionFile();
const leafId = session.getLeafId();
expect(sessionFile).toBeTruthy();
expect(leafId).toBeTruthy();
const originalBefore = await fs.readFile(sessionFile, "utf-8");
const copyFileSyncSpy = vi.spyOn(fsSync, "copyFileSync");
const sessionManagerOpenSpy = vi.spyOn(SessionManager, "open");
const originalBefore = exportSqliteSessionTranscriptJsonl({
agentId: DEFAULT_AGENT_ID,
sessionId: session.getSessionId(),
});
try {
const snapshot = await captureCompactionCheckpointSnapshotAsync({
sessionManager: session,
sessionFile,
sessionFile: sessionFile!,
});
expect(copyFileSyncSpy).not.toHaveBeenCalled();
expect(sessionManagerOpenSpy).not.toHaveBeenCalled();
if (!snapshot) {
throw new Error("expected checkpoint snapshot");
}
expect(snapshot.leafId).toBe(leafId);
expect(snapshot.sessionFile).not.toBe(sessionFile);
expect(snapshot.sessionFile).toContain(".checkpoint.");
expect(fsSync.existsSync(snapshot.sessionFile)).toBe(true);
expect(await fs.readFile(snapshot.sessionFile, "utf-8")).toBe(originalBefore);
expect(snapshot).not.toBeNull();
expect(snapshot?.leafId).toBe(leafId);
expect(snapshot?.sessionFile).not.toBe(sessionFile);
expect(snapshot?.sessionFile).toContain(".checkpoint.");
const snapshotBefore = exportSqliteSessionTranscriptJsonl({
agentId: DEFAULT_AGENT_ID,
sessionId: snapshot!.sessionId,
});
expect(snapshotBefore).toContain("before async compaction");
expect(snapshotBefore).toContain("async working on it");
expect(snapshotBefore).not.toBe(originalBefore);
session.appendCompaction("checkpoint summary", leafId, 123, { ok: true });
session.appendCompaction("checkpoint summary", leafId!, 123, { ok: true });
expect(await fs.readFile(snapshot.sessionFile, "utf-8")).toBe(originalBefore);
expect(await fs.readFile(sessionFile, "utf-8")).not.toBe(originalBefore);
expect(
exportSqliteSessionTranscriptJsonl({
agentId: DEFAULT_AGENT_ID,
sessionId: snapshot!.sessionId,
}),
).toBe(snapshotBefore);
expect(
exportSqliteSessionTranscriptJsonl({
agentId: DEFAULT_AGENT_ID,
sessionId: session.getSessionId(),
}),
).not.toBe(originalBefore);
await cleanupCompactionCheckpointSnapshot(snapshot);
expect(fsSync.existsSync(snapshot.sessionFile)).toBe(false);
expect(fsSync.existsSync(sessionFile)).toBe(true);
expect(
hasSqliteSessionTranscriptEvents({
agentId: DEFAULT_AGENT_ID,
sessionId: snapshot!.sessionId,
}),
).toBe(true);
} finally {
copyFileSyncSpy.mockRestore();
sessionManagerOpenSpy.mockRestore();
}
});
@@ -129,32 +131,29 @@ describe("session-compaction-checkpoints", () => {
timestamp: Date.now(),
} as unknown as AssistantMessage);
const sessionFile = requireNonEmptyString(session.getSessionFile(), "session file missing");
const sessionId = requireNonEmptyString(session.getSessionId(), "session id missing");
const leafId = requireNonEmptyString(session.getLeafId(), "session leaf id missing");
await fs.appendFile(sessionFile, "\nnot-json\n", "utf-8");
const sessionFile = session.getSessionFile();
const sessionId = session.getSessionId();
const leafId = session.getLeafId();
expect(sessionFile).toBeTruthy();
expect(sessionId).toBeTruthy();
expect(leafId).toBeTruthy();
const copyFileSyncSpy = vi.spyOn(fsSync, "copyFileSync");
const sessionManagerOpenSpy = vi.spyOn(SessionManager, "open");
let snapshot: Awaited<ReturnType<typeof captureCompactionCheckpointSnapshotAsync>> = null;
try {
expect(await readSessionLeafIdFromTranscriptAsync(sessionFile)).toBe(leafId);
expect(await readSessionLeafIdFromTranscriptAsync(sessionFile!)).toBe(leafId);
snapshot = await captureCompactionCheckpointSnapshotAsync({
sessionFile,
sessionFile: sessionFile!,
});
expect(copyFileSyncSpy).not.toHaveBeenCalled();
expect(sessionManagerOpenSpy).not.toHaveBeenCalled();
if (!snapshot) {
throw new Error("expected checkpoint snapshot");
}
expect(snapshot.sessionId).toBe(sessionId);
expect(snapshot.leafId).toBe(leafId);
expect(snapshot.sessionFile).not.toBe(sessionFile);
expect(snapshot.sessionFile).toContain(".checkpoint.");
expect(snapshot).not.toBeNull();
expect(snapshot?.sessionId).not.toBe(sessionId);
expect(snapshot?.leafId).toBe(leafId);
expect(snapshot?.sessionFile).not.toBe(sessionFile);
expect(snapshot?.sessionFile).toContain(".checkpoint.");
} finally {
await cleanupCompactionCheckpointSnapshot(snapshot);
copyFileSyncSpy.mockRestore();
sessionManagerOpenSpy.mockRestore();
}
});
@@ -169,24 +168,17 @@ describe("session-compaction-checkpoints", () => {
content: "before compaction",
timestamp: Date.now(),
});
const sessionFile = requireNonEmptyString(session.getSessionFile(), "session file missing");
await fs.appendFile(sessionFile, "x".repeat(128), "utf-8");
const sessionFile = session.getSessionFile();
expect(sessionFile).toBeTruthy();
const copyFileSyncSpy = vi.spyOn(fsSync, "copyFileSync");
try {
const snapshot = await captureCompactionCheckpointSnapshotAsync({
sessionManager: session,
sessionFile,
maxBytes: 64,
});
const snapshot = await captureCompactionCheckpointSnapshotAsync({
sessionManager: session,
sessionFile: sessionFile!,
maxBytes: 64,
});
expect(snapshot).toBeNull();
expect(copyFileSyncSpy).not.toHaveBeenCalled();
expect(MAX_COMPACTION_CHECKPOINT_SNAPSHOT_BYTES).toBeGreaterThan(64);
expect(fsSync.readdirSync(dir).some((file) => file.includes(".checkpoint."))).toBe(false);
} finally {
copyFileSyncSpy.mockRestore();
}
expect(snapshot).toBeNull();
expect(MAX_COMPACTION_CHECKPOINT_SNAPSHOT_BYTES).toBeGreaterThan(64);
});
test("async fork creates a checkpoint branch transcript without SessionManager sync reads", async () => {
@@ -208,48 +200,34 @@ describe("session-compaction-checkpoints", () => {
timestamp: Date.now(),
} as unknown as AssistantMessage);
const sessionFile = requireNonEmptyString(session.getSessionFile(), "session file missing");
await fs.appendFile(sessionFile, "\nnot-json\n", "utf-8");
const sessionFile = session.getSessionFile();
expect(sessionFile).toBeTruthy();
const openSpy = vi.spyOn(SessionManager, "open");
const forkSpy = vi.spyOn(SessionManager, "forkFrom");
let forked: Awaited<ReturnType<typeof forkCompactionCheckpointTranscriptAsync>> = null;
try {
forked = await forkCompactionCheckpointTranscriptAsync({
sourceFile: sessionFile,
sourceFile: sessionFile!,
sessionDir: dir,
});
expect(openSpy).not.toHaveBeenCalled();
expect(forkSpy).not.toHaveBeenCalled();
if (!forked) {
throw new Error("expected forked checkpoint transcript");
}
expectNonEmptyStringField(forked.sessionFile, "expected forked session file");
expect(forked.sessionFile).not.toBe(sessionFile);
expect(forked.sessionId).toBeTypeOf("string");
expect(forked.sessionId).not.toBe("");
expect(forked).not.toBeNull();
expect(forked?.sessionFile).not.toBe(sessionFile);
expect(forked?.sessionId).toBeTruthy();
} finally {
openSpy.mockRestore();
forkSpy.mockRestore();
}
const forkedLines = (await fs.readFile(forked.sessionFile, "utf-8")).trim().split(/\r?\n/);
const forkedEntries = forkedLines.map((line) => JSON.parse(line) as Record<string, unknown>);
const sourceEntries = (await fs.readFile(sessionFile, "utf-8"))
.trim()
.split(/\r?\n/)
.flatMap((line) => {
try {
return [JSON.parse(line) as Record<string, unknown>];
} catch {
return [];
}
});
const forkedEntries = readSqliteTranscriptEvents(forked!.sessionId);
const sourceEntries = readSqliteTranscriptEvents(session.getSessionId());
expectRecordFields(forkedEntries[0], {
expect(forkedEntries[0]).toMatchObject({
type: "session",
id: forked.sessionId,
id: forked!.sessionId,
cwd: dir,
parentSession: sessionFile,
});
@@ -258,32 +236,11 @@ describe("session-compaction-checkpoints", () => {
);
});
test("async fork migrates legacy checkpoint snapshots before writing a current header", async () => {
test("async fork ignores legacy checkpoint files that doctor has not imported", async () => {
const dir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-checkpoint-legacy-fork-"));
tempDirs.push(dir);
const legacySessionFile = path.join(dir, "legacy.jsonl");
const firstMessage = {
type: "message",
timestamp: new Date(0).toISOString(),
message: {
role: "user",
content: "legacy first",
timestamp: 1,
},
};
const secondMessage = {
type: "message",
timestamp: new Date(1).toISOString(),
message: {
role: "assistant",
content: "legacy second",
api: "responses",
provider: "openai",
model: "gpt-test",
timestamp: 2,
},
};
await fs.writeFile(
legacySessionFile,
[
@@ -293,8 +250,6 @@ describe("session-compaction-checkpoints", () => {
timestamp: new Date(0).toISOString(),
cwd: dir,
}),
JSON.stringify(firstMessage),
JSON.stringify(secondMessage),
"",
].join("\n"),
"utf-8",
@@ -305,58 +260,31 @@ describe("session-compaction-checkpoints", () => {
sessionDir: dir,
});
if (!forked) {
throw new Error("expected forked checkpoint transcript");
}
expectNonEmptyStringField(forked.sessionFile, "expected forked session file");
const forkedEntries = (await fs.readFile(forked.sessionFile, "utf-8"))
.trim()
.split(/\r?\n/)
.map((line) => JSON.parse(line) as Record<string, unknown>);
expectRecordFields(forkedEntries[0], {
type: "session",
version: CURRENT_SESSION_VERSION,
id: forked.sessionId,
parentSession: legacySessionFile,
});
expectRecordFields(forkedEntries[1], {
type: "message",
parentId: null,
});
expect(requireRecord(forkedEntries[1]?.message, "first forked message").content).toBe(
"legacy first",
);
expect(forkedEntries[1]?.id).toBeTypeOf("string");
expect(forkedEntries[1]?.id).not.toBe("");
expectRecordFields(forkedEntries[2], {
type: "message",
parentId: forkedEntries[1]?.id,
});
expect(requireRecord(forkedEntries[2]?.message, "second forked message").content).toBe(
"legacy second",
);
expect(forkedEntries[2]?.id).toBeTypeOf("string");
expect(forkedEntries[2]?.id).not.toBe("");
const messages = SessionManager.open(forked.sessionFile, dir).buildSessionContext().messages;
expect(messages.map((message) => (message as { content?: unknown }).content)).toEqual([
"legacy first",
"legacy second",
]);
expect(forked).toBeNull();
});
test("persist trims old checkpoint metadata and removes trimmed snapshot files", async () => {
test("persist trims old checkpoint metadata and removes trimmed SQLite snapshots", async () => {
const dir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-checkpoint-trim-"));
tempDirs.push(dir);
const storePath = path.join(dir, "sessions.json");
const storePath = path.join(dir, ".openclaw", "agents", "main", "sessions", "sessions.json");
const sessionId = "sess";
const sessionKey = "agent:main:main";
const now = Date.now();
const existingCheckpoints = Array.from({ length: 26 }, (_, index) => {
const uuid = `${String(index + 1).padStart(8, "0")}-1111-4111-8111-111111111111`;
const sessionFile = path.join(dir, `sess.checkpoint.${uuid}.jsonl`);
fsSync.writeFileSync(sessionFile, `checkpoint ${index}`, "utf-8");
const checkpointSessionId = `checkpoint-session-${index}`;
replaceSqliteSessionTranscriptEvents({
agentId: DEFAULT_AGENT_ID,
sessionId: checkpointSessionId,
events: [
{
type: "session",
id: checkpointSessionId,
timestamp: new Date(now + index).toISOString(),
cwd: dir,
},
],
});
return {
checkpointId: `old-${index}`,
sessionKey,
@@ -364,62 +292,75 @@ describe("session-compaction-checkpoints", () => {
createdAt: now + index,
reason: "manual" as const,
preCompaction: {
sessionId,
sessionFile,
sessionId: checkpointSessionId,
leafId: `old-leaf-${index}`,
},
postCompaction: { sessionId },
};
});
await fs.writeFile(
storePath,
JSON.stringify(
{
[sessionKey]: {
sessionId,
updatedAt: now,
compactionCheckpoints: existingCheckpoints,
},
},
null,
2,
),
"utf-8",
);
await saveSessionStore(storePath, {
[sessionKey]: {
sessionId,
updatedAt: now,
compactionCheckpoints: existingCheckpoints,
},
});
const currentSnapshotFile = path.join(
dir,
"sess.checkpoint.99999999-9999-4999-8999-999999999999.jsonl",
);
await fs.writeFile(currentSnapshotFile, "current", "utf-8");
replaceSqliteSessionTranscriptEvents({
agentId: DEFAULT_AGENT_ID,
sessionId: "current-snapshot",
events: [
{
type: "session",
id: "current-snapshot",
timestamp: new Date(now + 100).toISOString(),
cwd: dir,
},
],
});
const stored = await persistSessionCompactionCheckpoint({
cfg: {
session: { store: storePath },
agents: { list: [{ id: "main", default: true }] },
} as OpenClawConfig,
sessionKey: "main",
sessionKey,
sessionId,
reason: "manual",
snapshot: {
sessionId,
sessionFile: currentSnapshotFile,
sessionId: "current-snapshot",
leafId: "current-leaf",
},
createdAt: now + 100,
});
expectRecordFields(stored?.preCompaction, {
sessionId,
sessionFile: currentSnapshotFile,
leafId: "current-leaf",
});
expect(fsSync.existsSync(existingCheckpoints[0].preCompaction.sessionFile)).toBe(false);
expect(fsSync.existsSync(existingCheckpoints[1].preCompaction.sessionFile)).toBe(false);
expect(fsSync.existsSync(existingCheckpoints[2].preCompaction.sessionFile)).toBe(true);
expect(fsSync.existsSync(currentSnapshotFile)).toBe(true);
expect(stored).not.toBeNull();
expect(
hasSqliteSessionTranscriptEvents({
agentId: DEFAULT_AGENT_ID,
sessionId: existingCheckpoints[0].preCompaction.sessionId,
}),
).toBe(false);
expect(
hasSqliteSessionTranscriptEvents({
agentId: DEFAULT_AGENT_ID,
sessionId: existingCheckpoints[1].preCompaction.sessionId,
}),
).toBe(false);
expect(
hasSqliteSessionTranscriptEvents({
agentId: DEFAULT_AGENT_ID,
sessionId: existingCheckpoints[2].preCompaction.sessionId,
}),
).toBe(true);
expect(
hasSqliteSessionTranscriptEvents({
agentId: DEFAULT_AGENT_ID,
sessionId: "current-snapshot",
}),
).toBe(true);
const nextStore = JSON.parse(await fs.readFile(storePath, "utf-8")) as Record<
const nextStore = loadSessionStore(storePath) as Record<
string,
{ compactionCheckpoints?: unknown[] }
>;

View File

@@ -1,11 +1,11 @@
import { randomUUID } from "node:crypto";
import fs from "node:fs/promises";
import path from "node:path";
import {
CURRENT_SESSION_VERSION,
migrateSessionEntries,
SessionManager,
type FileEntry as PiSessionFileEntry,
type SessionHeader,
} from "../agents/transcript/session-transcript-contract.js";
import { updateSessionStore } from "../config/sessions.js";
import type {
@@ -13,8 +13,9 @@ import type {
SessionCompactionCheckpointReason,
SessionEntry,
} from "../config/sessions.js";
import { isCompactionCheckpointTranscriptFileName } from "../config/sessions/artifacts.js";
import {
deleteSqliteSessionTranscript,
loadSqliteSessionTranscriptEvents,
replaceSqliteSessionTranscriptEvents,
resolveSqliteSessionTranscriptScopeForPath,
} from "../config/sessions/transcript-store.sqlite.js";
@@ -29,7 +30,7 @@ export const MAX_COMPACTION_CHECKPOINT_SNAPSHOT_BYTES = 64 * 1024 * 1024;
export type CapturedCompactionCheckpointSnapshot = {
sessionId: string;
sessionFile: string;
sessionFile?: string;
leafId: string;
};
@@ -74,197 +75,113 @@ export function resolveSessionCompactionCheckpointReason(params: {
return "auto-threshold";
}
const SESSION_HEADER_READ_MAX_BYTES = 64 * 1024;
const SESSION_TAIL_READ_INITIAL_BYTES = 64 * 1024;
type AsyncTranscriptFileHandle = Awaited<ReturnType<typeof fs.open>>;
async function readFileRangeAsync(
fileHandle: AsyncTranscriptFileHandle,
position: number,
length: number,
): Promise<Buffer> {
const buffer = Buffer.alloc(length);
let offset = 0;
while (offset < length) {
const { bytesRead } = await fileHandle.read(buffer, offset, length - offset, position + offset);
if (bytesRead <= 0) {
break;
}
offset += bytesRead;
}
return offset === length ? buffer : buffer.subarray(0, offset);
}
async function readSessionHeaderFromTranscriptAsync(
sessionFile: string,
): Promise<{ id: string; cwd?: string } | null> {
let fileHandle: AsyncTranscriptFileHandle | undefined;
try {
fileHandle = await fs.open(sessionFile, "r");
const buffer = await readFileRangeAsync(fileHandle, 0, SESSION_HEADER_READ_MAX_BYTES);
if (buffer.length <= 0) {
return null;
}
const chunk = buffer.toString("utf-8");
const firstLine = chunk
.split(/\r?\n/)
.map((line) => line.trim())
.find((line) => line.length > 0);
if (!firstLine) {
return null;
}
const parsed = JSON.parse(firstLine) as { type?: unknown; id?: unknown; cwd?: unknown };
if (parsed.type !== "session" || typeof parsed.id !== "string" || !parsed.id.trim()) {
return null;
}
return {
id: parsed.id.trim(),
...(typeof parsed.cwd === "string" && parsed.cwd.trim() ? { cwd: parsed.cwd } : {}),
};
} catch {
function cloneTranscriptEvents(events: unknown[]): PiSessionFileEntry[] | null {
const entries = events.filter((event): event is PiSessionFileEntry =>
Boolean(event && typeof event === "object"),
);
const firstEntry = entries[0] as { type?: unknown; id?: unknown } | undefined;
if (firstEntry?.type !== "session" || typeof firstEntry.id !== "string") {
return null;
} finally {
if (fileHandle) {
await fileHandle.close().catch(() => undefined);
}
}
return structuredClone(entries);
}
async function readSessionIdFromTranscriptHeaderAsync(sessionFile: string): Promise<string | null> {
return (await readSessionHeaderFromTranscriptAsync(sessionFile))?.id ?? null;
}
function parseTranscriptLineId(
line: string,
): { kind: "session" } | { kind: "entry"; id: string } | null {
try {
const parsed = JSON.parse(line) as { type?: unknown; id?: unknown };
if (parsed.type === "session") {
return { kind: "session" };
}
if (typeof parsed.id === "string" && parsed.id.trim()) {
return { kind: "entry", id: parsed.id.trim() };
}
} catch {
function loadTranscriptEntriesFromSqlite(params: {
agentId?: string;
sessionId?: string;
sessionFile?: string;
}): PiSessionFileEntry[] | null {
let agentId = params.agentId?.trim() || DEFAULT_AGENT_ID;
let sessionId = params.sessionId?.trim();
if (!sessionId && params.sessionFile?.trim()) {
const scope = resolveSqliteSessionTranscriptScopeForPath({
transcriptPath: params.sessionFile,
});
agentId = scope?.agentId ?? agentId;
sessionId = scope?.sessionId;
}
if (!sessionId) {
return null;
}
return cloneTranscriptEvents(
loadSqliteSessionTranscriptEvents({
agentId,
sessionId,
}).map((entry) => entry.event),
);
}
function transcriptEventsByteLength(events: readonly PiSessionFileEntry[]): number {
let total = 0;
for (const event of events) {
total += Buffer.byteLength(`${JSON.stringify(event)}\n`, "utf8");
}
return total;
}
function latestEntryId(entries: readonly PiSessionFileEntry[]): string | null {
for (let index = entries.length - 1; index >= 0; index -= 1) {
const entry = entries[index] as { type?: unknown; id?: unknown } | undefined;
if (entry?.type === "session") {
return null;
}
if (typeof entry?.id === "string" && entry.id.trim()) {
return entry.id.trim();
}
}
return null;
}
async function readTranscriptEntriesForForkAsync(
sessionFile: string,
): Promise<PiSessionFileEntry[] | null> {
let fileHandle: AsyncTranscriptFileHandle | undefined;
try {
fileHandle = await fs.open(sessionFile, "r");
const content = await fileHandle.readFile("utf-8");
const entries: PiSessionFileEntry[] = [];
for (const line of content.trim().split(/\r?\n/)) {
const trimmed = line.trim();
if (!trimmed) {
continue;
}
try {
entries.push(JSON.parse(trimmed) as PiSessionFileEntry);
} catch {
// Match pi-coding-agent's loader: malformed JSONL entries are ignored.
}
}
const firstEntry = entries[0] as { type?: unknown; id?: unknown } | undefined;
if (firstEntry?.type !== "session" || typeof firstEntry.id !== "string") {
return null;
}
return entries;
} catch {
return null;
} finally {
if (fileHandle) {
await fileHandle.close().catch(() => undefined);
}
function createCheckpointVirtualTranscriptPath(params: {
sourceFile?: string;
checkpointId: string;
}): string | undefined {
const sourceFile = params.sourceFile?.trim();
if (!sourceFile) {
return undefined;
}
const parsed = path.parse(sourceFile);
return path.join(
parsed.dir,
`${parsed.name}.checkpoint.${params.checkpointId}${parsed.ext || ".jsonl"}`,
);
}
export async function readSessionLeafIdFromTranscriptAsync(
sessionFile: string,
maxBytes = MAX_COMPACTION_CHECKPOINT_SNAPSHOT_BYTES,
): Promise<string | null> {
let fileHandle: AsyncTranscriptFileHandle | undefined;
try {
fileHandle = await fs.open(sessionFile, "r");
const stat = await fileHandle.stat();
if (!stat.isFile() || stat.size <= 0) {
return null;
}
const requestedMaxBytes = Number.isFinite(maxBytes)
? Math.max(1024, Math.floor(maxBytes))
: MAX_COMPACTION_CHECKPOINT_SNAPSHOT_BYTES;
const maxReadableBytes = Math.min(stat.size, requestedMaxBytes);
let readLength = Math.min(maxReadableBytes, SESSION_TAIL_READ_INITIAL_BYTES);
while (readLength > 0) {
const readStart = Math.max(0, stat.size - readLength);
const buffer = await readFileRangeAsync(fileHandle, readStart, readLength);
const lines = buffer.toString("utf-8").split(/\r?\n/);
// If we did not read from the beginning, the first line may be a suffix of
// a larger JSONL entry. Ignore it and grow the window if no complete entry
// is found.
const candidateLines = readStart > 0 ? lines.slice(1) : lines;
for (let i = candidateLines.length - 1; i >= 0; i -= 1) {
const line = candidateLines[i]?.trim();
if (!line) {
continue;
}
const parsed = parseTranscriptLineId(line);
if (!parsed) {
continue;
}
if (parsed.kind === "session") {
return null;
}
return parsed.id;
}
if (readStart === 0) {
return null;
}
const nextReadLength = Math.min(maxReadableBytes, readLength * 2);
if (nextReadLength === readLength) {
return null;
}
readLength = nextReadLength;
}
} catch {
const entries = loadTranscriptEntriesFromSqlite({ sessionFile });
if (!entries || transcriptEventsByteLength(entries) > maxBytes) {
return null;
} finally {
if (fileHandle) {
await fileHandle.close().catch(() => undefined);
}
}
return null;
return latestEntryId(entries);
}
export async function forkCompactionCheckpointTranscriptAsync(params: {
sourceFile: string;
sourceFile?: string;
sourceSessionId?: string;
agentId?: string;
targetCwd?: string;
sessionDir?: string;
}): Promise<ForkedCompactionCheckpointTranscript | null> {
const sourceFile = params.sourceFile.trim();
if (!sourceFile) {
return null;
}
const sourceHeader = await readSessionHeaderFromTranscriptAsync(sourceFile);
if (!sourceHeader) {
return null;
}
const entries = await readTranscriptEntriesForForkAsync(sourceFile);
const sourceFile = params.sourceFile?.trim();
const entries = loadTranscriptEntriesFromSqlite({
agentId: params.agentId,
sessionId: params.sourceSessionId,
sessionFile: sourceFile,
});
if (!entries) {
return null;
}
const sourceHeader = entries[0] as SessionHeader | undefined;
if (!sourceHeader) {
return null;
}
migrateSessionEntries(entries);
const targetCwd = params.targetCwd ?? sourceHeader.cwd ?? process.cwd();
const sessionDir = params.sessionDir ?? path.dirname(sourceFile);
const sessionDir = params.sessionDir ?? (sourceFile ? path.dirname(sourceFile) : process.cwd());
const sessionId = randomUUID();
const timestamp = new Date().toISOString();
const fileTimestamp = timestamp.replace(/[:.]/g, "-");
@@ -275,13 +192,12 @@ export async function forkCompactionCheckpointTranscriptAsync(params: {
id: sessionId,
timestamp,
cwd: targetCwd,
parentSession: sourceFile,
...(sourceFile ? { parentSession: sourceFile } : {}),
};
try {
const sourceScope = resolveSqliteSessionTranscriptScopeForPath({ transcriptPath: sourceFile });
replaceSqliteSessionTranscriptEvents({
agentId: sourceScope?.agentId ?? DEFAULT_AGENT_ID,
agentId: params.agentId?.trim() || DEFAULT_AGENT_ID,
sessionId,
transcriptPath: sessionFile,
events: [
@@ -300,7 +216,8 @@ export async function forkCompactionCheckpointTranscriptAsync(params: {
* Gateway event loop on synchronous file reads/copies.
*/
export async function captureCompactionCheckpointSnapshotAsync(params: {
sessionManager?: Pick<SessionManager, "getLeafId">;
agentId?: string;
sessionManager?: Pick<SessionManager, "getEntries" | "getHeader" | "getLeafId">;
sessionFile: string;
maxBytes?: number;
}): Promise<CapturedCompactionCheckpointSnapshot | null> {
@@ -317,36 +234,45 @@ export async function captureCompactionCheckpointSnapshotAsync(params: {
return null;
}
const maxBytes = params.maxBytes ?? MAX_COMPACTION_CHECKPOINT_SNAPSHOT_BYTES;
try {
const stat = await fs.stat(sessionFile);
if (!stat.isFile() || stat.size > maxBytes) {
return null;
}
} catch {
const entries = params.sessionManager
? cloneTranscriptEvents([
params.sessionManager.getHeader(),
...params.sessionManager.getEntries(),
])
: loadTranscriptEntriesFromSqlite({
agentId: params.agentId,
sessionFile,
});
if (!entries || transcriptEventsByteLength(entries) > maxBytes) {
return null;
}
const parsedSessionFile = path.parse(sessionFile);
const snapshotFile = path.join(
parsedSessionFile.dir,
`${parsedSessionFile.name}.checkpoint.${randomUUID()}${parsedSessionFile.ext || ".jsonl"}`,
);
try {
await fs.copyFile(sessionFile, snapshotFile);
} catch {
return null;
}
const sessionId = await readSessionIdFromTranscriptHeaderAsync(snapshotFile);
const leafId = liveLeafId ?? (await readSessionLeafIdFromTranscriptAsync(snapshotFile, maxBytes));
if (!sessionId || !leafId) {
try {
await fs.unlink(snapshotFile);
} catch {
// Best-effort cleanup if the copied transcript cannot be validated.
}
const sourceHeader = entries[0] as SessionHeader | undefined;
const leafId = liveLeafId ?? latestEntryId(entries);
if (!sourceHeader?.id || !leafId) {
return null;
}
const snapshotSessionId = randomUUID();
const snapshotFile = createCheckpointVirtualTranscriptPath({
sourceFile: sessionFile,
checkpointId: snapshotSessionId,
});
const snapshotHeader: SessionHeader = {
...sourceHeader,
id: snapshotSessionId,
timestamp: new Date().toISOString(),
parentSession: sessionFile,
};
replaceSqliteSessionTranscriptEvents({
agentId: params.agentId?.trim() || DEFAULT_AGENT_ID,
sessionId: snapshotSessionId,
transcriptPath: snapshotFile,
events: [
snapshotHeader,
...entries.filter((entry) => (entry as { type?: unknown }).type !== "session"),
],
});
return {
sessionId,
sessionId: snapshotSessionId,
sessionFile: snapshotFile,
leafId,
};
@@ -355,48 +281,7 @@ export async function captureCompactionCheckpointSnapshotAsync(params: {
export async function cleanupCompactionCheckpointSnapshot(
snapshot: CapturedCompactionCheckpointSnapshot | null | undefined,
): Promise<void> {
if (!snapshot?.sessionFile) {
return;
}
try {
await fs.unlink(snapshot.sessionFile);
} catch {
// Best-effort cleanup; retained snapshots are harmless and easier to debug.
}
}
async function cleanupTrimmedCompactionCheckpointFiles(params: {
removed: SessionCompactionCheckpoint[];
retained: SessionCompactionCheckpoint[] | undefined;
currentSnapshotFile: string;
}): Promise<void> {
if (params.removed.length === 0) {
return;
}
const retainedPaths = new Set(
(params.retained ?? [])
.map((checkpoint) => checkpoint.preCompaction.sessionFile?.trim())
.filter((filePath): filePath is string => Boolean(filePath)),
);
const snapshotDir = path.resolve(path.dirname(params.currentSnapshotFile));
for (const checkpoint of params.removed) {
const sessionFile = checkpoint.preCompaction.sessionFile?.trim();
if (!sessionFile || retainedPaths.has(sessionFile)) {
continue;
}
const resolvedSessionFile = path.resolve(sessionFile);
if (
path.dirname(resolvedSessionFile) !== snapshotDir ||
!isCompactionCheckpointTranscriptFileName(path.basename(resolvedSessionFile))
) {
continue;
}
try {
await fs.unlink(resolvedSessionFile);
} catch {
// Best-effort cleanup; disk budget can still collect old checkpoint artifacts.
}
}
void snapshot;
}
export async function persistSessionCompactionCheckpoint(params: {
@@ -433,7 +318,9 @@ export async function persistSessionCompactionCheckpoint(params: {
: {}),
preCompaction: {
sessionId: params.snapshot.sessionId,
sessionFile: params.snapshot.sessionFile,
...(params.snapshot.sessionFile?.trim()
? { sessionFile: params.snapshot.sessionFile.trim() }
: {}),
leafId: params.snapshot.leafId,
},
postCompaction: {
@@ -473,11 +360,12 @@ export async function persistSessionCompactionCheckpoint(params: {
});
return null;
}
await cleanupTrimmedCompactionCheckpointFiles({
removed: trimmedCheckpoints?.removed ?? [],
retained: trimmedCheckpoints?.kept,
currentSnapshotFile: params.snapshot.sessionFile,
});
for (const removed of trimmedCheckpoints?.removed ?? []) {
deleteSqliteSessionTranscript({
agentId: target.agentId,
sessionId: removed.preCompaction.sessionId,
});
}
return checkpoint;
}

View File

@@ -25,6 +25,7 @@ import { resolveSessionFilePath, resolveSessionFilePathOptions } from "../config
import { resolveResetPreservedSelection } from "../config/sessions/reset-preserved-selection.js";
import {
appendSqliteSessionTranscriptEvent,
deleteSqliteSessionTranscript,
hasSqliteSessionTranscriptEvents,
loadSqliteSessionTranscriptEvents,
} from "../config/sessions/transcript-store.sqlite.js";
@@ -43,11 +44,7 @@ import {
parseAgentSessionKey,
} from "../routing/session-key.js";
import { ErrorCodes, errorShape } from "./protocol/index.js";
import {
archiveSessionTranscriptsDetailed,
resolveStableSessionEndTranscript,
type ArchivedSessionTranscript,
} from "./session-transcript-files.fs.js";
import { resolveStableSessionEndTranscript } from "./session-transcript-files.fs.js";
import {
loadSessionEntry,
migrateAndPruneGatewaySessionStoreKey,
@@ -71,35 +68,6 @@ function stripRuntimeModelState(entry?: SessionEntry): SessionEntry | undefined
};
}
export function archiveSessionTranscriptsForSession(params: {
sessionId: string | undefined;
storePath: string;
sessionFile?: string;
agentId?: string;
reason: "reset" | "deleted";
}): string[] {
return archiveSessionTranscriptsForSessionDetailed(params).map((entry) => entry.archivedPath);
}
export function archiveSessionTranscriptsForSessionDetailed(params: {
sessionId: string | undefined;
storePath: string;
sessionFile?: string;
agentId?: string;
reason: "reset" | "deleted";
}): ArchivedSessionTranscript[] {
if (!params.sessionId) {
return [];
}
return archiveSessionTranscriptsDetailed({
sessionId: params.sessionId,
storePath: params.storePath,
sessionFile: params.sessionFile,
agentId: params.agentId,
reason: params.reason,
});
}
export function emitGatewaySessionEndPluginHook(params: {
cfg: OpenClawConfig;
sessionKey: string;
@@ -108,7 +76,6 @@ export function emitGatewaySessionEndPluginHook(params: {
sessionFile?: string;
agentId?: string;
reason: "new" | "reset" | "idle" | "daily" | "compaction" | "deleted" | "unknown";
archivedTranscripts?: ArchivedSessionTranscript[];
nextSessionId?: string;
nextSessionKey?: string;
}): void {
@@ -124,7 +91,6 @@ export function emitGatewaySessionEndPluginHook(params: {
storePath: params.storePath,
sessionFile: params.sessionFile,
agentId: params.agentId,
archivedTranscripts: params.archivedTranscripts,
});
const payload = buildSessionEndHookPayload({
sessionId: params.sessionId,
@@ -132,7 +98,6 @@ export function emitGatewaySessionEndPluginHook(params: {
cfg: params.cfg,
reason: params.reason,
sessionFile: transcript.sessionFile,
transcriptArchived: transcript.transcriptArchived,
nextSessionId: params.nextSessionId,
nextSessionKey: params.nextSessionKey,
});
@@ -570,6 +535,7 @@ export async function performGatewaySessionReset(params: {
let oldSessionId: string | undefined;
let oldSessionFile: string | undefined;
let resetSourceEntry: SessionEntry | undefined;
let deleteOldTranscript = false;
const next = await updateSessionStore(storePath, (store) => {
const { primaryKey } = migrateAndPruneGatewaySessionStoreKey({
cfg,
@@ -674,6 +640,11 @@ export async function performGatewaySessionReset(params: {
totalTokensFresh: true,
};
store[primaryKey] = nextEntry;
deleteOldTranscript = Boolean(
oldSessionId &&
oldSessionId !== nextSessionId &&
!Object.values(store).some((candidate) => candidate?.sessionId === oldSessionId),
);
return nextEntry;
});
await emitGatewayBeforeResetPluginHook({
@@ -685,13 +656,6 @@ export async function performGatewaySessionReset(params: {
reason: params.reason,
});
const archivedTranscripts = archiveSessionTranscriptsForSessionDetailed({
sessionId: oldSessionId,
storePath,
sessionFile: oldSessionFile,
agentId: target.agentId,
reason: "reset",
});
if (!hasSqliteSessionTranscriptEvents({ agentId: target.agentId, sessionId: next.sessionId })) {
const header = {
type: "session",
@@ -715,7 +679,6 @@ export async function performGatewaySessionReset(params: {
sessionFile: oldSessionFile,
agentId: target.agentId,
reason: params.reason,
archivedTranscripts,
nextSessionId: next.sessionId,
});
emitGatewaySessionStartPluginHook({
@@ -724,6 +687,12 @@ export async function performGatewaySessionReset(params: {
sessionId: next.sessionId,
resumedFrom: oldSessionId,
});
if (deleteOldTranscript && oldSessionId) {
deleteSqliteSessionTranscript({
agentId: target.agentId,
sessionId: oldSessionId,
});
}
if (hadExistingEntry) {
await emitSessionUnboundLifecycleEvent({
targetSessionKey: target.canonicalKey ?? params.key,

View File

@@ -1,67 +0,0 @@
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import { afterEach, describe, expect, it } from "vitest";
import {
onSessionTranscriptUpdate,
type SessionTranscriptUpdate,
} from "../sessions/transcript-events.js";
import { archiveFileOnDisk } from "./session-transcript-files.fs.js";
const subscriptions: Array<() => void> = [];
afterEach(() => {
while (subscriptions.length > 0) {
subscriptions.pop()?.();
}
});
describe("archiveFileOnDisk transcript updates", () => {
it("emits a session transcript update for the archived path on reset", () => {
const updates: SessionTranscriptUpdate[] = [];
subscriptions.push(onSessionTranscriptUpdate((update) => updates.push(update)));
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "oc-archive-events-reset-"));
try {
const sessionFile = path.join(tmpDir, "live.jsonl");
fs.writeFileSync(sessionFile, '{"type":"session-meta","agentId":"main"}\n');
const archived = archiveFileOnDisk(sessionFile, "reset");
expect(fs.existsSync(archived)).toBe(true);
expect(fs.existsSync(sessionFile)).toBe(false);
expect(archived).toContain(".jsonl.reset.");
expect(updates).toHaveLength(1);
expect(updates[0].sessionFile).toBe(archived);
// Archive does not carry a messageId/message payload — this is a
// pure-path mutation notification, matching how compaction-only
// emits (sessionFile + sessionKey-only) behave.
expect(updates[0].message).toBeUndefined();
expect(updates[0].messageId).toBeUndefined();
} finally {
fs.rmSync(tmpDir, { recursive: true, force: true });
}
});
it("also emits for deleted and bak archive reasons", () => {
const updates: SessionTranscriptUpdate[] = [];
subscriptions.push(onSessionTranscriptUpdate((update) => updates.push(update)));
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "oc-archive-events-mixed-"));
try {
const deletedSource = path.join(tmpDir, "deleted.jsonl");
fs.writeFileSync(deletedSource, "{}\n");
const deletedArchived = archiveFileOnDisk(deletedSource, "deleted");
const bakSource = path.join(tmpDir, "bak.jsonl");
fs.writeFileSync(bakSource, "{}\n");
const bakArchived = archiveFileOnDisk(bakSource, "bak");
expect(deletedArchived).toContain(".jsonl.deleted.");
expect(bakArchived).toContain(".jsonl.bak.");
expect(updates.map((update) => update.sessionFile)).toEqual([deletedArchived, bakArchived]);
} finally {
fs.rmSync(tmpDir, { recursive: true, force: true });
}
});
});

View File

@@ -1,24 +1,12 @@
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import {
formatSessionArchiveTimestamp,
parseSessionArchiveTimestamp,
type SessionArchiveReason,
} from "../config/sessions/artifacts.js";
import {
resolveSessionFilePath,
resolveSessionTranscriptPath,
resolveSessionTranscriptPathInDir,
} from "../config/sessions/paths.js";
import { resolveRequiredHomeDir } from "../infra/home-dir.js";
import { emitSessionTranscriptUpdate } from "../sessions/transcript-events.js";
type ArchiveFileReason = SessionArchiveReason;
export type ArchivedSessionTranscript = {
sourcePath: string;
archivedPath: string;
};
function classifySessionTranscriptCandidate(
sessionId: string,
@@ -124,105 +112,15 @@ export function resolveSessionTranscriptCandidates(
return Array.from(new Set(candidates));
}
export function archiveFileOnDisk(filePath: string, reason: ArchiveFileReason): string {
const ts = formatSessionArchiveTimestamp();
const archived = `${filePath}.${reason}.${ts}`;
fs.renameSync(filePath, archived);
// Notify the session transcript subscribers (memory index, sessions-history
// HTTP, etc.) that a mutation landed on a session-owned path. Without this
// emit the memory sync's incremental path never learns the new archive
// exists: chokidar does not watch the sessions directory, and the event bus
// is the only channel gateway code uses to signal session-file mutations.
// All other in-process mutations (append, compaction, tool-result rewrite,
// chat inject, command execution) already emit here; archive was the sole
// remaining gap, which is why `.jsonl.reset.<iso>` / `.jsonl.deleted.<iso>`
// files only surfaced in the index after a full reindex.
emitSessionTranscriptUpdate({ sessionFile: archived });
return archived;
}
export function archiveSessionTranscripts(opts: {
sessionId: string;
storePath: string | undefined;
sessionFile?: string;
agentId?: string;
reason: "reset" | "deleted";
/**
* When true, only archive files resolved under the session store directory.
* This prevents maintenance operations from mutating paths outside the agent sessions dir.
*/
restrictToStoreDir?: boolean;
}): string[] {
return archiveSessionTranscriptsDetailed(opts).map((entry) => entry.archivedPath);
}
export function archiveSessionTranscriptsDetailed(opts: {
sessionId: string;
storePath: string | undefined;
sessionFile?: string;
agentId?: string;
reason: "reset" | "deleted";
/**
* When true, only archive files resolved under the session store directory.
* This prevents maintenance operations from mutating paths outside the agent sessions dir.
*/
restrictToStoreDir?: boolean;
}): ArchivedSessionTranscript[] {
const archived: ArchivedSessionTranscript[] = [];
const storeDir =
opts.restrictToStoreDir && opts.storePath
? canonicalizePathForComparison(path.dirname(opts.storePath))
: null;
for (const candidate of resolveSessionTranscriptCandidates(
opts.sessionId,
opts.storePath,
opts.sessionFile,
opts.agentId,
)) {
const candidatePath = canonicalizePathForComparison(candidate);
if (storeDir) {
const relative = path.relative(storeDir, candidatePath);
if (!relative || relative.startsWith("..") || path.isAbsolute(relative)) {
continue;
}
}
if (!fs.existsSync(candidatePath)) {
continue;
}
try {
archived.push({
sourcePath: candidatePath,
archivedPath: archiveFileOnDisk(candidatePath, opts.reason),
});
} catch {
// Best-effort.
}
}
return archived;
}
export function resolveStableSessionEndTranscript(params: {
sessionId: string;
storePath: string | undefined;
sessionFile?: string;
agentId?: string;
archivedTranscripts?: ArchivedSessionTranscript[];
}): { sessionFile?: string; transcriptArchived?: boolean } {
const archivedTranscripts = params.archivedTranscripts ?? [];
if (archivedTranscripts.length > 0) {
const preferredPath = params.sessionFile?.trim()
? canonicalizePathForComparison(params.sessionFile)
: undefined;
const archivedMatch =
preferredPath == null
? undefined
: archivedTranscripts.find(
(entry) => canonicalizePathForComparison(entry.sourcePath) === preferredPath,
);
const archivedPath = archivedMatch?.archivedPath ?? archivedTranscripts[0]?.archivedPath;
if (archivedPath) {
return { sessionFile: archivedPath, transcriptArchived: true };
}
}): { sessionFile?: string } {
const stablePath = params.sessionFile?.trim();
if (stablePath) {
return { sessionFile: path.resolve(stablePath) };
}
for (const candidate of resolveSessionTranscriptCandidates(
@@ -233,48 +131,9 @@ export function resolveStableSessionEndTranscript(params: {
)) {
const candidatePath = canonicalizePathForComparison(candidate);
if (fs.existsSync(candidatePath)) {
return { sessionFile: candidatePath, transcriptArchived: false };
return { sessionFile: candidatePath };
}
}
return {};
}
export async function cleanupArchivedSessionTranscripts(opts: {
directories: string[];
olderThanMs: number;
reason?: ArchiveFileReason;
nowMs?: number;
}): Promise<{ removed: number; scanned: number }> {
if (!Number.isFinite(opts.olderThanMs) || opts.olderThanMs < 0) {
return { removed: 0, scanned: 0 };
}
const now = opts.nowMs ?? Date.now();
const reason: ArchiveFileReason = opts.reason ?? "deleted";
const directories = Array.from(new Set(opts.directories.map((dir) => path.resolve(dir))));
let removed = 0;
let scanned = 0;
for (const dir of directories) {
const entries = await fs.promises.readdir(dir).catch(() => []);
for (const entry of entries) {
const timestamp = parseSessionArchiveTimestamp(entry, reason);
if (timestamp == null) {
continue;
}
scanned += 1;
if (now - timestamp <= opts.olderThanMs) {
continue;
}
const fullPath = path.join(dir, entry);
const stat = await fs.promises.stat(fullPath).catch(() => null);
if (!stat?.isFile()) {
continue;
}
await fs.promises.rm(fullPath).catch(() => undefined);
removed += 1;
}
}
return { removed, scanned };
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -8,7 +8,9 @@ import {
} from "../agents/subagent-registry.test-helpers.js";
import type { OpenClawConfig } from "../config/config.js";
import type { SessionEntry } from "../config/sessions.js";
import { replaceSqliteSessionTranscriptEvents } from "../config/sessions/transcript-store.sqlite.js";
import { registerAgentRunContext, resetAgentRunContextForTest } from "../infra/agent-events.js";
import { closeOpenClawStateDatabaseForTest } from "../state/openclaw-state-db.js";
import { listSessionsFromStore } from "./session-utils.js";
function createModelDefaultsConfig(params: {
@@ -58,11 +60,15 @@ function withTranscriptStoreFixture<T>(params: {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), params.prefix));
const storePath = path.join(tmpDir, "sessions.json");
const now = Date.now();
fs.writeFileSync(
path.join(tmpDir, `${params.transcriptId}.jsonl`),
[
JSON.stringify({ type: "session", version: 1, id: params.transcriptId }),
JSON.stringify({
const previousStateDir = process.env.OPENCLAW_STATE_DIR;
process.env.OPENCLAW_STATE_DIR = tmpDir;
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId: params.transcriptId,
transcriptPath: path.join(tmpDir, `${params.transcriptId}.jsonl`),
events: [
{ type: "session", version: 1, id: params.transcriptId },
{
message: {
role: "assistant",
provider: params.provider,
@@ -74,14 +80,19 @@ function withTranscriptStoreFixture<T>(params: {
cost: { total: params.costTotal },
},
},
}),
].join("\n"),
"utf-8",
);
},
],
});
try {
return params.run({ storePath, now });
} finally {
closeOpenClawStateDatabaseForTest();
if (previousStateDir === undefined) {
delete process.env.OPENCLAW_STATE_DIR;
} else {
process.env.OPENCLAW_STATE_DIR = previousStateDir;
}
fs.rmSync(tmpDir, { recursive: true, force: true });
}
}

View File

@@ -5,8 +5,10 @@ import { afterEach, describe, expect, test, vi } from "vitest";
import { resetConfigRuntimeState, setRuntimeConfigSnapshot } from "../config/config.js";
import type { OpenClawConfig } from "../config/config.js";
import type { SessionEntry } from "../config/sessions.js";
import { replaceSqliteSessionTranscriptEvents } from "../config/sessions/transcript-store.sqlite.js";
import { createEmptyPluginRegistry } from "../plugins/registry-empty.js";
import { resetPluginRuntimeStateForTest, setActivePluginRegistry } from "../plugins/runtime.js";
import { closeOpenClawStateDatabaseForTest } from "../state/openclaw-state-db.js";
import { withStateDirEnv } from "../test-helpers/state-dir-env.js";
import {
buildGatewaySessionRow,
@@ -17,7 +19,6 @@ import {
listAgentsForGateway,
listSessionsFromStore,
listSessionsFromStoreAsync,
loadSessionEntry,
migrateAndPruneGatewaySessionStoreKey,
parseGroupKey,
pruneLegacyStoreKeys,
@@ -30,10 +31,6 @@ import {
resolveSessionStoreKey,
} from "./session-utils.js";
function resolveSyncRealpath(filePath: string): string {
return fs.realpathSync.native(filePath);
}
function createSymlinkOrSkip(targetPath: string, linkPath: string): boolean {
try {
fs.symlinkSync(targetPath, linkPath);
@@ -627,311 +624,6 @@ describe("gateway session utils", () => {
expect(target.storePath).toBe(path.resolve(storeTemplate.replace("{agentId}", "ops")));
});
test("resolveGatewaySessionStoreTarget includes legacy mixed-case store key", () => {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "session-utils-case-"));
const storePath = path.join(dir, "sessions.json");
fs.writeFileSync(
storePath,
JSON.stringify({ "agent:ops:MySession": { sessionId: "s1", updatedAt: 1 } }),
"utf8",
);
const cfg = {
session: { mainKey: "main", store: storePath },
agents: { list: [{ id: "ops", default: true }] },
} as OpenClawConfig;
const target = resolveGatewaySessionStoreTarget({ cfg, key: "agent:ops:mysession" });
expect(target.canonicalKey).toBe("agent:ops:mysession");
expect(target.storeKeys).toContain("agent:ops:mysession");
expect(target.storeKeys).toContain("agent:ops:MySession");
const store = JSON.parse(fs.readFileSync(storePath, "utf8"));
const found = target.storeKeys.some((k) => Boolean(store[k]));
expect(found).toBe(true);
});
test("resolveGatewaySessionStoreTarget includes all case-variant duplicate keys", () => {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "session-utils-dupes-"));
const storePath = path.join(dir, "sessions.json");
fs.writeFileSync(
storePath,
JSON.stringify({
"agent:ops:mysession": { sessionId: "s-lower", updatedAt: 2 },
"agent:ops:MySession": { sessionId: "s-mixed", updatedAt: 1 },
}),
"utf8",
);
const cfg = {
session: { mainKey: "main", store: storePath },
agents: { list: [{ id: "ops", default: true }] },
} as OpenClawConfig;
const target = resolveGatewaySessionStoreTarget({ cfg, key: "agent:ops:mysession" });
expect(target.storeKeys).toContain("agent:ops:mysession");
expect(target.storeKeys).toContain("agent:ops:MySession");
});
test("resolveGatewaySessionStoreTarget finds legacy main alias key when mainKey is customized", () => {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "session-utils-alias-"));
const storePath = path.join(dir, "sessions.json");
fs.writeFileSync(
storePath,
JSON.stringify({ "agent:ops:MAIN": { sessionId: "s1", updatedAt: 1 } }),
"utf8",
);
const cfg = {
session: { mainKey: "work", store: storePath },
agents: { list: [{ id: "ops", default: true }] },
} as OpenClawConfig;
const target = resolveGatewaySessionStoreTarget({ cfg, key: "agent:ops:main" });
expect(target.canonicalKey).toBe("agent:ops:work");
expect(target.storeKeys).toContain("agent:ops:MAIN");
});
test("resolveGatewaySessionStoreTarget preserves discovered store paths for non-round-tripping agent dirs", async () => {
await withStateDirEnv("session-utils-discovered-store-", async ({ stateDir }) => {
const retiredSessionsDir = path.join(stateDir, "agents", "Retired Agent", "sessions");
fs.mkdirSync(retiredSessionsDir, { recursive: true });
const retiredStorePath = path.join(retiredSessionsDir, "sessions.json");
fs.writeFileSync(
retiredStorePath,
JSON.stringify({
"agent:retired-agent:main": { sessionId: "sess-retired", updatedAt: 1 },
}),
"utf8",
);
const cfg = {
session: {
mainKey: "main",
store: path.join(stateDir, "agents", "{agentId}", "sessions", "sessions.json"),
},
agents: { list: [{ id: "main", default: true }] },
} as OpenClawConfig;
const target = resolveGatewaySessionStoreTarget({ cfg, key: "agent:retired-agent:main" });
expect(target.storePath).toBe(resolveSyncRealpath(retiredStorePath));
});
});
test("loadSessionEntry reads discovered stores from non-round-tripping agent dirs", async () => {
resetConfigRuntimeState();
try {
await withStateDirEnv("session-utils-load-entry-", async ({ stateDir }) => {
const retiredSessionsDir = path.join(stateDir, "agents", "Retired Agent", "sessions");
fs.mkdirSync(retiredSessionsDir, { recursive: true });
const retiredStorePath = path.join(retiredSessionsDir, "sessions.json");
fs.writeFileSync(
retiredStorePath,
JSON.stringify({
"agent:retired-agent:main": { sessionId: "sess-retired", updatedAt: 7 },
}),
"utf8",
);
const cfg = {
session: {
mainKey: "main",
store: path.join(stateDir, "agents", "{agentId}", "sessions", "sessions.json"),
},
agents: { list: [{ id: "main", default: true }] },
} as OpenClawConfig;
setRuntimeConfigSnapshot(cfg, cfg);
const loaded = loadSessionEntry("agent:retired-agent:main");
expect(loaded.storePath).toBe(resolveSyncRealpath(retiredStorePath));
expect(loaded.entry?.sessionId).toBe("sess-retired");
});
} finally {
resetConfigRuntimeState();
}
});
test("loadSessionEntry preserves a listed deleted main session over the live default main", async () => {
resetConfigRuntimeState();
try {
await withStateDirEnv("session-utils-load-deleted-main-entry-", async ({ stateDir }) => {
const storeTemplate = path.join(
stateDir,
"agents",
"{agentId}",
"sessions",
"sessions.json",
);
const liveSessionsDir = path.join(stateDir, "agents", "ops", "sessions");
const deletedSessionsDir = path.join(stateDir, "agents", "main", "sessions");
fs.mkdirSync(liveSessionsDir, { recursive: true });
fs.mkdirSync(deletedSessionsDir, { recursive: true });
const liveStorePath = path.join(liveSessionsDir, "sessions.json");
const deletedStorePath = path.join(deletedSessionsDir, "sessions.json");
fs.writeFileSync(
liveStorePath,
JSON.stringify({
"agent:ops:main": { sessionId: "sess-live-default", updatedAt: 10 },
}),
"utf8",
);
fs.writeFileSync(
deletedStorePath,
JSON.stringify({
"agent:main:main": { sessionId: "sess-deleted-main", updatedAt: 20 },
}),
"utf8",
);
const cfg = {
session: { mainKey: "main", store: storeTemplate },
agents: { list: [{ id: "ops", default: true }] },
} as OpenClawConfig;
setRuntimeConfigSnapshot(cfg, cfg);
const target = resolveGatewaySessionStoreTarget({ cfg, key: "agent:main:main" });
const loaded = loadSessionEntry("agent:main:main");
expect(target.canonicalKey).toBe("agent:main:main");
expect(target.agentId).toBe("main");
expect(target.storePath).toBe(resolveSyncRealpath(deletedStorePath));
expect(loaded.canonicalKey).toBe("agent:main:main");
expect(loaded.storePath).toBe(resolveSyncRealpath(deletedStorePath));
expect(loaded.entry?.sessionId).toBe("sess-deleted-main");
});
} finally {
resetConfigRuntimeState();
}
});
test("loadSessionEntry resolves deleted main aliases when mainKey is customized", async () => {
resetConfigRuntimeState();
try {
await withStateDirEnv("session-utils-load-deleted-main-alias-", async ({ stateDir }) => {
const storeTemplate = path.join(
stateDir,
"agents",
"{agentId}",
"sessions",
"sessions.json",
);
const liveSessionsDir = path.join(stateDir, "agents", "ops", "sessions");
const deletedSessionsDir = path.join(stateDir, "agents", "main", "sessions");
fs.mkdirSync(liveSessionsDir, { recursive: true });
fs.mkdirSync(deletedSessionsDir, { recursive: true });
fs.writeFileSync(
path.join(liveSessionsDir, "sessions.json"),
JSON.stringify({
"agent:ops:work": { sessionId: "sess-live-default", updatedAt: 10 },
}),
"utf8",
);
const deletedStorePath = path.join(deletedSessionsDir, "sessions.json");
fs.writeFileSync(
deletedStorePath,
JSON.stringify({
"agent:main:main": { sessionId: "sess-deleted-main", updatedAt: 20 },
}),
"utf8",
);
const cfg = {
session: { mainKey: "work", store: storeTemplate },
agents: { list: [{ id: "ops", default: true }] },
} as OpenClawConfig;
setRuntimeConfigSnapshot(cfg, cfg);
const loaded = loadSessionEntry("agent:main:work");
expect(loaded.canonicalKey).toBe("agent:main:work");
expect(loaded.storePath).toBe(resolveSyncRealpath(deletedStorePath));
expect(loaded.entry?.sessionId).toBe("sess-deleted-main");
});
} finally {
resetConfigRuntimeState();
}
});
test("loadSessionEntry prefers the freshest duplicate row for a logical key", async () => {
resetConfigRuntimeState();
try {
await withStateDirEnv("session-utils-load-entry-freshest-", async ({ stateDir }) => {
const sessionsDir = path.join(stateDir, "agents", "main", "sessions");
fs.mkdirSync(sessionsDir, { recursive: true });
const storePath = path.join(sessionsDir, "sessions.json");
fs.writeFileSync(
storePath,
JSON.stringify(
{
"agent:main:main": { sessionId: "sess-stale", updatedAt: 1 },
"agent:main:MAIN": { sessionId: "sess-fresh", updatedAt: 2 },
},
null,
2,
),
"utf8",
);
const cfg = {
session: {
mainKey: "main",
store: path.join(stateDir, "agents", "{agentId}", "sessions", "sessions.json"),
},
agents: { list: [{ id: "main", default: true }] },
} as OpenClawConfig;
setRuntimeConfigSnapshot(cfg, cfg);
const loaded = loadSessionEntry("agent:main:main");
expect(loaded.entry?.sessionId).toBe("sess-fresh");
});
} finally {
resetConfigRuntimeState();
}
});
test("loadSessionEntry prefers the freshest duplicate row across discovered stores", async () => {
resetConfigRuntimeState();
try {
await withStateDirEnv("session-utils-load-entry-cross-store-", async ({ stateDir }) => {
const canonicalSessionsDir = path.join(stateDir, "agents", "main", "sessions");
fs.mkdirSync(canonicalSessionsDir, { recursive: true });
fs.writeFileSync(
path.join(canonicalSessionsDir, "sessions.json"),
JSON.stringify(
{
"agent:main:main": { sessionId: "sess-canonical-stale", updatedAt: 10 },
"agent:main:MAIN": { sessionId: "sess-canonical-fresh", updatedAt: 1000 },
},
null,
2,
),
"utf8",
);
const discoveredSessionsDir = path.join(stateDir, "agents", "main ", "sessions");
fs.mkdirSync(discoveredSessionsDir, { recursive: true });
fs.writeFileSync(
path.join(discoveredSessionsDir, "sessions.json"),
JSON.stringify(
{
"agent:main:main": { sessionId: "sess-discovered-mid", updatedAt: 500 },
},
null,
2,
),
"utf8",
);
const cfg = {
session: {
mainKey: "main",
store: path.join(stateDir, "agents", "{agentId}", "sessions", "sessions.json"),
},
agents: { list: [{ id: "main", default: true }] },
} as OpenClawConfig;
setRuntimeConfigSnapshot(cfg, cfg);
const loaded = loadSessionEntry("agent:main:main");
expect(loaded.entry?.sessionId).toBe("sess-canonical-fresh");
});
} finally {
resetConfigRuntimeState();
}
});
test("pruneLegacyStoreKeys removes alias and case-variant ghost keys", () => {
const store: Record<string, unknown> = {
"agent:ops:work": { sessionId: "canonical", updatedAt: 3 },
@@ -1281,6 +973,8 @@ describe("resolveSessionModelRef", () => {
describe("listSessionsFromStore selected model display", () => {
test("async list yields during bulk transcript title and last-message hydration", async () => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-sessions-list-yield-"));
const previousStateDir = process.env.OPENCLAW_STATE_DIR;
process.env.OPENCLAW_STATE_DIR = tmpDir;
try {
const storePath = path.join(tmpDir, "sessions.json");
const store: Record<string, SessionEntry> = {};
@@ -1297,15 +991,16 @@ describe("listSessionsFromStore selected model display", () => {
contextTokens: 1,
estimatedCostUsd: 0,
} as SessionEntry;
fs.writeFileSync(
path.join(tmpDir, `${sessionId}.jsonl`),
[
JSON.stringify({ type: "session", version: 1, id: sessionId }),
JSON.stringify({ message: { role: "user", content: `title ${i}` } }),
JSON.stringify({ message: { role: "assistant", content: `last ${i}` } }),
].join("\n"),
"utf-8",
);
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId,
transcriptPath: path.join(tmpDir, `${sessionId}.jsonl`),
events: [
{ type: "session", version: 1, id: sessionId },
{ message: { role: "user", content: `title ${i}` } },
{ message: { role: "assistant", content: `last ${i}` } },
],
});
}
const params = {
@@ -1340,12 +1035,20 @@ describe("listSessionsFromStore selected model display", () => {
expect(listed.sessions[0]?.thinkingOptions?.length).toBeGreaterThan(0);
expect(listed.sessions[0]?.thinkingDefault).toBe("off");
} finally {
closeOpenClawStateDatabaseForTest();
if (previousStateDir === undefined) {
delete process.env.OPENCLAW_STATE_DIR;
} else {
process.env.OPENCLAW_STATE_DIR = previousStateDir;
}
fs.rmSync(tmpDir, { recursive: true, force: true });
}
});
test("caps transcript title and last-message hydration for bulk list responses", async () => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-sessions-list-cap-"));
const previousStateDir = process.env.OPENCLAW_STATE_DIR;
process.env.OPENCLAW_STATE_DIR = tmpDir;
try {
const storePath = path.join(tmpDir, "sessions.json");
const store: Record<string, SessionEntry> = {};
@@ -1358,17 +1061,16 @@ describe("listSessionsFromStore selected model display", () => {
modelProvider: "openai",
model: "gpt-5.4",
} as SessionEntry;
if (i === 0 || i === 99 || i === 100) {
fs.writeFileSync(
path.join(tmpDir, `${sessionId}.jsonl`),
[
JSON.stringify({ type: "session", version: 1, id: sessionId }),
JSON.stringify({ message: { role: "user", content: `title ${i}` } }),
JSON.stringify({ message: { role: "assistant", content: `last ${i}` } }),
].join("\n"),
"utf-8",
);
}
replaceSqliteSessionTranscriptEvents({
agentId: "main",
sessionId,
transcriptPath: path.join(tmpDir, `${sessionId}.jsonl`),
events: [
{ type: "session", version: 1, id: sessionId },
{ message: { role: "user", content: `title ${i}` } },
{ message: { role: "assistant", content: `last ${i}` } },
],
});
}
const result = await listSessionsFromStoreAsync({
@@ -1386,6 +1088,12 @@ describe("listSessionsFromStore selected model display", () => {
expect(result.sessions[100]?.derivedTitle).toBeUndefined();
expect(result.sessions[100]?.lastMessagePreview).toBeUndefined();
} finally {
closeOpenClawStateDatabaseForTest();
if (previousStateDir === undefined) {
delete process.env.OPENCLAW_STATE_DIR;
} else {
process.env.OPENCLAW_STATE_DIR = previousStateDir;
}
fs.rmSync(tmpDir, { recursive: true, force: true });
}
});

View File

@@ -99,8 +99,6 @@ import type {
} from "./session-utils.types.js";
export {
archiveFileOnDisk,
archiveSessionTranscripts,
attachOpenClawTranscriptMeta,
capArrayByJsonBytes,
readFirstUserMessageFromTranscript,

View File

@@ -8,6 +8,7 @@ import { parseConfigJson5, resetConfigRuntimeState } from "../config/config.js";
import {
clearSessionStoreCacheForTest,
resolveMainSessionKeyFromConfig,
saveSessionStore,
type SessionEntry,
} from "../config/sessions.js";
import { resetAgentRunContextForTest } from "../infra/agent-events.js";
@@ -213,12 +214,11 @@ export async function writeSessionStore(params: {
});
store[storeKey] = entry;
}
// Gateway suites often reuse the same store path across tests while writing the
// file directly; clear the in-process cache so handlers reload the seeded state.
// Gateway suites often reuse the same store path across tests; clear the
// in-process cache so handlers reload the seeded SQLite state.
clearSessionStoreCacheForTest();
await persistTestSessionConfig();
await fs.mkdir(path.dirname(storePath), { recursive: true });
await fs.writeFile(storePath, JSON.stringify(store, null, 2), "utf-8");
await saveSessionStore(storePath, store as Record<string, SessionEntry>);
clearSessionStoreCacheForTest();
}

View File

@@ -1,13 +1,14 @@
import fsSync from "node:fs";
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterAll, beforeAll, beforeEach, expect, vi } from "vitest";
import type { AssistantMessage, UserMessage } from "../../agents/pi-ai-contract.js";
import type { SessionEntry } from "../../config/sessions.js";
import { replaceSqliteSessionTranscriptEvents } from "../../config/sessions/transcript-store.sqlite.js";
import type { InternalHookEvent } from "../../hooks/internal-hooks.js";
import { resetSystemEventsForTest } from "../../infra/system-events.js";
import { startGatewayServerHarness, type GatewayServerHarness } from "../server.e2e-ws-harness.js";
import { captureCompactionCheckpointSnapshotAsync } from "../session-compaction-checkpoints.js";
import {
connectOk,
embeddedRunMock,
@@ -323,12 +324,26 @@ export function setupGatewaySessionsTestHarness() {
};
}
export async function writeSingleLineSession(dir: string, sessionId: string, content: string) {
await fs.writeFile(
path.join(dir, `${sessionId}.jsonl`),
`${JSON.stringify({ role: "user", content })}\n`,
"utf-8",
);
export async function writeSingleLineSession(
dir: string,
sessionId: string,
content: string,
opts: { agentId?: string; transcriptPath?: string } = {},
) {
const transcriptPath = opts.transcriptPath ?? path.join(dir, `${sessionId}.jsonl`);
replaceSqliteSessionTranscriptEvents({
agentId: opts.agentId ?? "main",
sessionId,
transcriptPath,
events: [
{
type: "message",
id: `${sessionId}-message`,
message: { role: "user", content },
},
],
});
return transcriptPath;
}
export function sessionStoreEntry(sessionId: string, overrides: Partial<SessionEntry> = {}) {
@@ -380,11 +395,14 @@ export async function createCheckpointFixture(dir: string) {
if (!sessionFile) {
throw new Error("expected persisted session file");
}
const preCompactionSessionFile = path.join(
dir,
`${path.parse(sessionFile).name}.checkpoint-test.jsonl`,
);
fsSync.copyFileSync(sessionFile, preCompactionSessionFile);
const checkpointSnapshot = await captureCompactionCheckpointSnapshotAsync({
sessionManager: session,
sessionFile,
});
const preCompactionSessionFile = checkpointSnapshot?.sessionFile;
if (!preCompactionSessionFile) {
throw new Error("expected persisted checkpoint snapshot");
}
const preCompactionSession = SessionManager.open(preCompactionSessionFile, dir);
session.appendCompaction("checkpoint summary", preCompactionLeafId, 123, { ok: true });
const postCompactionLeafId = session.getLeafId();

View File

@@ -7,11 +7,7 @@ import { writeWorkspaceFile } from "../../../test-helpers/workspace.js";
import { withEnvAsync } from "../../../test-utils/env.js";
import { createHookEvent } from "../../hooks.js";
import { generateSlugViaLLM } from "../../llm-slug-generator.js";
import {
findPreviousSessionFile,
getRecentSessionContent,
getRecentSessionContentWithResetFallback,
} from "./transcript.js";
import { findPreviousSessionFile, getRecentSessionContent } from "./transcript.js";
// Avoid calling the embedded Pi agent (global command lane); keep this unit test deterministic.
vi.mock("../../llm-slug-generator.js", () => ({
@@ -582,65 +578,13 @@ describe("session-memory hook", () => {
expect(memoryContent).toContain("assistant: Fourth message");
});
it("falls back to latest .jsonl.reset.* transcript when active file is empty", async () => {
const { sessionsDir, activeSessionFile } = await createSessionMemoryWorkspace({
activeSession: { name: "test-session.jsonl", content: "" },
});
// Simulate /new rotation where useful content is now in .reset.* file
const resetContent = createMockSessionContent([
{ role: "user", content: "Message from rotated transcript" },
{ role: "assistant", content: "Recovered from reset fallback" },
]);
await writeWorkspaceFile({
dir: sessionsDir,
name: "test-session.jsonl.reset.2026-02-16T22-26-33.000Z",
content: resetContent,
});
const memoryContent = await getRecentSessionContentWithResetFallback(activeSessionFile!);
expect(memoryContent).toContain("user: Message from rotated transcript");
expect(memoryContent).toContain("assistant: Recovered from reset fallback");
});
it("handles reset-path session pointers from previousSessionEntry", async () => {
const { sessionsDir } = await createSessionMemoryWorkspace();
const sessionId = "reset-pointer-session";
const resetSessionFile = await writeWorkspaceFile({
dir: sessionsDir,
name: `${sessionId}.jsonl.reset.2026-02-16T22-26-33.000Z`,
content: createMockSessionContent([
{ role: "user", content: "Message from reset pointer" },
{ role: "assistant", content: "Recovered directly from reset file" },
]),
});
const previousSessionFile = await findPreviousSessionFile({
sessionsDir,
currentSessionFile: resetSessionFile,
sessionId,
});
expect(previousSessionFile).toBeUndefined();
const memoryContent = await getRecentSessionContentWithResetFallback(resetSessionFile);
expect(memoryContent).toContain("user: Message from reset pointer");
expect(memoryContent).toContain("assistant: Recovered directly from reset file");
});
it("recovers transcript when previousSessionEntry.sessionFile is missing", async () => {
it("recovers canonical transcript when previousSessionEntry.sessionFile is missing", async () => {
const { sessionsDir } = await createSessionMemoryWorkspace();
const sessionId = "missing-session-file";
await writeWorkspaceFile({
dir: sessionsDir,
name: `${sessionId}.jsonl`,
content: "",
});
await writeWorkspaceFile({
dir: sessionsDir,
name: `${sessionId}.jsonl.reset.2026-02-16T22-26-33.000Z`,
content: createMockSessionContent([
{ role: "user", content: "Recovered with missing sessionFile pointer" },
{ role: "assistant", content: "Recovered by sessionId fallback" },
@@ -653,79 +597,11 @@ describe("session-memory hook", () => {
});
expect(previousSessionFile).toBe(path.join(sessionsDir, `${sessionId}.jsonl`));
const memoryContent = await getRecentSessionContentWithResetFallback(previousSessionFile!);
const memoryContent = await getRecentSessionContent(previousSessionFile!);
expect(memoryContent).toContain("user: Recovered with missing sessionFile pointer");
expect(memoryContent).toContain("assistant: Recovered by sessionId fallback");
});
it("prefers the newest reset transcript when multiple reset candidates exist", async () => {
const { sessionsDir, activeSessionFile } = await createSessionMemoryWorkspace({
activeSession: { name: "test-session.jsonl", content: "" },
});
await writeWorkspaceFile({
dir: sessionsDir,
name: "test-session.jsonl.reset.2026-02-16T22-26-33.000Z",
content: createMockSessionContent([
{ role: "user", content: "Older rotated transcript" },
{ role: "assistant", content: "Old summary" },
]),
});
await writeWorkspaceFile({
dir: sessionsDir,
name: "test-session.jsonl.reset.2026-02-16T22-26-34.000Z",
content: createMockSessionContent([
{ role: "user", content: "Newest rotated transcript" },
{ role: "assistant", content: "Newest summary" },
]),
});
const memoryContent = await getRecentSessionContentWithResetFallback(activeSessionFile!);
if (!memoryContent) {
throw new Error("expected newest reset transcript content");
}
expectMemoryConversation({
memoryContent,
user: "Newest rotated transcript",
assistant: "Newest summary",
absent: "Older rotated transcript",
});
});
it("prefers active transcript when it is non-empty even with reset candidates", async () => {
const { sessionsDir, activeSessionFile } = await createSessionMemoryWorkspace({
activeSession: {
name: "test-session.jsonl",
content: createMockSessionContent([
{ role: "user", content: "Active transcript message" },
{ role: "assistant", content: "Active transcript summary" },
]),
},
});
await writeWorkspaceFile({
dir: sessionsDir,
name: "test-session.jsonl.reset.2026-02-16T22-26-34.000Z",
content: createMockSessionContent([
{ role: "user", content: "Reset fallback message" },
{ role: "assistant", content: "Reset fallback summary" },
]),
});
const memoryContent = await getRecentSessionContentWithResetFallback(activeSessionFile!);
if (!memoryContent) {
throw new Error("expected active transcript memory content");
}
expectMemoryConversation({
memoryContent,
user: "Active transcript message",
assistant: "Active transcript summary",
absent: "Reset fallback message",
});
});
it("handles empty session files gracefully", async () => {
// Should not throw
const { files } = await runNewWithPreviousSession({ sessionContent: "" });

View File

@@ -24,7 +24,7 @@ import {
import { resolveHookConfig } from "../../config.js";
import type { HookHandler } from "../../hooks.js";
import { generateSlugViaLLM } from "../../llm-slug-generator.js";
import { findPreviousSessionFile, getRecentSessionContentWithResetFallback } from "./transcript.js";
import { findPreviousSessionFile, getRecentSessionContent } from "./transcript.js";
const log = createSubsystemLogger("hooks/session-memory");
@@ -173,18 +173,13 @@ async function saveSessionMemoryNow(event: Parameters<HookHandler>[0]): Promise<
const currentSessionId = sessionEntry.sessionId as string;
let currentSessionFile = (sessionEntry.sessionFile as string) || undefined;
// If sessionFile is empty or looks like a new/reset file, try to find the previous session file.
if (!currentSessionFile || currentSessionFile.includes(".reset.")) {
if (!currentSessionFile) {
const sessionsDirs = new Set<string>();
if (currentSessionFile) {
sessionsDirs.add(path.dirname(currentSessionFile));
}
sessionsDirs.add(path.join(workspaceDir, "sessions"));
for (const sessionsDir of sessionsDirs) {
const recoveredSessionFile = await findPreviousSessionFile({
sessionsDir,
currentSessionFile,
sessionId: currentSessionId,
});
if (!recoveredSessionFile) {
@@ -215,8 +210,7 @@ async function saveSessionMemoryNow(event: Parameters<HookHandler>[0]): Promise<
let sessionContent: string | null = null;
if (sessionFile) {
// Get recent conversation content, with fallback to rotated reset transcript.
sessionContent = await getRecentSessionContentWithResetFallback(sessionFile, messageCount);
sessionContent = await getRecentSessionContent(sessionFile, messageCount);
log.debug("Session content loaded", {
length: sessionContent?.length ?? 0,
messageCount,

View File

@@ -61,54 +61,14 @@ export async function getRecentSessionContent(
}
}
export async function getRecentSessionContentWithResetFallback(
sessionFilePath: string,
messageCount: number = 15,
): Promise<string | null> {
const primary = await getRecentSessionContent(sessionFilePath, messageCount);
if (primary) {
return primary;
}
try {
const dir = path.dirname(sessionFilePath);
const base = path.basename(sessionFilePath);
const resetPrefix = `${base}.reset.`;
const files = await fs.readdir(dir);
const resetCandidates = files.filter((name) => name.startsWith(resetPrefix)).toSorted();
if (resetCandidates.length === 0) {
return primary;
}
const latestResetPath = path.join(dir, resetCandidates[resetCandidates.length - 1]);
return (await getRecentSessionContent(latestResetPath, messageCount)) || primary;
} catch {
return primary;
}
}
function stripResetSuffix(fileName: string): string {
const resetIndex = fileName.indexOf(".reset.");
return resetIndex === -1 ? fileName : fileName.slice(0, resetIndex);
}
export async function findPreviousSessionFile(params: {
sessionsDir: string;
currentSessionFile?: string;
sessionId?: string;
}): Promise<string | undefined> {
try {
const files = await fs.readdir(params.sessionsDir);
const fileSet = new Set(files);
const baseFromReset = params.currentSessionFile
? stripResetSuffix(path.basename(params.currentSessionFile))
: undefined;
if (baseFromReset && fileSet.has(baseFromReset)) {
return path.join(params.sessionsDir, baseFromReset);
}
const trimmedSessionId = params.sessionId?.trim();
if (trimmedSessionId) {
const canonicalFile = `${trimmedSessionId}.jsonl`;
@@ -117,30 +77,13 @@ export async function findPreviousSessionFile(params: {
}
const topicVariants = files
.filter(
(name) =>
name.startsWith(`${trimmedSessionId}-topic-`) &&
name.endsWith(".jsonl") &&
!name.includes(".reset."),
)
.filter((name) => name.startsWith(`${trimmedSessionId}-topic-`) && name.endsWith(".jsonl"))
.toSorted()
.toReversed();
if (topicVariants.length > 0) {
return path.join(params.sessionsDir, topicVariants[0]);
}
}
if (!params.currentSessionFile) {
return undefined;
}
const nonResetJsonl = files
.filter((name) => name.endsWith(".jsonl") && !name.includes(".reset."))
.toSorted()
.toReversed();
if (nonResetJsonl.length > 0) {
return path.join(params.sessionsDir, nonResetJsonl[0]);
}
} catch {
// Ignore directory read errors.
}

View File

@@ -60,7 +60,7 @@ import {
} from "../config/sessions/main-session.js";
import { resolveStorePath } from "../config/sessions/paths.js";
import { loadSessionStore } from "../config/sessions/store-load.js";
import { archiveRemovedSessionTranscripts, updateSessionStore } from "../config/sessions/store.js";
import { deleteRemovedSessionTranscripts, updateSessionStore } from "../config/sessions/store.js";
import type { SessionEntry } from "../config/sessions/types.js";
import type { AgentDefaultsConfig } from "../config/types.agent-defaults.js";
import type { OpenClawConfig } from "../config/types.openclaw.js";
@@ -1442,15 +1442,14 @@ export async function runHeartbeatOnce(opts: {
});
if (removedSessionFiles.size > 0) {
try {
await archiveRemovedSessionTranscripts({
await deleteRemovedSessionTranscripts({
removedSessionFiles,
referencedSessionIds,
storePath: isolatedStorePath,
reason: "deleted",
restrictToStoreDir: true,
});
} catch (err) {
log.warn("heartbeat: failed to archive stale isolated session transcript", {
log.warn("heartbeat: failed to delete stale isolated session transcript", {
err: String(err),
sessionKey: staleIsolatedSessionKey,
});

File diff suppressed because it is too large Load Diff

View File

@@ -133,6 +133,17 @@ export {
type SessionWriteLockAcquireTimeoutConfig,
} from "../agents/session-write-lock.js";
export { appendSessionTranscriptMessage } from "../config/sessions/transcript-append.js";
export {
deleteOpenClawStateKvJson,
readOpenClawStateKvJson,
writeOpenClawStateKvJson,
type OpenClawStateJsonValue,
} from "../state/openclaw-state-kv.js";
export {
hasSqliteSessionTranscriptEvents,
loadSqliteSessionTranscriptEvents,
resolveSqliteSessionTranscriptScopeForPath,
} from "../config/sessions/transcript-store.sqlite.js";
export { emitSessionTranscriptUpdate } from "../sessions/transcript-events.js";
export {
buildSessionContext,
@@ -143,6 +154,7 @@ export {
type AgentSession,
type ExtensionAPI,
type ExtensionContext,
type FileEntry,
type SessionEntry,
} from "../agents/transcript/session-transcript-contract.js";
export {

View File

@@ -21,30 +21,7 @@ describe("extractTranscriptStemFromSessionsMemoryHit", () => {
expect(extractTranscriptStemFromSessionsMemoryHit("qmd/sessions/x/y/z.md")).toBe("z");
});
it("strips .jsonl.reset.<iso> archive suffix so rotated transcripts resolve to the live stem", () => {
expect(
extractTranscriptStemFromSessionsMemoryHit(
"sessions/abc-uuid.jsonl.reset.2026-02-16T22-26-33.000Z",
),
).toBe("abc-uuid");
});
it("strips .jsonl.deleted.<iso> archive suffix the same way", () => {
expect(
extractTranscriptStemFromSessionsMemoryHit(
"sessions/def-uuid.jsonl.deleted.2026-02-16T22-27-33.000Z",
),
).toBe("def-uuid");
});
it("handles archive suffix on bare basenames without the sessions/ prefix", () => {
expect(
extractTranscriptStemFromSessionsMemoryHit("ghi-thread.jsonl.reset.2026-02-16T22-28-33.000Z"),
).toBe("ghi-thread");
});
it("does not mistake arbitrary suffixes containing .jsonl. for archives", () => {
// Not a real archive pattern: suffix after .jsonl. must be `reset` or `deleted`.
it("does not accept suffixed jsonl artifact names", () => {
expect(
extractTranscriptStemFromSessionsMemoryHit("sessions/weird.jsonl.backup.2026-01-01.zst"),
).toBeNull();
@@ -52,22 +29,9 @@ describe("extractTranscriptStemFromSessionsMemoryHit", () => {
});
describe("extractTranscriptIdentityFromSessionsMemoryHit", () => {
it("extracts owner metadata from agent-scoped session archive paths", () => {
expect(
extractTranscriptIdentityFromSessionsMemoryHit(
"sessions/main/deleted-uuid.jsonl.deleted.2026-02-16T22-27-33.000Z",
),
).toEqual({
stem: "deleted-uuid",
ownerAgentId: "main",
archived: true,
});
});
it("does not invent owner metadata for legacy basename-only paths", () => {
it("does not invent owner metadata for basename-only paths", () => {
expect(extractTranscriptIdentityFromSessionsMemoryHit("sessions/abc-uuid.jsonl")).toEqual({
stem: "abc-uuid",
archived: false,
});
});
});
@@ -92,13 +56,9 @@ describe("resolveTranscriptStemToSessionKeys", () => {
expect(keys).toEqual(["agent:main:s1", "agent:peer:s2"]);
});
it("falls back to archived owner metadata when deleted archives are gone from the live store", () => {
const keys = resolveTranscriptStemToSessionKeys({
store: {},
stem: "deleted-stem",
archivedOwnerAgentId: "main",
});
it("does not synthesize keys when the live store has no matching transcript", () => {
const keys = resolveTranscriptStemToSessionKeys({ store: {}, stem: "deleted-stem" });
expect(keys).toEqual(["agent:main:deleted-stem"]);
expect(keys).toEqual([]);
});
});

View File

@@ -1,5 +1,4 @@
import path from "node:path";
import { parseUsageCountedSessionIdFromFileName } from "../config/sessions/artifacts.js";
import type { SessionEntry } from "../config/sessions/types.js";
import { normalizeAgentId } from "../routing/session-key.js";
import { normalizeOptionalString } from "../shared/string-coerce.js";
@@ -9,7 +8,6 @@ export { loadCombinedSessionStoreForGateway } from "../config/sessions/combined-
export type SessionTranscriptHitIdentity = {
stem: string;
ownerAgentId?: string;
archived: boolean;
};
function parseSessionsPath(hitPath: string): { base: string; ownerAgentId?: string } {
@@ -29,8 +27,6 @@ function parseSessionsPath(hitPath: string): { base: string; ownerAgentId?: stri
/**
* Derive transcript stem `S` from a memory search hit path for `source === "sessions"`.
* Builtin index uses `sessions/<basename>.jsonl`; QMD exports use `<stem>.md`.
* Archived transcripts (`.jsonl.reset.<iso>` / `.jsonl.deleted.<iso>`) resolve
* to the same stem as the live `.jsonl` they were rotated from.
*/
export function extractTranscriptStemFromSessionsMemoryHit(hitPath: string): string | null {
return extractTranscriptIdentityFromSessionsMemoryHit(hitPath)?.stem ?? null;
@@ -40,17 +36,13 @@ export function extractTranscriptIdentityFromSessionsMemoryHit(
hitPath: string,
): SessionTranscriptHitIdentity | null {
const { base, ownerAgentId } = parseSessionsPath(hitPath);
const archivedStem = parseUsageCountedSessionIdFromFileName(base);
if (archivedStem && base !== `${archivedStem}.jsonl`) {
return { stem: archivedStem, ownerAgentId, archived: true };
}
if (base.endsWith(".jsonl")) {
const stem = base.slice(0, -".jsonl".length);
return stem ? { stem, ownerAgentId, archived: false } : null;
return stem ? { stem, ownerAgentId } : null;
}
if (base.endsWith(".md")) {
const stem = base.slice(0, -".md".length);
return stem ? { stem, archived: false } : null;
return stem ? { stem } : null;
}
return null;
}
@@ -63,12 +55,9 @@ export function extractTranscriptIdentityFromSessionsMemoryHit(
export function resolveTranscriptStemToSessionKeys(params: {
store: Record<string, SessionEntry>;
stem: string;
archivedOwnerAgentId?: string;
}): string[] {
const { store } = params;
const matches: string[] = [];
const stemAsFile = params.stem.endsWith(".jsonl") ? params.stem : `${params.stem}.jsonl`;
const parsedStemId = parseUsageCountedSessionIdFromFileName(stemAsFile);
for (const [sessionKey, entry] of Object.entries(store)) {
const sessionFile = normalizeOptionalString(entry.sessionFile);
@@ -80,7 +69,7 @@ export function resolveTranscriptStemToSessionKeys(params: {
continue;
}
}
if (entry.sessionId === params.stem || (parsedStemId && entry.sessionId === parsedStemId)) {
if (entry.sessionId === params.stem) {
matches.push(sessionKey);
}
}
@@ -88,8 +77,5 @@ export function resolveTranscriptStemToSessionKeys(params: {
if (deduped.length > 0) {
return deduped;
}
const archivedOwnerAgentId = normalizeOptionalString(params.archivedOwnerAgentId);
return archivedOwnerAgentId
? [`agent:${normalizeAgentId(archivedOwnerAgentId)}:${params.stem}`]
: [];
return [];
}

View File

@@ -525,7 +525,6 @@ export type PluginHookSessionEndEvent = {
durationMs?: number;
reason?: PluginHookSessionEndReason;
sessionFile?: string;
transcriptArchived?: boolean;
nextSessionId?: string;
nextSessionKey?: string;
};

View File

@@ -45,8 +45,7 @@ describe("session hook runner methods", () => {
sessionKey: "agent:main:abc",
messageCount: 42,
reason: "daily" as const,
sessionFile: "/tmp/abc-123.jsonl.reset.2026-04-02T10-00-00.000Z",
transcriptArchived: true,
sessionFile: "/tmp/abc-123.jsonl",
nextSessionId: "def-456",
},
},