mirror of
https://github.com/moltbot/moltbot.git
synced 2026-05-13 23:56:07 +00:00
refactor: store memory wiki digests in sqlite
This commit is contained in:
@@ -93,7 +93,7 @@ The plugin initializes a vault like this:
|
||||
|
||||
Generated content stays inside managed blocks. Human note blocks are preserved.
|
||||
|
||||
Key beliefs can live in structured `claims` frontmatter with per-claim evidence, confidence, and status. Compile also emits machine-readable digests under `.openclaw-wiki/cache/` so agent/runtime consumers do not have to scrape markdown pages.
|
||||
Key beliefs can live in structured `claims` frontmatter with per-claim evidence, confidence, and status. Compile also stores machine-readable digests in SQLite plugin state so agent/runtime consumers do not have to scrape markdown pages.
|
||||
|
||||
When `render.createBacklinks` is enabled, compile adds deterministic `## Related` blocks to pages. Those blocks list source pages, pages that reference the current page, and nearby pages that share the same source ids.
|
||||
|
||||
@@ -142,7 +142,7 @@ The plugin also registers a non-exclusive memory corpus supplement, so shared `m
|
||||
|
||||
`wiki_apply` accepts structured `claims` payloads for synthesis and metadata updates, so the wiki can store claim-level evidence instead of only page-level prose.
|
||||
|
||||
When `context.includeCompiledDigestPrompt` is enabled, the memory prompt supplement also appends a compact snapshot from `.openclaw-wiki/cache/agent-digest.json`. Legacy prompt assembly sees that automatically, and non-legacy context engines can pick it up when they explicitly consume memory prompt supplements via `buildActiveMemoryPromptSection(...)`.
|
||||
When `context.includeCompiledDigestPrompt` is enabled, the memory prompt supplement also appends a compact snapshot from the SQLite-backed compiled digest. Legacy prompt assembly sees that automatically, and non-legacy context engines can pick it up when they explicitly consume memory prompt supplements via `buildActiveMemoryPromptSection(...)`.
|
||||
|
||||
## Gateway RPC
|
||||
|
||||
@@ -173,5 +173,5 @@ Write methods:
|
||||
- `unsafe-local` is intentionally experimental and non-portable.
|
||||
- Bridge mode reads the active memory plugin through public seams only.
|
||||
- Wiki pages are compiled artifacts, not the ultimate source of truth. Keep provenance attached to raw sources, memory artifacts, and daily notes.
|
||||
- The compiled agent digests in `.openclaw-wiki/cache/agent-digest.json` and `.openclaw-wiki/cache/claims.jsonl` are the stable machine-facing view of the wiki.
|
||||
- The compiled agent digests in SQLite plugin state are the stable machine-facing view of the wiki.
|
||||
- Obsidian CLI support requires the official `obsidian` CLI to be installed and available on `PATH`.
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { resetPluginBlobStoreForTests } from "openclaw/plugin-sdk/plugin-state-runtime";
|
||||
import { afterAll, beforeAll, describe, expect, it } from "vitest";
|
||||
import { compileMemoryWikiVault } from "./compile.js";
|
||||
import { readMemoryWikiCompiledDigestBundle } from "./digest-state.js";
|
||||
import { renderWikiMarkdown } from "./markdown.js";
|
||||
import { createMemoryWikiTestHarness } from "./test-helpers.js";
|
||||
|
||||
@@ -11,12 +13,21 @@ const { createVault } = createMemoryWikiTestHarness();
|
||||
describe("compileMemoryWikiVault", () => {
|
||||
let suiteRoot = "";
|
||||
let caseId = 0;
|
||||
let previousStateDir: string | undefined;
|
||||
|
||||
beforeAll(async () => {
|
||||
suiteRoot = await fs.mkdtemp(path.join(os.tmpdir(), "memory-wiki-compile-suite-"));
|
||||
previousStateDir = process.env.OPENCLAW_STATE_DIR;
|
||||
process.env.OPENCLAW_STATE_DIR = path.join(suiteRoot, "state");
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
resetPluginBlobStoreForTests();
|
||||
if (previousStateDir === undefined) {
|
||||
delete process.env.OPENCLAW_STATE_DIR;
|
||||
} else {
|
||||
process.env.OPENCLAW_STATE_DIR = previousStateDir;
|
||||
}
|
||||
if (suiteRoot) {
|
||||
await fs.rm(suiteRoot, { recursive: true, force: true });
|
||||
}
|
||||
@@ -70,9 +81,8 @@ describe("compileMemoryWikiVault", () => {
|
||||
await expect(fs.readFile(path.join(rootDir, "sources", "index.md"), "utf8")).resolves.toContain(
|
||||
"[Alpha](sources/alpha.md)",
|
||||
);
|
||||
const agentDigest = JSON.parse(
|
||||
await fs.readFile(path.join(rootDir, ".openclaw-wiki", "cache", "agent-digest.json"), "utf8"),
|
||||
) as {
|
||||
const digestBundle = await readMemoryWikiCompiledDigestBundle(rootDir);
|
||||
const agentDigest = JSON.parse(digestBundle.agentDigest ?? "") as {
|
||||
claimCount: number;
|
||||
pages: Array<{ path: string; claimCount: number; topClaims: Array<{ text: string }> }>;
|
||||
};
|
||||
@@ -84,9 +94,10 @@ describe("compileMemoryWikiVault", () => {
|
||||
topClaims: [expect.objectContaining({ text: "Alpha is the canonical source page." })],
|
||||
}),
|
||||
);
|
||||
await expect(
|
||||
fs.readFile(path.join(rootDir, ".openclaw-wiki", "cache", "claims.jsonl"), "utf8"),
|
||||
).resolves.toContain('"text":"Alpha is the canonical source page."');
|
||||
expect(digestBundle.claimsDigest).toContain('"text":"Alpha is the canonical source page."');
|
||||
await expect(fs.stat(path.join(rootDir, ".openclaw-wiki", "cache"))).rejects.toMatchObject({
|
||||
code: "ENOENT",
|
||||
});
|
||||
});
|
||||
|
||||
it("renders obsidian-friendly links when configured", async () => {
|
||||
@@ -333,9 +344,8 @@ describe("compileMemoryWikiVault", () => {
|
||||
await expect(
|
||||
fs.readFile(path.join(rootDir, "reports", "stale-pages.md"), "utf8"),
|
||||
).resolves.toContain("[Alpha](entities/alpha.md): missing updatedAt");
|
||||
const agentDigest = JSON.parse(
|
||||
await fs.readFile(path.join(rootDir, ".openclaw-wiki", "cache", "agent-digest.json"), "utf8"),
|
||||
) as {
|
||||
const digestBundle = await readMemoryWikiCompiledDigestBundle(rootDir);
|
||||
const agentDigest = JSON.parse(digestBundle.agentDigest ?? "") as {
|
||||
claimHealth: { missingEvidence: number; freshness: { unknown: number } };
|
||||
contradictionClusters: Array<{ key: string }>;
|
||||
};
|
||||
@@ -445,9 +455,8 @@ describe("compileMemoryWikiVault", () => {
|
||||
fs.readFile(path.join(rootDir, "reports", "privacy-review.md"), "utf8"),
|
||||
).resolves.toContain("confirm-before-use");
|
||||
|
||||
const agentDigest = JSON.parse(
|
||||
await fs.readFile(path.join(rootDir, ".openclaw-wiki", "cache", "agent-digest.json"), "utf8"),
|
||||
) as {
|
||||
const digestBundle = await readMemoryWikiCompiledDigestBundle(rootDir);
|
||||
const agentDigest = JSON.parse(digestBundle.agentDigest ?? "") as {
|
||||
pages: Array<{
|
||||
path: string;
|
||||
canonicalId?: string;
|
||||
@@ -465,9 +474,7 @@ describe("compileMemoryWikiVault", () => {
|
||||
relationshipCount: 1,
|
||||
}),
|
||||
);
|
||||
await expect(
|
||||
fs.readFile(path.join(rootDir, ".openclaw-wiki", "cache", "claims.jsonl"), "utf8"),
|
||||
).resolves.toContain('"evidenceKinds":["maintainer-whois"]');
|
||||
expect(digestBundle.claimsDigest).toContain('"evidenceKinds":["maintainer-whois"]');
|
||||
});
|
||||
|
||||
it("ignores generated related links when computing backlinks on repeated compile", async () => {
|
||||
|
||||
@@ -22,6 +22,7 @@ import {
|
||||
type WikiPageContradictionCluster,
|
||||
} from "./claim-health.js";
|
||||
import type { ResolvedMemoryWikiConfig } from "./config.js";
|
||||
import { writeMemoryWikiCompiledDigests } from "./digest-state.js";
|
||||
import { appendMemoryWikiLog } from "./log.js";
|
||||
import {
|
||||
formatWikiLink,
|
||||
@@ -45,8 +46,6 @@ const COMPILE_PAGE_GROUPS: Array<{ kind: WikiPageKind; dir: string; heading: str
|
||||
{ kind: "synthesis", dir: "syntheses", heading: "Syntheses" },
|
||||
{ kind: "report", dir: "reports", heading: "Reports" },
|
||||
];
|
||||
const AGENT_DIGEST_PATH = ".openclaw-wiki/cache/agent-digest.json";
|
||||
const CLAIMS_DIGEST_PATH = ".openclaw-wiki/cache/claims.jsonl";
|
||||
const MAX_RELATED_PAGES_PER_SECTION = 12;
|
||||
const MAX_SHARED_SOURCE_FANOUT = 24;
|
||||
|
||||
@@ -1254,10 +1253,7 @@ async function writeAgentDigestArtifacts(params: {
|
||||
rootDir: string;
|
||||
pages: WikiPageSummary[];
|
||||
pageCounts: Record<WikiPageKind, number>;
|
||||
}): Promise<string[]> {
|
||||
const updatedFiles: string[] = [];
|
||||
const agentDigestPath = path.join(params.rootDir, AGENT_DIGEST_PATH);
|
||||
const claimsDigestPath = path.join(params.rootDir, CLAIMS_DIGEST_PATH);
|
||||
}): Promise<void> {
|
||||
const agentDigest = `${JSON.stringify(
|
||||
buildAgentDigest({
|
||||
pages: params.pages,
|
||||
@@ -1270,20 +1266,11 @@ async function writeAgentDigestArtifacts(params: {
|
||||
buildClaimsDigestLines({ pages: params.pages }).join("\n"),
|
||||
);
|
||||
|
||||
for (const [filePath, content] of [
|
||||
[agentDigestPath, agentDigest],
|
||||
[claimsDigestPath, claimsDigest],
|
||||
] as const) {
|
||||
const relativePath = path.relative(params.rootDir, filePath);
|
||||
const root = await fsRoot(params.rootDir);
|
||||
const existing = await root.readText(relativePath).catch(() => "");
|
||||
if (existing === content) {
|
||||
continue;
|
||||
}
|
||||
await root.write(relativePath, content);
|
||||
updatedFiles.push(filePath);
|
||||
}
|
||||
return updatedFiles;
|
||||
await writeMemoryWikiCompiledDigests({
|
||||
vaultRoot: params.rootDir,
|
||||
agentDigest,
|
||||
claimsDigest,
|
||||
});
|
||||
}
|
||||
|
||||
export async function compileMemoryWikiVault(
|
||||
@@ -1302,12 +1289,11 @@ export async function compileMemoryWikiVault(
|
||||
pages = await readPageSummaries(rootDir);
|
||||
}
|
||||
const counts = buildPageCounts(pages);
|
||||
const digestUpdatedFiles = await writeAgentDigestArtifacts({
|
||||
await writeAgentDigestArtifacts({
|
||||
rootDir,
|
||||
pages,
|
||||
pageCounts: counts,
|
||||
});
|
||||
updatedFiles.push(...digestUpdatedFiles);
|
||||
|
||||
const rootIndexPath = path.join(rootDir, "index.md");
|
||||
if (
|
||||
|
||||
76
extensions/memory-wiki/src/digest-state.test.ts
Normal file
76
extensions/memory-wiki/src/digest-state.test.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { resetPluginBlobStoreForTests } from "openclaw/plugin-sdk/plugin-state-runtime";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import {
|
||||
importMemoryWikiLegacyDigestFiles,
|
||||
legacyMemoryWikiDigestFilesExist,
|
||||
readMemoryWikiAgentDigestSync,
|
||||
readMemoryWikiCompiledDigestBundle,
|
||||
resolveMemoryWikiLegacyDigestPath,
|
||||
writeMemoryWikiCompiledDigests,
|
||||
} from "./digest-state.js";
|
||||
|
||||
// Integration-style coverage for the SQLite-backed digest store: each case
// runs against a throwaway temp vault with an isolated OPENCLAW_STATE_DIR so
// the process-global plugin blob store never leaks between tests.
describe("memory wiki compiled digest state", () => {
  // Snapshot of the state-dir env var so afterEach can restore it exactly.
  const previousStateDir = process.env.OPENCLAW_STATE_DIR;
  // Temp vault roots created during a test; drained and deleted in afterEach.
  const roots: string[] = [];

  afterEach(async () => {
    // Drop the in-process blob store so the next test starts empty.
    resetPluginBlobStoreForTests();
    if (previousStateDir === undefined) {
      delete process.env.OPENCLAW_STATE_DIR;
    } else {
      process.env.OPENCLAW_STATE_DIR = previousStateDir;
    }
    // splice(0) both empties `roots` and yields the paths to delete.
    await Promise.all(roots.splice(0).map((root) => fs.rm(root, { recursive: true, force: true })));
  });

  // Creates a fresh temp vault and points plugin state at a directory inside
  // it, so SQLite state is per-test and removed together with the vault.
  async function createVaultRoot(): Promise<string> {
    const root = await fs.mkdtemp(path.join(os.tmpdir(), "memory-wiki-digest-"));
    roots.push(root);
    process.env.OPENCLAW_STATE_DIR = path.join(root, "state");
    return root;
  }

  it("stores compiled digests in SQLite plugin blob state", async () => {
    const vaultRoot = await createVaultRoot();

    await writeMemoryWikiCompiledDigests({
      vaultRoot,
      agentDigest: '{"claimCount":1,"pages":[]}\n',
      claimsDigest: '{"text":"Alpha"}\n',
    });

    // Both the sync single-digest read and the async bundle read must see
    // exactly the content that was written.
    expect(readMemoryWikiAgentDigestSync(vaultRoot)).toBe('{"claimCount":1,"pages":[]}\n');
    await expect(readMemoryWikiCompiledDigestBundle(vaultRoot)).resolves.toEqual({
      agentDigest: '{"claimCount":1,"pages":[]}\n',
      claimsDigest: '{"text":"Alpha"}\n',
    });
    // Writing through the new API must not create the legacy cache file.
    await expect(
      fs.stat(resolveMemoryWikiLegacyDigestPath(vaultRoot, "agent-digest")),
    ).rejects.toMatchObject({ code: "ENOENT" });
  });

  it("imports legacy cache files through the migration helper", async () => {
    const vaultRoot = await createVaultRoot();
    // Seed the pre-migration on-disk cache layout by hand.
    const agentPath = resolveMemoryWikiLegacyDigestPath(vaultRoot, "agent-digest");
    const claimsPath = resolveMemoryWikiLegacyDigestPath(vaultRoot, "claims-digest");
    await fs.mkdir(path.dirname(agentPath), { recursive: true });
    await fs.writeFile(agentPath, '{"claimCount":2,"pages":[]}\n', "utf8");
    await fs.writeFile(claimsPath, '{"text":"Beta"}\n', "utf8");

    await expect(legacyMemoryWikiDigestFilesExist(vaultRoot)).resolves.toBe(true);
    await expect(importMemoryWikiLegacyDigestFiles({ vaultRoot })).resolves.toMatchObject({
      imported: 2,
      warnings: [],
    });

    // Content now lives in plugin state...
    await expect(readMemoryWikiCompiledDigestBundle(vaultRoot)).resolves.toEqual({
      agentDigest: '{"claimCount":2,"pages":[]}\n',
      claimsDigest: '{"text":"Beta"}\n',
    });
    // ...and the legacy files were consumed (deleted) by the import.
    await expect(fs.stat(agentPath)).rejects.toMatchObject({ code: "ENOENT" });
    await expect(fs.stat(claimsPath)).rejects.toMatchObject({ code: "ENOENT" });
  });
});
|
||||
173
extensions/memory-wiki/src/digest-state.ts
Normal file
173
extensions/memory-wiki/src/digest-state.ts
Normal file
@@ -0,0 +1,173 @@
|
||||
import { createHash } from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import {
|
||||
createPluginBlobStore,
|
||||
createPluginBlobSyncStore,
|
||||
} from "openclaw/plugin-sdk/plugin-state-runtime";
|
||||
|
||||
export const MEMORY_WIKI_AGENT_DIGEST_LEGACY_PATH = ".openclaw-wiki/cache/agent-digest.json";
|
||||
export const MEMORY_WIKI_CLAIMS_DIGEST_LEGACY_PATH = ".openclaw-wiki/cache/claims.jsonl";
|
||||
|
||||
type MemoryWikiDigestKind = "agent-digest" | "claims-digest";
|
||||
|
||||
type MemoryWikiDigestMetadata = {
|
||||
vaultHash: string;
|
||||
kind: MemoryWikiDigestKind;
|
||||
contentType: "application/json" | "application/x-ndjson";
|
||||
};
|
||||
|
||||
const digestStore = createPluginBlobStore<MemoryWikiDigestMetadata>("memory-wiki", {
|
||||
namespace: "compiled-digest",
|
||||
maxEntries: 2000,
|
||||
});
|
||||
|
||||
const syncDigestStore = createPluginBlobSyncStore<MemoryWikiDigestMetadata>("memory-wiki", {
|
||||
namespace: "compiled-digest",
|
||||
maxEntries: 2000,
|
||||
});
|
||||
|
||||
function hashSegment(value: string): string {
|
||||
return createHash("sha256").update(value).digest("hex").slice(0, 32);
|
||||
}
|
||||
|
||||
function resolveVaultHash(vaultRoot: string): string {
|
||||
return hashSegment(path.resolve(vaultRoot));
|
||||
}
|
||||
|
||||
function resolveDigestKey(vaultRoot: string, kind: MemoryWikiDigestKind): string {
|
||||
return `${resolveVaultHash(vaultRoot)}:${kind}`;
|
||||
}
|
||||
|
||||
function contentTypeForDigestKind(
|
||||
kind: MemoryWikiDigestKind,
|
||||
): MemoryWikiDigestMetadata["contentType"] {
|
||||
return kind === "agent-digest" ? "application/json" : "application/x-ndjson";
|
||||
}
|
||||
|
||||
async function writeDigest(params: {
|
||||
vaultRoot: string;
|
||||
kind: MemoryWikiDigestKind;
|
||||
content: string;
|
||||
}): Promise<boolean> {
|
||||
const key = resolveDigestKey(params.vaultRoot, params.kind);
|
||||
const existing = await digestStore.lookup(key);
|
||||
if (existing?.blob.toString("utf8") === params.content) {
|
||||
return false;
|
||||
}
|
||||
await digestStore.register(
|
||||
key,
|
||||
{
|
||||
vaultHash: resolveVaultHash(params.vaultRoot),
|
||||
kind: params.kind,
|
||||
contentType: contentTypeForDigestKind(params.kind),
|
||||
},
|
||||
Buffer.from(params.content, "utf8"),
|
||||
);
|
||||
return true;
|
||||
}
|
||||
|
||||
export async function writeMemoryWikiCompiledDigests(params: {
|
||||
vaultRoot: string;
|
||||
agentDigest: string;
|
||||
claimsDigest: string;
|
||||
}): Promise<{ agentDigestChanged: boolean; claimsDigestChanged: boolean }> {
|
||||
const [agentDigestChanged, claimsDigestChanged] = await Promise.all([
|
||||
writeDigest({
|
||||
vaultRoot: params.vaultRoot,
|
||||
kind: "agent-digest",
|
||||
content: params.agentDigest,
|
||||
}),
|
||||
writeDigest({
|
||||
vaultRoot: params.vaultRoot,
|
||||
kind: "claims-digest",
|
||||
content: params.claimsDigest,
|
||||
}),
|
||||
]);
|
||||
return { agentDigestChanged, claimsDigestChanged };
|
||||
}
|
||||
|
||||
export function readMemoryWikiAgentDigestSync(vaultRoot: string): string | null {
|
||||
return (
|
||||
syncDigestStore.lookup(resolveDigestKey(vaultRoot, "agent-digest"))?.blob.toString("utf8") ??
|
||||
null
|
||||
);
|
||||
}
|
||||
|
||||
export async function readMemoryWikiCompiledDigestBundle(vaultRoot: string): Promise<{
|
||||
agentDigest: string | null;
|
||||
claimsDigest: string | null;
|
||||
}> {
|
||||
const [agentDigest, claimsDigest] = await Promise.all([
|
||||
digestStore.lookup(resolveDigestKey(vaultRoot, "agent-digest")),
|
||||
digestStore.lookup(resolveDigestKey(vaultRoot, "claims-digest")),
|
||||
]);
|
||||
return {
|
||||
agentDigest: agentDigest?.blob.toString("utf8") ?? null,
|
||||
claimsDigest: claimsDigest?.blob.toString("utf8") ?? null,
|
||||
};
|
||||
}
|
||||
|
||||
export function resolveMemoryWikiLegacyDigestPath(
|
||||
vaultRoot: string,
|
||||
kind: MemoryWikiDigestKind,
|
||||
): string {
|
||||
return path.join(
|
||||
vaultRoot,
|
||||
kind === "agent-digest"
|
||||
? MEMORY_WIKI_AGENT_DIGEST_LEGACY_PATH
|
||||
: MEMORY_WIKI_CLAIMS_DIGEST_LEGACY_PATH,
|
||||
);
|
||||
}
|
||||
|
||||
async function importLegacyDigest(params: {
|
||||
vaultRoot: string;
|
||||
kind: MemoryWikiDigestKind;
|
||||
}): Promise<{ imported: boolean; sourcePath: string }> {
|
||||
const sourcePath = resolveMemoryWikiLegacyDigestPath(params.vaultRoot, params.kind);
|
||||
const content = await fs.readFile(sourcePath, "utf8");
|
||||
await writeDigest({
|
||||
vaultRoot: params.vaultRoot,
|
||||
kind: params.kind,
|
||||
content,
|
||||
});
|
||||
await fs.rm(sourcePath, { force: true });
|
||||
return { imported: true, sourcePath };
|
||||
}
|
||||
|
||||
export async function legacyMemoryWikiDigestFilesExist(vaultRoot: string): Promise<boolean> {
|
||||
const results = await Promise.all(
|
||||
(["agent-digest", "claims-digest"] as const).map((kind) =>
|
||||
fs
|
||||
.stat(resolveMemoryWikiLegacyDigestPath(vaultRoot, kind))
|
||||
.then((stat) => stat.isFile())
|
||||
.catch(() => false),
|
||||
),
|
||||
);
|
||||
return results.some(Boolean);
|
||||
}
|
||||
|
||||
export async function importMemoryWikiLegacyDigestFiles(params: {
|
||||
vaultRoot: string;
|
||||
}): Promise<{ imported: number; warnings: string[]; sourcePaths: string[] }> {
|
||||
const warnings: string[] = [];
|
||||
const sourcePaths: string[] = [];
|
||||
let imported = 0;
|
||||
for (const kind of ["agent-digest", "claims-digest"] as const) {
|
||||
try {
|
||||
const result = await importLegacyDigest({ vaultRoot: params.vaultRoot, kind });
|
||||
imported += result.imported ? 1 : 0;
|
||||
sourcePaths.push(result.sourcePath);
|
||||
} catch (error) {
|
||||
const sourcePath = resolveMemoryWikiLegacyDigestPath(params.vaultRoot, kind);
|
||||
if ((error as NodeJS.ErrnoException)?.code === "ENOENT") {
|
||||
continue;
|
||||
}
|
||||
warnings.push(`Failed importing Memory Wiki ${kind}: ${String(error)}`);
|
||||
sourcePaths.push(sourcePath);
|
||||
}
|
||||
}
|
||||
const cacheDir = path.join(params.vaultRoot, ".openclaw-wiki", "cache");
|
||||
await fs.rmdir(cacheDir).catch(() => undefined);
|
||||
return { imported, warnings, sourcePaths };
|
||||
}
|
||||
@@ -1,17 +1,28 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { resetPluginBlobStoreForTests } from "openclaw/plugin-sdk/plugin-state-runtime";
|
||||
import { afterAll, beforeAll, describe, expect, it } from "vitest";
|
||||
import { resolveMemoryWikiConfig } from "./config.js";
|
||||
import { writeMemoryWikiCompiledDigests } from "./digest-state.js";
|
||||
import { buildWikiPromptSection, createWikiPromptSectionBuilder } from "./prompt-section.js";
|
||||
|
||||
let suiteRoot = "";
|
||||
let previousStateDir: string | undefined;
|
||||
|
||||
beforeAll(async () => {
|
||||
suiteRoot = await fs.mkdtemp(path.join(os.tmpdir(), "memory-wiki-prompt-suite-"));
|
||||
previousStateDir = process.env.OPENCLAW_STATE_DIR;
|
||||
process.env.OPENCLAW_STATE_DIR = path.join(suiteRoot, "state");
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
resetPluginBlobStoreForTests();
|
||||
if (previousStateDir === undefined) {
|
||||
delete process.env.OPENCLAW_STATE_DIR;
|
||||
} else {
|
||||
process.env.OPENCLAW_STATE_DIR = previousStateDir;
|
||||
}
|
||||
if (suiteRoot) {
|
||||
await fs.rm(suiteRoot, { recursive: true, force: true });
|
||||
}
|
||||
@@ -34,10 +45,9 @@ describe("buildWikiPromptSection", () => {
|
||||
|
||||
it("can append a compact compiled digest snapshot when enabled", async () => {
|
||||
const rootDir = path.join(suiteRoot, "digest-enabled");
|
||||
await fs.mkdir(path.join(rootDir, ".openclaw-wiki", "cache"), { recursive: true });
|
||||
await fs.writeFile(
|
||||
path.join(rootDir, ".openclaw-wiki", "cache", "agent-digest.json"),
|
||||
JSON.stringify(
|
||||
await writeMemoryWikiCompiledDigests({
|
||||
vaultRoot: rootDir,
|
||||
agentDigest: `${JSON.stringify(
|
||||
{
|
||||
claimCount: 8,
|
||||
contradictionClusters: [{ key: "claim.alpha.db" }],
|
||||
@@ -61,9 +71,9 @@ describe("buildWikiPromptSection", () => {
|
||||
},
|
||||
null,
|
||||
2,
|
||||
),
|
||||
"utf8",
|
||||
);
|
||||
)}\n`,
|
||||
claimsDigest: "",
|
||||
});
|
||||
const builder = createWikiPromptSectionBuilder(
|
||||
resolveMemoryWikiConfig({
|
||||
vault: { path: rootDir },
|
||||
@@ -82,15 +92,14 @@ describe("buildWikiPromptSection", () => {
|
||||
|
||||
it("keeps the digest snapshot disabled by default", async () => {
|
||||
const rootDir = path.join(suiteRoot, "digest-disabled");
|
||||
await fs.mkdir(path.join(rootDir, ".openclaw-wiki", "cache"), { recursive: true });
|
||||
await fs.writeFile(
|
||||
path.join(rootDir, ".openclaw-wiki", "cache", "agent-digest.json"),
|
||||
JSON.stringify({
|
||||
await writeMemoryWikiCompiledDigests({
|
||||
vaultRoot: rootDir,
|
||||
agentDigest: `${JSON.stringify({
|
||||
claimCount: 1,
|
||||
pages: [{ title: "Alpha", kind: "entity", claimCount: 1, topClaims: [] }],
|
||||
}),
|
||||
"utf8",
|
||||
);
|
||||
})}\n`,
|
||||
claimsDigest: "",
|
||||
});
|
||||
const builder = createWikiPromptSectionBuilder(
|
||||
resolveMemoryWikiConfig({
|
||||
vault: { path: rootDir },
|
||||
@@ -102,8 +111,6 @@ describe("buildWikiPromptSection", () => {
|
||||
|
||||
it("stabilizes digest prompt ordering for prompt-cache-friendly output", async () => {
|
||||
const rootDir = path.join(suiteRoot, "digest-stable");
|
||||
const digestPath = path.join(rootDir, ".openclaw-wiki", "cache", "agent-digest.json");
|
||||
await fs.mkdir(path.dirname(digestPath), { recursive: true });
|
||||
|
||||
const builder = createWikiPromptSectionBuilder(
|
||||
resolveMemoryWikiConfig({
|
||||
@@ -162,10 +169,18 @@ describe("buildWikiPromptSection", () => {
|
||||
],
|
||||
};
|
||||
|
||||
await fs.writeFile(digestPath, JSON.stringify(firstDigest, null, 2), "utf8");
|
||||
await writeMemoryWikiCompiledDigests({
|
||||
vaultRoot: rootDir,
|
||||
agentDigest: `${JSON.stringify(firstDigest, null, 2)}\n`,
|
||||
claimsDigest: "",
|
||||
});
|
||||
const firstLines = builder({ availableTools: new Set(["web_search"]) });
|
||||
|
||||
await fs.writeFile(digestPath, JSON.stringify(secondDigest, null, 2), "utf8");
|
||||
await writeMemoryWikiCompiledDigests({
|
||||
vaultRoot: rootDir,
|
||||
agentDigest: `${JSON.stringify(secondDigest, null, 2)}\n`,
|
||||
claimsDigest: "",
|
||||
});
|
||||
const secondLines = builder({ availableTools: new Set(["web_search"]) });
|
||||
|
||||
expect(firstLines).toEqual(secondLines);
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import type { MemoryPromptSectionBuilder } from "openclaw/plugin-sdk/memory-host-core";
|
||||
import { resolveMemoryWikiConfig, type ResolvedMemoryWikiConfig } from "./config.js";
|
||||
import { readMemoryWikiAgentDigestSync } from "./digest-state.js";
|
||||
|
||||
const AGENT_DIGEST_PATH = ".openclaw-wiki/cache/agent-digest.json";
|
||||
const DIGEST_MAX_PAGES = 4;
|
||||
const DIGEST_MAX_CLAIMS_PER_PAGE = 2;
|
||||
|
||||
@@ -31,9 +29,11 @@ type PromptDigest = {
|
||||
};
|
||||
|
||||
function tryReadPromptDigest(config: ResolvedMemoryWikiConfig): PromptDigest | null {
|
||||
const digestPath = path.join(config.vault.path, AGENT_DIGEST_PATH);
|
||||
const raw = readMemoryWikiAgentDigestSync(config.vault.path);
|
||||
if (!raw) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
const raw = fs.readFileSync(digestPath, "utf8");
|
||||
const parsed = JSON.parse(raw) as PromptDigest;
|
||||
if (!parsed || typeof parsed !== "object") {
|
||||
return null;
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { resetPluginBlobStoreForTests } from "openclaw/plugin-sdk/plugin-state-runtime";
|
||||
import { afterAll, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import type { OpenClawConfig } from "../api.js";
|
||||
import { compileMemoryWikiVault } from "./compile.js";
|
||||
@@ -44,6 +45,7 @@ vi.mock("openclaw/plugin-sdk/session-transcript-hit", async (importOriginal) =>
|
||||
const { createVault } = createMemoryWikiTestHarness();
|
||||
let suiteRoot = "";
|
||||
let caseIndex = 0;
|
||||
let previousStateDir: string | undefined;
|
||||
|
||||
function collectWikiResultPaths(results: readonly { corpus: string; path: string }[]): string[] {
|
||||
const paths: string[] = [];
|
||||
@@ -66,9 +68,17 @@ beforeEach(() => {
|
||||
|
||||
beforeAll(async () => {
|
||||
suiteRoot = await fs.mkdtemp(path.join(os.tmpdir(), "memory-wiki-query-suite-"));
|
||||
previousStateDir = process.env.OPENCLAW_STATE_DIR;
|
||||
process.env.OPENCLAW_STATE_DIR = path.join(suiteRoot, "state");
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
resetPluginBlobStoreForTests();
|
||||
if (previousStateDir === undefined) {
|
||||
delete process.env.OPENCLAW_STATE_DIR;
|
||||
} else {
|
||||
process.env.OPENCLAW_STATE_DIR = previousStateDir;
|
||||
}
|
||||
if (suiteRoot) {
|
||||
await fs.rm(suiteRoot, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@ import { normalizeLowercaseStringOrEmpty } from "openclaw/plugin-sdk/text-runtim
|
||||
import type { OpenClawConfig } from "../api.js";
|
||||
import { assessClaimFreshness, isClaimContestedStatus } from "./claim-health.js";
|
||||
import type { ResolvedMemoryWikiConfig, WikiSearchBackend, WikiSearchCorpus } from "./config.js";
|
||||
import { readMemoryWikiCompiledDigestBundle } from "./digest-state.js";
|
||||
import {
|
||||
parseWikiMarkdown,
|
||||
toWikiPageSummary,
|
||||
@@ -27,8 +28,6 @@ import {
|
||||
import { initializeMemoryWikiVault } from "./vault.js";
|
||||
|
||||
const QUERY_DIRS = ["entities", "concepts", "sources", "syntheses", "reports"] as const;
|
||||
const AGENT_DIGEST_PATH = ".openclaw-wiki/cache/agent-digest.json";
|
||||
const CLAIMS_DIGEST_PATH = ".openclaw-wiki/cache/claims.jsonl";
|
||||
const RELATED_BLOCK_PATTERN =
|
||||
/<!-- openclaw:wiki:related:start -->[\s\S]*?<!-- openclaw:wiki:related:end -->/g;
|
||||
const MARKDOWN_FRONTMATTER_PATTERN = /^\s*---\r?\n[\s\S]*?\r?\n---\r?\n?/;
|
||||
@@ -286,10 +285,8 @@ function parseClaimsDigest(raw: string): QueryDigestClaim[] {
|
||||
}
|
||||
|
||||
async function readQueryDigestBundle(rootDir: string): Promise<QueryDigestBundle | null> {
|
||||
const [agentDigestRaw, claimsDigestRaw] = await Promise.all([
|
||||
fs.readFile(path.join(rootDir, AGENT_DIGEST_PATH), "utf8").catch(() => null),
|
||||
fs.readFile(path.join(rootDir, CLAIMS_DIGEST_PATH), "utf8").catch(() => null),
|
||||
]);
|
||||
const { agentDigest: agentDigestRaw, claimsDigest: claimsDigestRaw } =
|
||||
await readMemoryWikiCompiledDigestBundle(rootDir);
|
||||
if (!agentDigestRaw && !claimsDigestRaw) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -3,6 +3,10 @@ import path from "node:path";
|
||||
import type { MigrationProviderPlugin } from "openclaw/plugin-sdk/migration";
|
||||
import { createMigrationItem, summarizeMigrationItems } from "openclaw/plugin-sdk/migration";
|
||||
import type { ResolvedMemoryWikiConfig } from "./config.js";
|
||||
import {
|
||||
importMemoryWikiLegacyDigestFiles,
|
||||
legacyMemoryWikiDigestFilesExist,
|
||||
} from "./digest-state.js";
|
||||
import { writeMemoryWikiImportRunRecord } from "./import-runs.js";
|
||||
import { importMemoryWikiLegacyLog, resolveMemoryWikiLegacyLogPath } from "./log.js";
|
||||
import {
|
||||
@@ -83,6 +87,7 @@ export function createMemoryWikiSourceSyncMigrationProvider(
|
||||
const buildPlan: MigrationProviderPlugin["plan"] = async () => {
|
||||
const hasSourceSync = await legacySourceExists(config.vault.path);
|
||||
const hasLegacyLog = await legacyLogExists(config.vault.path);
|
||||
const hasLegacyDigests = await legacyMemoryWikiDigestFilesExist(config.vault.path);
|
||||
const importRunFiles = await listLegacyImportRunJsonFiles(config.vault.path);
|
||||
const items = [
|
||||
...(hasSourceSync
|
||||
@@ -122,6 +127,18 @@ export function createMemoryWikiSourceSyncMigrationProvider(
|
||||
}),
|
||||
]
|
||||
: []),
|
||||
...(hasLegacyDigests
|
||||
? [
|
||||
createMigrationItem({
|
||||
id: "memory-wiki-compiled-digest-cache",
|
||||
kind: "state",
|
||||
action: "import",
|
||||
source: path.join(config.vault.path, ".openclaw-wiki", "cache"),
|
||||
target: "global SQLite plugin_blob_entries(memory-wiki/compiled-digest)",
|
||||
message: "Import Memory Wiki compiled digest cache into SQLite plugin state.",
|
||||
}),
|
||||
]
|
||||
: []),
|
||||
];
|
||||
return {
|
||||
providerId: PROVIDER_ID,
|
||||
@@ -140,6 +157,7 @@ export function createMemoryWikiSourceSyncMigrationProvider(
|
||||
const found =
|
||||
(await legacySourceExists(config.vault.path)) ||
|
||||
(await legacyLogExists(config.vault.path)) ||
|
||||
(await legacyMemoryWikiDigestFilesExist(config.vault.path)) ||
|
||||
(await listLegacyImportRunJsonFiles(config.vault.path)).length > 0;
|
||||
return {
|
||||
found,
|
||||
@@ -196,6 +214,18 @@ export function createMemoryWikiSourceSyncMigrationProvider(
|
||||
imported: result.imported,
|
||||
},
|
||||
};
|
||||
} else if (item.id === "memory-wiki-compiled-digest-cache") {
|
||||
const result = await importMemoryWikiLegacyDigestFiles({
|
||||
vaultRoot: config.vault.path,
|
||||
});
|
||||
warnings.push(...result.warnings);
|
||||
items[itemIndex] = {
|
||||
...item,
|
||||
status: "migrated",
|
||||
details: {
|
||||
imported: result.imported,
|
||||
},
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
items[itemIndex] = {
|
||||
|
||||
@@ -18,7 +18,6 @@ export const WIKI_VAULT_DIRECTORIES = [
|
||||
"_views",
|
||||
".openclaw-wiki",
|
||||
".openclaw-wiki/locks",
|
||||
".openclaw-wiki/cache",
|
||||
] as const;
|
||||
|
||||
type InitializeMemoryWikiVaultResult = {
|
||||
@@ -48,7 +47,7 @@ function buildAgentsMarkdown(): string {
|
||||
- Preserve human notes outside managed markers.
|
||||
- Prefer source-backed claims over wiki-to-wiki citation loops.
|
||||
- Prefer structured \`claims\` with evidence over burying key beliefs only in prose.
|
||||
- Use \`.openclaw-wiki/cache/agent-digest.json\` and \`claims.jsonl\` for machine reads; markdown pages are the human view.
|
||||
- Use \`wiki_search\` and \`wiki_get\` for machine reads; markdown pages are the human view.
|
||||
`);
|
||||
}
|
||||
|
||||
@@ -65,7 +64,7 @@ This vault is maintained by the OpenClaw memory-wiki plugin.
|
||||
## Architecture
|
||||
- Raw sources remain the evidence layer.
|
||||
- Wiki pages are the human-readable synthesis layer.
|
||||
- \`.openclaw-wiki/cache/agent-digest.json\` is the agent-facing compiled digest.
|
||||
- Compiled digests live in SQLite plugin state and are refreshed by \`openclaw wiki compile\`.
|
||||
|
||||
## Notes
|
||||
<!-- openclaw:human:start -->
|
||||
|
||||
Reference in New Issue
Block a user