feat(memory-wiki): add ingest/compile/lint pipeline

This commit is contained in:
Vincent Koc
2026-04-05 20:40:55 +01:00
parent 57d1685a65
commit 516a43f9f2
15 changed files with 983 additions and 0 deletions

View File

@@ -4,6 +4,9 @@
"private": true,
"description": "OpenClaw persistent wiki plugin",
"type": "module",
"dependencies": {
"yaml": "^2.8.3"
},
"devDependencies": {
"openclaw": "workspace:*"
},

View File

@@ -5,6 +5,7 @@ description: Maintain an Obsidian-friendly memory wiki vault with wikilinks, fro
Use this skill when the memory-wiki vault render mode is `obsidian` or the user wants the wiki to play nicely with Obsidian.
- Start from `openclaw wiki status` to confirm the vault mode and whether the official Obsidian CLI is available.
- Prefer `[[Wikilinks]]`, stable filenames, and frontmatter that works with Obsidian dashboards and Dataview-style queries.
- Keep generated sections deterministic so Obsidian users can safely add handwritten notes around them.
- If the official Obsidian CLI is enabled, probe it before depending on it. Do not assume the app is installed, running, or configured.

View File

@@ -6,6 +6,7 @@ description: Maintain the OpenClaw memory wiki vault with deterministic pages, m
Use this skill when working inside a memory-wiki vault.
- Prefer `wiki_status` first when you need to understand the vault mode, path, or Obsidian CLI availability.
- Use `openclaw wiki ingest`, `openclaw wiki compile`, and `openclaw wiki lint` as the default maintenance loop.
- Keep generated sections inside managed markers. Do not overwrite human note blocks.
- Treat raw sources, memory artifacts, and daily notes as evidence. Do not let wiki pages become the only source of truth for new claims.
- Keep page identity stable. Favor updating existing entities and concepts over spawning duplicates with slightly different names.

View File

@@ -1,6 +1,9 @@
import type { Command } from "commander";
import { compileMemoryWikiVault } from "./compile.js";
import type { MemoryWikiPluginConfig, ResolvedMemoryWikiConfig } from "./config.js";
import { resolveMemoryWikiConfig } from "./config.js";
import { ingestMemoryWikiSource } from "./ingest.js";
import { lintMemoryWikiVault } from "./lint.js";
import { renderMemoryWikiStatus, resolveMemoryWikiStatus } from "./status.js";
import { initializeMemoryWikiVault } from "./vault.js";
@@ -12,6 +15,19 @@ type WikiInitCommandOptions = {
json?: boolean;
};
// Option bags parsed by commander for the corresponding `wiki` subcommands.
type WikiCompileCommandOptions = {
  json?: boolean;
};
type WikiLintCommandOptions = {
  json?: boolean;
};
type WikiIngestCommandOptions = {
  json?: boolean;
  // Overrides the title derived from the ingested file's basename.
  title?: string;
};
/** Write `output` to `writer` (stdout by default), guaranteeing one trailing newline. */
function writeOutput(output: string, writer: Pick<NodeJS.WriteStream, "write"> = process.stdout) {
  const terminated = output.endsWith("\n") ? output : `${output}\n`;
  writer.write(terminated);
}
@@ -42,6 +58,51 @@ export async function runWikiInit(params: {
return result;
}
/** Run the compile pass and print a one-line (or JSON) summary of the result. */
export async function runWikiCompile(params: {
  config: ResolvedMemoryWikiConfig;
  json?: boolean;
  stdout?: Pick<NodeJS.WriteStream, "write">;
}) {
  const result = await compileMemoryWikiVault(params.config);
  let summary: string;
  if (params.json) {
    summary = JSON.stringify(result, null, 2);
  } else {
    summary = `Compiled wiki vault at ${result.vaultRoot} (${result.pages.length} pages, ${result.updatedFiles.length} indexes updated).`;
  }
  writeOutput(summary, params.stdout);
  return result;
}
/** Run the lint pass and print a one-line (or JSON) summary of the result. */
export async function runWikiLint(params: {
  config: ResolvedMemoryWikiConfig;
  json?: boolean;
  stdout?: Pick<NodeJS.WriteStream, "write">;
}) {
  const result = await lintMemoryWikiVault(params.config);
  let summary: string;
  if (params.json) {
    summary = JSON.stringify(result, null, 2);
  } else {
    summary = `Linted wiki vault at ${result.vaultRoot} (${result.issueCount} issues, report: ${result.reportPath}).`;
  }
  writeOutput(summary, params.stdout);
  return result;
}
/** Ingest one local file into the vault and print a one-line (or JSON) summary. */
export async function runWikiIngest(params: {
  config: ResolvedMemoryWikiConfig;
  inputPath: string;
  title?: string;
  json?: boolean;
  stdout?: Pick<NodeJS.WriteStream, "write">;
}) {
  const result = await ingestMemoryWikiSource({
    config: params.config,
    inputPath: params.inputPath,
    title: params.title,
  });
  let summary: string;
  if (params.json) {
    summary = JSON.stringify(result, null, 2);
  } else {
    summary = `Ingested ${result.sourcePath} into ${result.pagePath}.`;
  }
  writeOutput(summary, params.stdout);
  return result;
}
export function registerWikiCli(program: Command, pluginConfig?: MemoryWikiPluginConfig) {
const config = resolveMemoryWikiConfig(pluginConfig);
const wiki = program.command("wiki").description("Inspect and initialize the memory wiki vault");
@@ -61,4 +122,30 @@ export function registerWikiCli(program: Command, pluginConfig?: MemoryWikiPlugi
.action(async (opts: WikiInitCommandOptions) => {
await runWikiInit({ config, json: opts.json });
});
wiki
.command("compile")
.description("Refresh generated wiki indexes")
.option("--json", "Print JSON")
.action(async (opts: WikiCompileCommandOptions) => {
await runWikiCompile({ config, json: opts.json });
});
wiki
.command("lint")
.description("Lint the wiki vault and write a report")
.option("--json", "Print JSON")
.action(async (opts: WikiLintCommandOptions) => {
await runWikiLint({ config, json: opts.json });
});
wiki
.command("ingest")
.description("Ingest a local file into the wiki sources folder")
.argument("<path>", "Local file path to ingest")
.option("--title <title>", "Override the source title")
.option("--json", "Print JSON")
.action(async (inputPath: string, opts: WikiIngestCommandOptions) => {
await runWikiIngest({ config, inputPath, title: opts.title, json: opts.json });
});
}

View File

@@ -0,0 +1,70 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, describe, expect, it } from "vitest";
import { compileMemoryWikiVault } from "./compile.js";
import { resolveMemoryWikiConfig } from "./config.js";
import { renderWikiMarkdown } from "./markdown.js";
import { initializeMemoryWikiVault } from "./vault.js";
// Temp vaults created by each test; removed in afterEach so runs stay isolated.
const tempDirs: string[] = [];
afterEach(async () => {
  // splice(0) empties the list so one test's failure cannot leak dirs into
  // the next test's cleanup.
  await Promise.all(tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true })));
});
describe("compileMemoryWikiVault", () => {
  it("writes root and directory indexes for native markdown", async () => {
    const rootDir = await fs.mkdtemp(path.join(os.tmpdir(), "memory-wiki-compile-"));
    tempDirs.push(rootDir);
    // No renderMode given — exercises the default (native markdown links).
    const config = resolveMemoryWikiConfig(
      { vault: { path: rootDir } },
      { homedir: "/Users/tester" },
    );
    await initializeMemoryWikiVault(config);
    // Seed a single source page so the compiler has something to index.
    await fs.writeFile(
      path.join(rootDir, "sources", "alpha.md"),
      renderWikiMarkdown({
        frontmatter: { pageType: "source", id: "source.alpha", title: "Alpha" },
        body: "# Alpha\n",
      }),
      "utf8",
    );
    const result = await compileMemoryWikiVault(config);
    expect(result.pageCounts.source).toBe(1);
    // Both the root index and the per-directory index must link the page.
    await expect(fs.readFile(path.join(rootDir, "index.md"), "utf8")).resolves.toContain(
      "[Alpha](sources/alpha.md)",
    );
    await expect(fs.readFile(path.join(rootDir, "sources", "index.md"), "utf8")).resolves.toContain(
      "[Alpha](sources/alpha.md)",
    );
  });
  it("renders obsidian-friendly links when configured", async () => {
    const rootDir = await fs.mkdtemp(path.join(os.tmpdir(), "memory-wiki-compile-"));
    tempDirs.push(rootDir);
    const config = resolveMemoryWikiConfig(
      { vault: { path: rootDir, renderMode: "obsidian" } },
      { homedir: "/Users/tester" },
    );
    await initializeMemoryWikiVault(config);
    await fs.writeFile(
      path.join(rootDir, "sources", "alpha.md"),
      renderWikiMarkdown({
        frontmatter: { pageType: "source", id: "source.alpha", title: "Alpha" },
        body: "# Alpha\n",
      }),
      "utf8",
    );
    await compileMemoryWikiVault(config);
    // Obsidian mode emits extension-less [[path|title]] wikilinks instead.
    await expect(fs.readFile(path.join(rootDir, "index.md"), "utf8")).resolves.toContain(
      "[[sources/alpha|Alpha]]",
    );
  });
});

View File

@@ -0,0 +1,195 @@
import fs from "node:fs/promises";
import path from "node:path";
import {
replaceManagedMarkdownBlock,
withTrailingNewline,
} from "openclaw/plugin-sdk/memory-host-markdown";
import type { ResolvedMemoryWikiConfig } from "./config.js";
import { appendMemoryWikiLog } from "./log.js";
import {
formatWikiLink,
toWikiPageSummary,
type WikiPageKind,
type WikiPageSummary,
} from "./markdown.js";
import { initializeMemoryWikiVault } from "./vault.js";
// Vault directories that receive generated indexes. Array order controls the
// section order in the root index.
const COMPILE_PAGE_GROUPS: Array<{ kind: WikiPageKind; dir: string; heading: string }> = [
  { kind: "source", dir: "sources", heading: "Sources" },
  { kind: "entity", dir: "entities", heading: "Entities" },
  { kind: "concept", dir: "concepts", heading: "Concepts" },
  { kind: "synthesis", dir: "syntheses", heading: "Syntheses" },
  { kind: "report", dir: "reports", heading: "Reports" },
];
// Outcome of one compile pass over the vault.
export type CompileMemoryWikiResult = {
  vaultRoot: string;
  // Page tally per kind (entity/concept/source/synthesis/report).
  pageCounts: Record<WikiPageKind, number>;
  // All parsed pages, sorted by title.
  pages: WikiPageSummary[];
  // Absolute paths of every index file rewritten by this pass.
  updatedFiles: string[];
};
/**
 * List the vault-relative markdown page files directly inside `relativeDir`,
 * sorted by path. `index.md` is excluded and a missing directory is treated
 * as empty rather than raising.
 */
async function collectMarkdownFiles(rootDir: string, relativeDir: string): Promise<string[]> {
  const dirPath = path.join(rootDir, relativeDir);
  const entries = await fs.readdir(dirPath, { withFileTypes: true }).catch(() => []);
  const files: string[] = [];
  for (const entry of entries) {
    if (!entry.isFile() || !entry.name.endsWith(".md") || entry.name === "index.md") {
      continue;
    }
    files.push(path.join(relativeDir, entry.name));
  }
  return files.sort((left, right) => left.localeCompare(right));
}
/**
 * Read and parse every page file across all page groups, dropping files that
 * do not map to a recognized page kind, and return summaries sorted by title.
 */
async function readPageSummaries(rootDir: string): Promise<WikiPageSummary[]> {
  const fileLists = await Promise.all(
    COMPILE_PAGE_GROUPS.map((group) => collectMarkdownFiles(rootDir, group.dir)),
  );
  const summaries = await Promise.all(
    fileLists.flat().map(async (relativePath) => {
      const absolutePath = path.join(rootDir, relativePath);
      const raw = await fs.readFile(absolutePath, "utf8");
      return toWikiPageSummary({ absolutePath, relativePath, raw });
    }),
  );
  return summaries
    .filter((page): page is WikiPageSummary => page !== null)
    .sort((left, right) => left.title.localeCompare(right.title));
}
/**
 * Tally pages per kind. Single pass over the list instead of filtering the
 * whole array once per kind (was O(5n) with five intermediate arrays).
 */
function buildPageCounts(pages: WikiPageSummary[]): Record<WikiPageKind, number> {
  const counts: Record<WikiPageKind, number> = {
    entity: 0,
    concept: 0,
    source: 0,
    synthesis: 0,
    report: 0,
  };
  for (const page of pages) {
    counts[page.kind] += 1;
  }
  return counts;
}
/**
 * Render a bullet list linking each page, or a single placeholder bullet when
 * the group is empty.
 */
function renderSectionList(params: {
  config: ResolvedMemoryWikiConfig;
  pages: WikiPageSummary[];
  emptyText: string;
}): string {
  if (params.pages.length === 0) {
    return `- ${params.emptyText}`;
  }
  const bullets: string[] = [];
  for (const page of params.pages) {
    const link = formatWikiLink({
      renderMode: params.config.vault.renderMode,
      relativePath: page.relativePath,
      title: page.title,
    });
    bullets.push(`- ${link}`);
  }
  return bullets.join("\n");
}
/**
 * Rewrite the managed "## Generated" block of `filePath`, creating the file
 * as a bare `# <title>` document when it is missing or unreadable. Text
 * outside the markers is passed through replaceManagedMarkdownBlock —
 * presumably preserved; see the SDK helper for the exact contract.
 */
async function writeManagedMarkdownFile(params: {
  filePath: string;
  title: string;
  startMarker: string;
  endMarker: string;
  body: string;
}): Promise<void> {
  const original = await fs.readFile(params.filePath, "utf8").catch(() => `# ${params.title}\n`);
  const updated = replaceManagedMarkdownBlock({
    original,
    heading: "## Generated",
    startMarker: params.startMarker,
    endMarker: params.endMarker,
    body: params.body,
  });
  await fs.writeFile(params.filePath, withTrailingNewline(updated), "utf8");
}
/**
 * Build the root index body: summary bullets first, then one
 * `### <Heading>` section per page group in COMPILE_PAGE_GROUPS order.
 */
function buildRootIndexBody(params: {
  config: ResolvedMemoryWikiConfig;
  pages: WikiPageSummary[];
  counts: Record<WikiPageKind, number>;
}): string {
  const { config, pages, counts } = params;
  const summary = [
    `- Render mode: \`${config.vault.renderMode}\``,
    `- Total pages: ${pages.length}`,
    `- Sources: ${counts.source}`,
    `- Entities: ${counts.entity}`,
    `- Concepts: ${counts.concept}`,
    `- Syntheses: ${counts.synthesis}`,
    `- Reports: ${counts.report}`,
  ];
  const sections = COMPILE_PAGE_GROUPS.flatMap((group) => [
    "",
    `### ${group.heading}`,
    renderSectionList({
      config,
      pages: pages.filter((page) => page.kind === group.kind),
      emptyText: `No ${group.heading.toLowerCase()} yet.`,
    }),
  ]);
  return [...summary, ...sections].join("\n");
}
/** A directory index is just the rendered section list for that group's pages. */
function buildDirectoryIndexBody(params: {
  config: ResolvedMemoryWikiConfig;
  pages: WikiPageSummary[];
  group: { kind: WikiPageKind; dir: string; heading: string };
}): string {
  const groupPages = params.pages.filter((page) => page.kind === params.group.kind);
  const emptyText = `No ${params.group.heading.toLowerCase()} yet.`;
  return renderSectionList({ config: params.config, pages: groupPages, emptyText });
}
/**
 * Rebuild the generated index blocks for the vault: the root `index.md` plus
 * one `index.md` per page-group directory. The vault is initialized first, so
 * this is safe to run against a fresh path. A `compile` entry is appended to
 * the vault log.
 */
export async function compileMemoryWikiVault(
  config: ResolvedMemoryWikiConfig,
): Promise<CompileMemoryWikiResult> {
  await initializeMemoryWikiVault(config);
  const rootDir = config.vault.path;
  const pages = await readPageSummaries(rootDir);
  const counts = buildPageCounts(pages);
  const updatedFiles: string[] = [];
  const rootIndexPath = path.join(rootDir, "index.md");
  await writeManagedMarkdownFile({
    filePath: rootIndexPath,
    title: "Wiki Index",
    startMarker: "<!-- openclaw:wiki:index:start -->",
    endMarker: "<!-- openclaw:wiki:index:end -->",
    body: buildRootIndexBody({ config, pages, counts }),
  });
  updatedFiles.push(rootIndexPath);
  for (const group of COMPILE_PAGE_GROUPS) {
    const filePath = path.join(rootDir, group.dir, "index.md");
    await writeManagedMarkdownFile({
      filePath,
      title: group.heading,
      // Markers are namespaced per directory so managed blocks cannot collide.
      startMarker: `<!-- openclaw:wiki:${group.dir}:index:start -->`,
      endMarker: `<!-- openclaw:wiki:${group.dir}:index:end -->`,
      body: buildDirectoryIndexBody({ config, pages, group }),
    });
    updatedFiles.push(filePath);
  }
  // Log paths relative to the vault root so the log is portable.
  await appendMemoryWikiLog(rootDir, {
    type: "compile",
    timestamp: new Date().toISOString(),
    details: {
      pageCounts: counts,
      updatedFiles: updatedFiles.map((filePath) => path.relative(rootDir, filePath)),
    },
  });
  return {
    vaultRoot: rootDir,
    pageCounts: counts,
    pages,
    updatedFiles,
  };
}

View File

@@ -0,0 +1,37 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, describe, expect, it } from "vitest";
import { resolveMemoryWikiConfig } from "./config.js";
import { ingestMemoryWikiSource } from "./ingest.js";
// Temp dirs created by each test; removed in afterEach so runs stay isolated.
const tempDirs: string[] = [];
afterEach(async () => {
  await Promise.all(tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true })));
});
describe("ingestMemoryWikiSource", () => {
  it("copies a local text file into sources markdown", async () => {
    const rootDir = await fs.mkdtemp(path.join(os.tmpdir(), "memory-wiki-ingest-"));
    tempDirs.push(rootDir);
    // The input lives outside the vault; ingest should copy it in.
    const inputPath = path.join(rootDir, "meeting-notes.txt");
    await fs.writeFile(inputPath, "hello from source\n", "utf8");
    const config = resolveMemoryWikiConfig(
      { vault: { path: path.join(rootDir, "vault") } },
      { homedir: "/Users/tester" },
    );
    const result = await ingestMemoryWikiSource({
      config,
      inputPath,
      // Fixed clock keeps frontmatter timestamps deterministic.
      nowMs: Date.UTC(2026, 3, 5, 12, 0, 0),
    });
    // Slug is derived from the file basename ("meeting-notes").
    expect(result.pageId).toBe("source.meeting-notes");
    expect(result.pagePath).toBe("sources/meeting-notes.md");
    await expect(
      fs.readFile(path.join(config.vault.path, "sources", "meeting-notes.md"), "utf8"),
    ).resolves.toContain("hello from source");
  });
});

View File

@@ -0,0 +1,108 @@
import fs from "node:fs/promises";
import path from "node:path";
import type { ResolvedMemoryWikiConfig } from "./config.js";
import { appendMemoryWikiLog } from "./log.js";
import { renderMarkdownFence, renderWikiMarkdown, slugifyWikiSegment } from "./markdown.js";
import { initializeMemoryWikiVault } from "./vault.js";
// Outcome of ingesting one local file.
export type IngestMemoryWikiSourceResult = {
  // Absolute path of the ingested input file.
  sourcePath: string;
  pageId: string;
  // Vault-relative page path, normalized to forward slashes.
  pagePath: string;
  title: string;
  bytes: number;
  // True when the page did not exist before this ingest.
  created: boolean;
};
/** True when `filePath` is accessible; any fs error is treated as "missing". */
async function pathExists(filePath: string): Promise<boolean> {
  try {
    await fs.access(filePath);
    return true;
  } catch {
    return false;
  }
}
/**
 * Pick the page title: a non-blank explicit title wins; otherwise humanize
 * the file basename (extension stripped, dash/underscore runs → spaces).
 */
function resolveSourceTitle(sourcePath: string, explicitTitle?: string): string {
  const explicit = explicitTitle?.trim();
  if (explicit) {
    return explicit;
  }
  const baseName = path.basename(sourcePath, path.extname(sourcePath));
  return baseName.replace(/[-_]+/g, " ").trim();
}
/**
 * Decode `buffer` as UTF-8 text, rejecting likely-binary input. Heuristic: a
 * NUL byte anywhere in the first 4 KiB means the file is treated as binary.
 */
function assertUtf8Text(buffer: Buffer, sourcePath: string): string {
  const sniffLength = Math.min(buffer.length, 4096);
  for (let index = 0; index < sniffLength; index += 1) {
    if (buffer[index] === 0) {
      throw new Error(`Cannot ingest binary file as markdown source: ${sourcePath}`);
    }
  }
  return buffer.toString("utf8");
}
/**
 * Copy a local UTF-8 text file into the vault as `sources/<slug>.md`.
 *
 * Initializes the vault if needed, refuses binary input, renders a page with
 * frontmatter plus the raw content inside a fenced block, and appends an
 * `ingest` entry to the vault log.
 *
 * NOTE(review): fs.writeFile below unconditionally rewrites an existing page,
 * so anything handwritten between the "## Notes" human markers of a
 * previously ingested page is lost on re-ingest — confirm this is intended.
 */
export async function ingestMemoryWikiSource(params: {
  config: ResolvedMemoryWikiConfig;
  inputPath: string;
  // Optional display title; defaults to the humanized file basename.
  title?: string;
  // Clock override for deterministic tests.
  nowMs?: number;
}): Promise<IngestMemoryWikiSourceResult> {
  await initializeMemoryWikiVault(params.config, { nowMs: params.nowMs });
  const sourcePath = path.resolve(params.inputPath);
  const buffer = await fs.readFile(sourcePath);
  const content = assertUtf8Text(buffer, sourcePath);
  const title = resolveSourceTitle(sourcePath, params.title);
  const slug = slugifyWikiSegment(title);
  const pageId = `source.${slug}`;
  const pageRelativePath = path.join("sources", `${slug}.md`);
  const pagePath = path.join(params.config.vault.path, pageRelativePath);
  // Probed before the write so the log can distinguish new pages from re-ingests.
  const created = !(await pathExists(pagePath));
  const timestamp = new Date(params.nowMs ?? Date.now()).toISOString();
  const markdown = renderWikiMarkdown({
    frontmatter: {
      pageType: "source",
      id: pageId,
      title,
      sourceType: "local-file",
      sourcePath,
      ingestedAt: timestamp,
      updatedAt: timestamp,
      status: "active",
    },
    body: [
      `# ${title}`,
      "",
      "## Source",
      `- Type: \`local-file\``,
      `- Path: \`${sourcePath}\``,
      `- Bytes: ${buffer.byteLength}`,
      `- Updated: ${timestamp}`,
      "",
      "## Content",
      renderMarkdownFence(content, "text"),
      "",
      "## Notes",
      "<!-- openclaw:human:start -->",
      "<!-- openclaw:human:end -->",
      "",
    ].join("\n"),
  });
  await fs.writeFile(pagePath, markdown, "utf8");
  await appendMemoryWikiLog(params.config.vault.path, {
    type: "ingest",
    timestamp,
    details: {
      inputPath: sourcePath,
      pageId,
      // Normalize to forward slashes so log entries are OS-independent.
      pagePath: pageRelativePath.split(path.sep).join("/"),
      bytes: buffer.byteLength,
      created,
    },
  });
  return {
    sourcePath,
    pageId,
    pagePath: pageRelativePath.split(path.sep).join("/"),
    title,
    bytes: buffer.byteLength,
    created,
  };
}

View File

@@ -0,0 +1,45 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, describe, expect, it } from "vitest";
import { resolveMemoryWikiConfig } from "./config.js";
import { lintMemoryWikiVault } from "./lint.js";
import { renderWikiMarkdown } from "./markdown.js";
import { initializeMemoryWikiVault } from "./vault.js";
// Temp vaults created by each test; removed in afterEach so runs stay isolated.
const tempDirs: string[] = [];
afterEach(async () => {
  await Promise.all(tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true })));
});
describe("lintMemoryWikiVault", () => {
  it("detects duplicate ids and missing sourceIds", async () => {
    const rootDir = await fs.mkdtemp(path.join(os.tmpdir(), "memory-wiki-lint-"));
    tempDirs.push(rootDir);
    const config = resolveMemoryWikiConfig(
      { vault: { path: rootDir, renderMode: "obsidian" } },
      { homedir: "/Users/tester" },
    );
    await initializeMemoryWikiVault(config);
    // One document written to two kind directories: triggers duplicate-id,
    // page-type-mismatch (in concepts/), missing-source-ids, and a broken
    // [[missing-page]] wikilink.
    const duplicate = renderWikiMarkdown({
      frontmatter: {
        pageType: "entity",
        id: "entity.alpha",
        title: "Alpha",
      },
      body: "# Alpha\n\n[[missing-page]]\n",
    });
    await fs.writeFile(path.join(rootDir, "entities", "alpha.md"), duplicate, "utf8");
    await fs.writeFile(path.join(rootDir, "concepts", "alpha.md"), duplicate, "utf8");
    const result = await lintMemoryWikiVault(config);
    expect(result.issueCount).toBeGreaterThan(0);
    expect(result.issues.map((issue) => issue.code)).toContain("duplicate-id");
    expect(result.issues.map((issue) => issue.code)).toContain("missing-source-ids");
    expect(result.issues.map((issue) => issue.code)).toContain("broken-wikilink");
    // The persisted report should contain the Errors section.
    await expect(fs.readFile(result.reportPath, "utf8")).resolves.toContain("### Errors");
  });
});

View File

@@ -0,0 +1,203 @@
import fs from "node:fs/promises";
import path from "node:path";
import {
replaceManagedMarkdownBlock,
withTrailingNewline,
} from "openclaw/plugin-sdk/memory-host-markdown";
import { compileMemoryWikiVault } from "./compile.js";
import type { ResolvedMemoryWikiConfig } from "./config.js";
import { appendMemoryWikiLog } from "./log.js";
import { renderWikiMarkdown, toWikiPageSummary, type WikiPageSummary } from "./markdown.js";
// One lint finding. `code` is a stable machine-readable identifier and
// `path` is the vault-relative page the finding applies to.
export type MemoryWikiLintIssue = {
  severity: "error" | "warning";
  code:
    | "missing-id"
    | "duplicate-id"
    | "missing-page-type"
    | "page-type-mismatch"
    | "missing-title"
    | "missing-source-ids"
    | "broken-wikilink";
  path: string;
  message: string;
};

// Aggregate lint outcome; `reportPath` points at the reports/lint.md file.
export type LintMemoryWikiResult = {
  vaultRoot: string;
  issueCount: number;
  issues: MemoryWikiLintIssue[];
  reportPath: string;
};
// The directory-derived kind is the canonical `pageType`; lint flags any
// frontmatter value that disagrees with it.
function toExpectedPageType(page: WikiPageSummary): string {
  return page.kind;
}
/**
 * Warn about `[[wikilinks]]` whose target is not a known page. Every page can
 * be referenced either by its extension-less relative path or by its bare
 * basename (Obsidian-style shortest link).
 */
function collectBrokenLinkIssues(pages: WikiPageSummary[]): MemoryWikiLintIssue[] {
  const validTargets = new Set<string>();
  pages.forEach((page) => {
    const target = page.relativePath.replace(/\.md$/i, "");
    validTargets.add(target);
    validTargets.add(path.basename(target));
  });
  const issues: MemoryWikiLintIssue[] = [];
  pages.forEach((page) => {
    page.linkTargets
      .filter((linkTarget) => !validTargets.has(linkTarget))
      .forEach((linkTarget) => {
        issues.push({
          severity: "warning",
          code: "broken-wikilink",
          path: page.relativePath,
          message: `Broken wikilink target \`${linkTarget}\`.`,
        });
      });
  });
  return issues;
}
/**
 * Run all lint checks and return the findings sorted by page path.
 * Per-page frontmatter checks run first, then duplicate-id detection over
 * the whole set, then broken-link detection; the stable sort preserves that
 * phase order among issues sharing a path.
 */
function collectPageIssues(pages: WikiPageSummary[]): MemoryWikiLintIssue[] {
  const issues: MemoryWikiLintIssue[] = [];
  // Pages grouped by frontmatter id, for duplicate detection below.
  const pagesById = new Map<string, WikiPageSummary[]>();
  for (const page of pages) {
    if (!page.id) {
      issues.push({
        severity: "error",
        code: "missing-id",
        path: page.relativePath,
        message: "Missing `id` frontmatter.",
      });
    } else {
      const current = pagesById.get(page.id) ?? [];
      current.push(page);
      pagesById.set(page.id, current);
    }
    if (!page.pageType) {
      issues.push({
        severity: "error",
        code: "missing-page-type",
        path: page.relativePath,
        message: "Missing `pageType` frontmatter.",
      });
    } else if (page.pageType !== toExpectedPageType(page)) {
      // The directory a page lives in (page.kind) is authoritative.
      issues.push({
        severity: "error",
        code: "page-type-mismatch",
        path: page.relativePath,
        message: `Expected pageType \`${toExpectedPageType(page)}\`, found \`${page.pageType}\`.`,
      });
    }
    if (!page.title.trim()) {
      issues.push({
        severity: "error",
        code: "missing-title",
        path: page.relativePath,
        message: "Missing page title.",
      });
    }
    // Provenance is required for everything except raw sources and reports.
    if (page.kind !== "source" && page.kind !== "report" && page.sourceIds.length === 0) {
      issues.push({
        severity: "warning",
        code: "missing-source-ids",
        path: page.relativePath,
        message: "Non-source page is missing `sourceIds` provenance.",
      });
    }
  }
  for (const [id, matches] of pagesById.entries()) {
    if (matches.length > 1) {
      // Flag every page carrying the duplicated id, not just the later ones.
      for (const match of matches) {
        issues.push({
          severity: "error",
          code: "duplicate-id",
          path: match.relativePath,
          message: `Duplicate page id \`${id}\`.`,
        });
      }
    }
  }
  issues.push(...collectBrokenLinkIssues(pages));
  return issues.toSorted((left, right) => left.path.localeCompare(right.path));
}
/**
 * Render the lint findings as markdown: count bullets, then an "### Errors"
 * and/or "### Warnings" section. Empty input yields "No issues found.".
 */
function buildLintReportBody(issues: MemoryWikiLintIssue[]): string {
  if (issues.length === 0) {
    return "No issues found.";
  }
  const errors: MemoryWikiLintIssue[] = [];
  const warnings: MemoryWikiLintIssue[] = [];
  for (const issue of issues) {
    (issue.severity === "error" ? errors : warnings).push(issue);
  }
  const lines: string[] = [`- Errors: ${errors.length}`, `- Warnings: ${warnings.length}`];
  const appendSection = (heading: string, sectionIssues: MemoryWikiLintIssue[]) => {
    if (sectionIssues.length === 0) {
      return;
    }
    lines.push("", heading);
    for (const issue of sectionIssues) {
      lines.push(`- \`${issue.path}\`: ${issue.message}`);
    }
  };
  appendSection("### Errors", errors);
  appendSection("### Warnings", warnings);
  return lines.join("\n");
}
/**
 * Persist the findings into the managed block of reports/lint.md and return
 * the report path. When the report is missing or unreadable a fresh page
 * with report frontmatter is synthesized as the base document.
 */
async function writeLintReport(rootDir: string, issues: MemoryWikiLintIssue[]): Promise<string> {
  const reportPath = path.join(rootDir, "reports", "lint.md");
  const original = await fs.readFile(reportPath, "utf8").catch(() =>
    renderWikiMarkdown({
      frontmatter: {
        pageType: "report",
        id: "report.lint",
        title: "Lint Report",
        status: "active",
      },
      body: "# Lint Report\n",
    }),
  );
  const updated = replaceManagedMarkdownBlock({
    original,
    heading: "## Generated",
    startMarker: "<!-- openclaw:wiki:lint:start -->",
    endMarker: "<!-- openclaw:wiki:lint:end -->",
    body: buildLintReportBody(issues),
  });
  await fs.writeFile(reportPath, withTrailingNewline(updated), "utf8");
  return reportPath;
}
/**
 * Lint the vault: compile first (which also initializes the vault and
 * refreshes indexes), check every page, persist findings to reports/lint.md,
 * and append a `lint` entry to the vault log.
 */
export async function lintMemoryWikiVault(
  config: ResolvedMemoryWikiConfig,
): Promise<LintMemoryWikiResult> {
  const compileResult = await compileMemoryWikiVault(config);
  const issues = collectPageIssues(compileResult.pages);
  const reportPath = await writeLintReport(config.vault.path, issues);
  await appendMemoryWikiLog(config.vault.path, {
    type: "lint",
    timestamp: new Date().toISOString(),
    details: {
      issueCount: issues.length,
      // Logged relative to the vault root so log entries are portable.
      reportPath: path.relative(config.vault.path, reportPath),
    },
  });
  return {
    vaultRoot: config.vault.path,
    issueCount: issues.length,
    issues,
    reportPath,
  };
}

View File

@@ -0,0 +1,17 @@
import fs from "node:fs/promises";
import path from "node:path";
// One JSONL audit entry recorded under .openclaw-wiki/log.jsonl.
export type MemoryWikiLogEntry = {
  type: "init" | "ingest" | "compile" | "lint";
  // ISO-8601 timestamp supplied by the caller.
  timestamp: string;
  details?: Record<string, unknown>;
};
/**
 * Append one entry as a JSON line to the vault's audit log, creating the
 * .openclaw-wiki directory when it does not exist yet.
 */
export async function appendMemoryWikiLog(
  vaultRoot: string,
  entry: MemoryWikiLogEntry,
): Promise<void> {
  const logDir = path.join(vaultRoot, ".openclaw-wiki");
  await fs.mkdir(logDir, { recursive: true });
  const line = `${JSON.stringify(entry)}\n`;
  await fs.appendFile(path.join(logDir, "log.jsonl"), line, "utf8");
}

View File

@@ -0,0 +1,157 @@
import path from "node:path";
import YAML from "yaml";
// The five page families a vault can contain; each maps 1:1 to a top-level
// vault directory (see inferWikiPageKind).
export const WIKI_PAGE_KINDS = ["entity", "concept", "source", "synthesis", "report"] as const;
export type WikiPageKind = (typeof WIKI_PAGE_KINDS)[number];

// Result of splitting a markdown document into YAML frontmatter and body.
export type ParsedWikiMarkdown = {
  frontmatter: Record<string, unknown>;
  body: string;
};

// Lightweight parse-once view of a page, consumed by compile/lint/status.
export type WikiPageSummary = {
  absolutePath: string;
  // Vault-relative path, normalized to forward slashes.
  relativePath: string;
  kind: WikiPageKind;
  title: string;
  id?: string;
  pageType?: string;
  sourceIds: string[];
  // Raw `[[...]]` targets found anywhere in the document.
  linkTargets: string[];
};

// Leading `---` YAML frontmatter block (non-greedy: first closing fence wins).
const FRONTMATTER_PATTERN = /^---\n([\s\S]*?)\n---\n?/;
// Matches `[[target]]` and `[[target|alias]]`; capture group 1 is the target.
const OBSIDIAN_LINK_PATTERN = /\[\[([^\]|]+)(?:\|[^\]]+)?\]\]/g;
/**
 * Convert arbitrary text into a stable, lowercase, dash-delimited file slug.
 * Falls back to "page" when nothing alphanumeric survives.
 */
export function slugifyWikiSegment(raw: string): string {
  const parts = raw
    .toLowerCase()
    .split(/[^a-z0-9]+/)
    .filter(Boolean);
  return parts.length > 0 ? parts.join("-") : "page";
}
/**
 * Split a markdown document into YAML frontmatter and body. Returns an empty
 * frontmatter object when there is no leading `---` block or when the YAML
 * parses to a non-object (scalar, array, or null).
 *
 * NOTE(review): YAML.parse throws on malformed YAML, so a corrupted
 * frontmatter block propagates as an exception to callers — confirm intended.
 */
export function parseWikiMarkdown(content: string): ParsedWikiMarkdown {
  const match = content.match(FRONTMATTER_PATTERN);
  if (!match) {
    return { frontmatter: {}, body: content };
  }
  const parsed = YAML.parse(match[1]) as unknown;
  return {
    frontmatter:
      parsed && typeof parsed === "object" && !Array.isArray(parsed)
        ? (parsed as Record<string, unknown>)
        : {},
    // Body is everything after the closing frontmatter fence.
    body: content.slice(match[0].length),
  };
}
/**
 * Serialize frontmatter + body into the canonical page layout:
 * `---\n<yaml>\n---\n\n<body>`. Output is deterministic for a given key
 * insertion order, which keeps vault diffs stable.
 */
export function renderWikiMarkdown(params: {
  frontmatter: Record<string, unknown>;
  body: string;
}): string {
  const frontmatter = YAML.stringify(params.frontmatter).trimEnd();
  return `---\n${frontmatter}\n---\n\n${params.body.trimStart()}`;
}
/**
 * Return the text of the first level-1 ATX heading (`# Title`), trimmed, or
 * undefined when no such heading exists or its text is blank.
 */
export function extractTitleFromMarkdown(body: string): string | undefined {
  for (const line of body.split("\n")) {
    const match = /^#\s+(.+?)\s*$/.exec(line);
    if (match) {
      return match[1]?.trim() || undefined;
    }
  }
  return undefined;
}
/**
 * Coerce a frontmatter `sourceIds` value into a clean string array: a lone
 * string becomes a one-element array; non-strings and blanks are dropped.
 */
export function normalizeSourceIds(value: unknown): string[] {
  const candidates = Array.isArray(value) ? value : [value];
  const ids: string[] = [];
  for (const candidate of candidates) {
    if (typeof candidate === "string") {
      const trimmed = candidate.trim();
      if (trimmed) {
        ids.push(trimmed);
      }
    }
  }
  return ids;
}
/**
 * Collect the targets of every `[[target]]` / `[[target|alias]]` wikilink in
 * document order, trimmed, dropping whitespace-only targets.
 */
export function extractWikiLinks(markdown: string): string[] {
  // Fresh regex per call avoids shared lastIndex state on the global flag.
  const pattern = /\[\[([^\]|]+)(?:\|[^\]]+)?\]\]/g;
  const links: string[] = [];
  let match: RegExpExecArray | null;
  while ((match = pattern.exec(markdown)) !== null) {
    const target = match[1]?.trim();
    if (target) {
      links.push(target);
    }
  }
  return links;
}
/**
 * Render a link to a page: standard markdown `[title](path)` in native mode,
 * extension-less `[[path|title]]` wikilink in obsidian mode.
 */
export function formatWikiLink(params: {
  renderMode: "native" | "obsidian";
  relativePath: string;
  title: string;
}): string {
  const { renderMode, relativePath, title } = params;
  if (renderMode === "obsidian") {
    const target = relativePath.replace(/\.md$/i, "");
    return `[[${target}|${title}]]`;
  }
  return `[${title}](${relativePath})`;
}
/**
 * Wrap `content` in a backtick code fence that cannot be terminated early:
 * the fence is one backtick longer than the longest backtick run inside the
 * content (minimum three). Scans with a loop instead of spreading every
 * match into Math.max, which could exceed the engine's argument limit on
 * content with very many backtick runs.
 */
export function renderMarkdownFence(content: string, infoString = "text"): string {
  let longestRun = 0;
  for (const match of content.matchAll(/`+/g)) {
    longestRun = Math.max(longestRun, match[0].length);
  }
  const fence = "`".repeat(Math.max(3, longestRun + 1));
  return `${fence}${infoString}\n${content}\n${fence}`;
}
/**
 * Map a vault-relative path to its page kind by directory prefix, or null
 * for paths outside the recognized page directories.
 */
export function inferWikiPageKind(relativePath: string): WikiPageKind | null {
  const prefixes: Array<[string, WikiPageKind]> = [
    ["entities/", "entity"],
    ["concepts/", "concept"],
    ["sources/", "source"],
    ["syntheses/", "synthesis"],
    ["reports/", "report"],
  ];
  const normalized = relativePath.split(path.sep).join("/");
  for (const [prefix, kind] of prefixes) {
    if (normalized.startsWith(prefix)) {
      return kind;
    }
  }
  return null;
}
/**
 * Parse a raw page file into a WikiPageSummary, or null when the path does
 * not live under a recognized page directory.
 */
export function toWikiPageSummary(params: {
  absolutePath: string;
  relativePath: string;
  raw: string;
}): WikiPageSummary | null {
  const kind = inferWikiPageKind(params.relativePath);
  if (!kind) {
    return null;
  }
  const parsed = parseWikiMarkdown(params.raw);
  // Title preference: frontmatter `title`, then the first `#` heading, then
  // the file basename.
  const title =
    (typeof parsed.frontmatter.title === "string" && parsed.frontmatter.title.trim()) ||
    extractTitleFromMarkdown(parsed.body) ||
    path.basename(params.relativePath, ".md");
  return {
    absolutePath: params.absolutePath,
    // Normalize to forward slashes so summaries are OS-independent.
    relativePath: params.relativePath.split(path.sep).join("/"),
    kind,
    title,
    id:
      typeof parsed.frontmatter.id === "string" && parsed.frontmatter.id.trim()
        ? parsed.frontmatter.id.trim()
        : undefined,
    pageType:
      typeof parsed.frontmatter.pageType === "string" && parsed.frontmatter.pageType.trim()
        ? parsed.frontmatter.pageType.trim()
        : undefined,
    sourceIds: normalizeSourceIds(parsed.frontmatter.sourceIds),
    // Links are extracted from the raw document, frontmatter included.
    linkTargets: extractWikiLinks(params.raw),
  };
}

View File

@@ -67,10 +67,18 @@ describe("renderMemoryWikiStatus", () => {
allowPrivateMemoryCoreAccess: false,
pathCount: 0,
},
pageCounts: {
source: 0,
entity: 0,
concept: 0,
synthesis: 0,
report: 0,
},
warnings: [{ code: "vault-missing", message: "Wiki vault has not been initialized yet." }],
});
expect(rendered).toContain("Wiki vault mode: isolated");
expect(rendered).toContain("Pages: 0 sources, 0 entities, 0 concepts, 0 syntheses, 0 reports");
expect(rendered).toContain("Warnings:");
expect(rendered).toContain("Wiki vault has not been initialized yet.");
});

View File

@@ -2,6 +2,7 @@ import { constants as fsConstants } from "node:fs";
import fs from "node:fs/promises";
import path from "node:path";
import type { ResolvedMemoryWikiConfig } from "./config.js";
import { inferWikiPageKind, type WikiPageKind } from "./markdown.js";
export type MemoryWikiStatusWarning = {
code:
@@ -30,6 +31,7 @@ export type MemoryWikiStatus = {
allowPrivateMemoryCoreAccess: boolean;
pathCount: number;
};
pageCounts: Record<WikiPageKind, number>;
warnings: MemoryWikiStatusWarning[];
};
@@ -80,6 +82,32 @@ async function resolveCommandOnPath(command: string): Promise<string | null> {
return null;
}
/**
 * Count non-index markdown pages in each kind directory of the vault.
 * Missing directories are treated as empty rather than raising.
 */
async function collectPageCounts(vaultPath: string): Promise<Record<WikiPageKind, number>> {
  const counts: Record<WikiPageKind, number> = {
    entity: 0,
    concept: 0,
    source: 0,
    synthesis: 0,
    report: 0,
  };
  const dirs = ["entities", "concepts", "sources", "syntheses", "reports"] as const;
  for (const dir of dirs) {
    const entries = await fs
      .readdir(path.join(vaultPath, dir), { withFileTypes: true })
      .catch(() => []);
    for (const entry of entries) {
      if (!entry.isFile() || !entry.name.endsWith(".md") || entry.name === "index.md") {
        continue;
      }
      // inferWikiPageKind re-derives the kind from the directory prefix; for
      // these dirs it should always resolve, so the null guard is defensive.
      const kind = inferWikiPageKind(path.join(dir, entry.name));
      if (kind) {
        counts[kind] += 1;
      }
    }
  }
  return counts;
}
function buildWarnings(params: {
config: ResolvedMemoryWikiConfig;
vaultExists: boolean;
@@ -147,6 +175,15 @@ export async function resolveMemoryWikiStatus(
const resolveCommand = deps?.resolveCommand ?? resolveCommandOnPath;
const vaultExists = await exists(config.vault.path);
const obsidianCommand = await resolveCommand("obsidian");
const pageCounts = vaultExists
? await collectPageCounts(config.vault.path)
: {
entity: 0,
concept: 0,
source: 0,
synthesis: 0,
report: 0,
};
return {
vaultMode: config.vaultMode,
@@ -164,6 +201,7 @@ export async function resolveMemoryWikiStatus(
allowPrivateMemoryCoreAccess: config.unsafeLocal.allowPrivateMemoryCoreAccess,
pathCount: config.unsafeLocal.paths.length,
},
pageCounts,
warnings: buildWarnings({ config, vaultExists, obsidianCommand }),
};
}
@@ -176,6 +214,7 @@ export function renderMemoryWikiStatus(status: MemoryWikiStatus): string {
`Obsidian CLI: ${status.obsidianCli.available ? "available" : "missing"}${status.obsidianCli.requested ? " (requested)" : ""}`,
`Bridge: ${status.bridge.enabled ? "enabled" : "disabled"}`,
`Unsafe local: ${status.unsafeLocal.allowPrivateMemoryCoreAccess ? `enabled (${status.unsafeLocal.pathCount} paths)` : "disabled"}`,
`Pages: ${status.pageCounts.source} sources, ${status.pageCounts.entity} entities, ${status.pageCounts.concept} concepts, ${status.pageCounts.synthesis} syntheses, ${status.pageCounts.report} reports`,
];
if (status.warnings.length > 0) {

View File

@@ -5,6 +5,7 @@ import {
withTrailingNewline,
} from "openclaw/plugin-sdk/memory-host-markdown";
import type { ResolvedMemoryWikiConfig } from "./config.js";
import { appendMemoryWikiLog } from "./log.js";
export const WIKI_VAULT_DIRECTORIES = [
"entities",
@@ -135,6 +136,17 @@ export async function initializeMemoryWikiVault(
);
await writeFileIfMissing(path.join(rootDir, ".openclaw-wiki", "log.jsonl"), "", createdFiles);
if (createdDirectories.length > 0 || createdFiles.length > 0) {
await appendMemoryWikiLog(rootDir, {
type: "init",
timestamp: new Date(options?.nowMs ?? Date.now()).toISOString(),
details: {
createdDirectories: createdDirectories.map((dir) => path.relative(rootDir, dir) || "."),
createdFiles: createdFiles.map((file) => path.relative(rootDir, file)),
},
});
}
return {
rootDir,
created: createdDirectories.length > 0 || createdFiles.length > 0,