mirror of
https://github.com/moltbot/moltbot.git
synced 2026-03-07 22:44:16 +00:00
fix: harden zip extraction writes
This commit is contained in:
@@ -106,6 +106,7 @@ Docs: https://docs.openclaw.ai
|
||||
- Slack/system-event session routing: resolve reaction/member/pin/interaction system-event session keys through channel/account bindings (with sender-aware DM routing) so inbound Slack events target the correct agent session in multi-account setups instead of defaulting to `agent:main`. (#34045) Thanks @paulomcg, @daht-mad and @vincentkoc.
|
||||
- Slack/native streaming markdown conversion: stop pre-normalizing text passed to Slack native `markdown_text` in streaming start/append/stop paths to prevent Markdown style corruption from double conversion. (#34931)
|
||||
- Gateway/HTTP tools invoke media compatibility: preserve raw media payload access for direct `/tools/invoke` clients by allowing media `nodes` invoke commands only in HTTP tool context, while keeping agent-context media invoke blocking to prevent base64 prompt bloat. (#34365) Thanks @obviyus.
|
||||
- Security/archive ZIP hardening: extract ZIP entries via same-directory temp files plus atomic rename, then re-open and reject post-rename hardlink alias races outside the destination root.
|
||||
- Agents/Nodes media outputs: add dedicated `photos_latest` action handling, block media-returning `nodes invoke` commands, keep metadata-only `camera.list` invoke allowed, and normalize empty `photos_latest` results to a consistent response shape to prevent base64 context bloat. (#34332) Thanks @obviyus.
|
||||
- TUI/session-key canonicalization: normalize `openclaw tui --session` values to lowercase so uppercase session names no longer drop real-time streaming updates due to gateway/TUI key mismatches. (#33866, #34013) Thanks @lynnzc.
|
||||
- iMessage/echo loop hardening: strip leaked assistant-internal scaffolding from outbound iMessage replies, drop reflected assistant-content messages before they re-enter inbound processing, extend echo-cache text retention for delayed reflections, and suppress repeated loop traffic before it amplifies into queue overflow. (#33295) Thanks @joelnishanth.
|
||||
|
||||
@@ -3,7 +3,7 @@ import os from "node:os";
|
||||
import path from "node:path";
|
||||
import JSZip from "jszip";
|
||||
import * as tar from "tar";
|
||||
import { afterAll, beforeAll, describe, expect, it } from "vitest";
|
||||
import { afterAll, beforeAll, describe, expect, it, vi } from "vitest";
|
||||
import { withRealpathSymlinkRebindRace } from "../test-utils/symlink-rebind-race.js";
|
||||
import type { ArchiveSecurityError } from "./archive.js";
|
||||
import { extractArchive, resolveArchiveKind, resolvePackedRootDir } from "./archive.js";
|
||||
@@ -180,6 +180,45 @@ describe("archive utils", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it.runIf(process.platform !== "win32")(
|
||||
"rejects zip extraction when a hardlink appears after atomic rename",
|
||||
async () => {
|
||||
await withArchiveCase("zip", async ({ workDir, archivePath, extractDir }) => {
|
||||
const outsideDir = path.join(workDir, "outside");
|
||||
await fs.mkdir(outsideDir, { recursive: true });
|
||||
const outsideAlias = path.join(outsideDir, "payload.bin");
|
||||
const extractedPath = path.join(extractDir, "package", "payload.bin");
|
||||
|
||||
const zip = new JSZip();
|
||||
zip.file("package/payload.bin", "owned");
|
||||
await fs.writeFile(archivePath, await zip.generateAsync({ type: "nodebuffer" }));
|
||||
|
||||
const realRename = fs.rename.bind(fs);
|
||||
let linked = false;
|
||||
const renameSpy = vi.spyOn(fs, "rename").mockImplementation(async (...args) => {
|
||||
await realRename(...args);
|
||||
if (!linked) {
|
||||
linked = true;
|
||||
await fs.link(String(args[1]), outsideAlias);
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
await expect(
|
||||
extractArchive({ archivePath, destDir: extractDir, timeoutMs: 5_000 }),
|
||||
).rejects.toMatchObject({
|
||||
code: "destination-symlink-traversal",
|
||||
} satisfies Partial<ArchiveSecurityError>);
|
||||
} finally {
|
||||
renameSpy.mockRestore();
|
||||
}
|
||||
|
||||
await expect(fs.readFile(outsideAlias, "utf8")).resolves.toBe("owned");
|
||||
await expect(fs.stat(extractedPath)).rejects.toMatchObject({ code: "ENOENT" });
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
it("rejects tar path traversal (zip slip)", async () => {
|
||||
await withArchiveCase("tar", async ({ workDir, archivePath, extractDir }) => {
|
||||
const insideDir = path.join(workDir, "inside");
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { constants as fsConstants } from "node:fs";
|
||||
import type { Stats } from "node:fs";
|
||||
import type { FileHandle } from "node:fs/promises";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
@@ -10,7 +13,8 @@ import {
|
||||
stripArchivePath,
|
||||
validateArchiveEntryPath,
|
||||
} from "./archive-path.js";
|
||||
import { openWritableFileWithinRoot, SafeOpenError } from "./fs-safe.js";
|
||||
import { sameFileIdentity } from "./file-identity.js";
|
||||
import { openFileWithinRoot, openWritableFileWithinRoot, SafeOpenError } from "./fs-safe.js";
|
||||
import { isNotFoundPathError, isPathInside } from "./path-guards.js";
|
||||
|
||||
export type ArchiveKind = "tar" | "zip";
|
||||
@@ -63,6 +67,12 @@ const ERROR_ARCHIVE_ENTRY_EXTRACTED_SIZE_EXCEEDS_LIMIT =
|
||||
"archive entry extracted size exceeds limit";
|
||||
const ERROR_ARCHIVE_EXTRACTED_SIZE_EXCEEDS_LIMIT = "archive extracted size exceeds limit";
|
||||
const ERROR_ARCHIVE_ENTRY_TRAVERSES_SYMLINK = "archive entry traverses symlink in destination";
|
||||
const SUPPORTS_NOFOLLOW = process.platform !== "win32" && "O_NOFOLLOW" in fsConstants;
|
||||
const OPEN_WRITE_CREATE_FLAGS =
|
||||
fsConstants.O_WRONLY |
|
||||
fsConstants.O_CREAT |
|
||||
fsConstants.O_EXCL |
|
||||
(SUPPORTS_NOFOLLOW ? fsConstants.O_NOFOLLOW : 0);
|
||||
|
||||
const TAR_SUFFIXES = [".tgz", ".tar.gz", ".tar"];
|
||||
|
||||
@@ -275,6 +285,7 @@ type OpenZipOutputFileResult = {
|
||||
handle: FileHandle;
|
||||
createdForWrite: boolean;
|
||||
openedRealPath: string;
|
||||
openedStat: Stats;
|
||||
};
|
||||
|
||||
async function openZipOutputFile(params: {
|
||||
@@ -317,6 +328,33 @@ async function cleanupPartialRegularFile(filePath: string): Promise<void> {
|
||||
}
|
||||
}
|
||||
|
||||
function buildArchiveAtomicTempPath(targetPath: string): string {
|
||||
return path.join(
|
||||
path.dirname(targetPath),
|
||||
`.${path.basename(targetPath)}.${process.pid}.${randomUUID()}.tmp`,
|
||||
);
|
||||
}
|
||||
|
||||
async function verifyZipWriteResult(params: {
|
||||
destinationRealDir: string;
|
||||
relPath: string;
|
||||
expectedStat: Stats;
|
||||
}): Promise<string> {
|
||||
const opened = await openFileWithinRoot({
|
||||
rootDir: params.destinationRealDir,
|
||||
relativePath: params.relPath,
|
||||
rejectHardlinks: true,
|
||||
});
|
||||
try {
|
||||
if (!sameFileIdentity(opened.stat, params.expectedStat)) {
|
||||
throw new SafeOpenError("path-mismatch", "path changed during zip extract");
|
||||
}
|
||||
return opened.realPath;
|
||||
} finally {
|
||||
await opened.handle.close().catch(() => undefined);
|
||||
}
|
||||
}
|
||||
|
||||
type ZipEntry = {
|
||||
name: string;
|
||||
dir: boolean;
|
||||
@@ -403,36 +441,65 @@ async function writeZipFileEntry(params: {
|
||||
});
|
||||
params.budget.startEntry();
|
||||
const readable = await readZipEntryStream(params.entry);
|
||||
const writable = opened.handle.createWriteStream();
|
||||
const destinationPath = opened.openedRealPath;
|
||||
const targetMode = opened.openedStat.mode & 0o777;
|
||||
await opened.handle.close().catch(() => undefined);
|
||||
|
||||
let tempHandle: FileHandle | null = null;
|
||||
let tempPath: string | null = null;
|
||||
let tempStat: Stats | null = null;
|
||||
let handleClosedByStream = false;
|
||||
writable.once("close", () => {
|
||||
handleClosedByStream = true;
|
||||
});
|
||||
|
||||
try {
|
||||
tempPath = buildArchiveAtomicTempPath(destinationPath);
|
||||
tempHandle = await fs.open(tempPath, OPEN_WRITE_CREATE_FLAGS, targetMode || 0o666);
|
||||
const writable = tempHandle.createWriteStream();
|
||||
writable.once("close", () => {
|
||||
handleClosedByStream = true;
|
||||
});
|
||||
|
||||
await pipeline(
|
||||
readable,
|
||||
createExtractBudgetTransform({ onChunkBytes: params.budget.addBytes }),
|
||||
writable,
|
||||
);
|
||||
tempStat = await fs.stat(tempPath);
|
||||
if (!tempStat) {
|
||||
throw new Error("zip temp write did not produce file metadata");
|
||||
}
|
||||
if (!handleClosedByStream) {
|
||||
await tempHandle.close().catch(() => undefined);
|
||||
handleClosedByStream = true;
|
||||
}
|
||||
tempHandle = null;
|
||||
await fs.rename(tempPath, destinationPath);
|
||||
tempPath = null;
|
||||
const verifiedPath = await verifyZipWriteResult({
|
||||
destinationRealDir: params.destinationRealDir,
|
||||
relPath: params.relPath,
|
||||
expectedStat: tempStat,
|
||||
});
|
||||
|
||||
// Best-effort permission restore for zip entries created on unix.
|
||||
if (typeof params.entry.unixPermissions === "number") {
|
||||
const mode = params.entry.unixPermissions & 0o777;
|
||||
if (mode !== 0) {
|
||||
await fs.chmod(verifiedPath, mode).catch(() => undefined);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
if (opened.createdForWrite) {
|
||||
await fs.rm(opened.openedRealPath, { force: true }).catch(() => undefined);
|
||||
if (tempPath) {
|
||||
await fs.rm(tempPath, { force: true }).catch(() => undefined);
|
||||
} else {
|
||||
await cleanupPartialRegularFile(opened.openedRealPath).catch(() => undefined);
|
||||
await cleanupPartialRegularFile(destinationPath).catch(() => undefined);
|
||||
}
|
||||
if (err instanceof SafeOpenError) {
|
||||
throw symlinkTraversalError(params.entry.name);
|
||||
}
|
||||
throw err;
|
||||
} finally {
|
||||
if (!handleClosedByStream) {
|
||||
await opened.handle.close().catch(() => undefined);
|
||||
}
|
||||
}
|
||||
|
||||
// Best-effort permission restore for zip entries created on unix.
|
||||
if (typeof params.entry.unixPermissions === "number") {
|
||||
const mode = params.entry.unixPermissions & 0o777;
|
||||
if (mode !== 0) {
|
||||
await fs.chmod(opened.openedRealPath, mode).catch(() => undefined);
|
||||
if (tempHandle && !handleClosedByStream) {
|
||||
await tempHandle.close().catch(() => undefined);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user