fix: harden backup manifest counting

This commit is contained in:
Gustavo Madeira Santana 2026-03-08 15:48:05 -04:00
parent 4f98de2f64
commit 0b0fb1b2c9
4 changed files with 130 additions and 17 deletions

View File

@ -214,4 +214,61 @@ describe("backupVerifyCommand", () => {
await fs.rm(archiveDir, { recursive: true, force: true });
}
});
it("fails when the archive contains duplicate root manifest entries", async () => {
const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-backup-duplicate-manifest-"));
const archivePath = path.join(tempDir, "broken.tar.gz");
const manifestPath = path.join(tempDir, "manifest.json");
const payloadPath = path.join(tempDir, "payload.txt");
try {
const rootName = "openclaw-backup-2026-03-09T00-00-00.000Z";
const manifest = {
schemaVersion: 1,
createdAt: "2026-03-09T00:00:00.000Z",
archiveRoot: rootName,
runtimeVersion: "test",
platform: process.platform,
nodeVersion: process.version,
assets: [
{
kind: "state",
sourcePath: "/tmp/.openclaw",
archivePath: `${rootName}/payload/posix/tmp/.openclaw/payload.txt`,
},
],
};
await fs.writeFile(manifestPath, `${JSON.stringify(manifest, null, 2)}\n`, "utf8");
await fs.writeFile(payloadPath, "payload\n", "utf8");
await tar.c(
{
file: archivePath,
gzip: true,
portable: true,
preservePaths: true,
onWriteEntry: (entry) => {
if (entry.path === manifestPath) {
entry.path = `${rootName}/manifest.json`;
return;
}
if (entry.path === payloadPath) {
entry.path = `${rootName}/payload/posix/tmp/.openclaw/payload.txt`;
}
},
},
[manifestPath, manifestPath, payloadPath],
);
const runtime = {
log: vi.fn(),
error: vi.fn(),
exit: vi.fn(),
};
await expect(backupVerifyCommand(runtime, { archive: archivePath })).rejects.toThrow(
/expected exactly one backup manifest entry, found 2/i,
);
} finally {
await fs.rm(tempDir, { recursive: true, force: true });
}
});
});

View File

@ -167,13 +167,13 @@ function parseManifest(raw: string): BackupManifest {
};
}
async function listArchiveEntries(archivePath: string): Promise<Set<string>> {
const entries = new Set<string>();
async function listArchiveEntries(archivePath: string): Promise<string[]> {
const entries: string[] = [];
await tar.t({
file: archivePath,
gzip: true,
onentry: (entry) => {
entries.add(entry.path);
entries.push(entry.path);
},
});
return entries;
@ -220,9 +220,7 @@ function isRootManifestEntry(entryPath: string): boolean {
function verifyManifestAgainstEntries(manifest: BackupManifest, entries: Set<string>): void {
const archiveRoot = normalizeArchiveRoot(manifest.archiveRoot);
const manifestEntryPath = path.posix.join(archiveRoot, "manifest.json");
const normalizedEntries = [...entries].map((entry) =>
normalizeArchivePath(entry, "Archive entry"),
);
const normalizedEntries = [...entries];
const normalizedEntrySet = new Set(normalizedEntries);
if (!normalizedEntrySet.has(manifestEntryPath)) {
@ -267,17 +265,18 @@ export async function backupVerifyCommand(
opts: BackupVerifyOptions,
): Promise<BackupVerifyResult> {
const archivePath = resolveUserPath(opts.archive);
const entries = await listArchiveEntries(archivePath);
if (entries.size === 0) {
const rawEntries = await listArchiveEntries(archivePath);
if (rawEntries.length === 0) {
throw new Error("Backup archive is empty.");
}
const manifestMatches = [...entries]
.map((entry) => ({
raw: entry,
normalized: normalizeArchivePath(entry, "Archive entry"),
}))
.filter((entry) => isRootManifestEntry(entry.normalized));
const entries = rawEntries.map((entry) => ({
raw: entry,
normalized: normalizeArchivePath(entry, "Archive entry"),
}));
const normalizedEntrySet = new Set(entries.map((entry) => entry.normalized));
const manifestMatches = entries.filter((entry) => isRootManifestEntry(entry.normalized));
if (manifestMatches.length !== 1) {
throw new Error(`Expected exactly one backup manifest entry, found ${manifestMatches.length}.`);
}
@ -288,7 +287,7 @@ export async function backupVerifyCommand(
const manifestRaw = await extractManifest({ archivePath, manifestEntryPath });
const manifest = parseManifest(manifestRaw);
verifyManifestAgainstEntries(manifest, entries);
verifyManifestAgainstEntries(manifest, normalizedEntrySet);
const result: BackupVerifyResult = {
ok: true,
@ -297,7 +296,7 @@ export async function backupVerifyCommand(
createdAt: manifest.createdAt,
runtimeVersion: manifest.runtimeVersion,
assetCount: manifest.assets.length,
entryCount: entries.size,
entryCount: rawEntries.length,
};
runtime.log(opts.json ? JSON.stringify(result, null, 2) : formatResult(result));

View File

@ -96,4 +96,38 @@ describe("backupCreateCommand atomic archive write", () => {
await fs.rm(archiveDir, { recursive: true, force: true });
}
});
it("falls back to exclusive copy when hard-link publication is unsupported", async () => {
const stateDir = path.join(tempHome.home, ".openclaw");
const archiveDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-backup-copy-fallback-"));
const linkSpy = vi.spyOn(fs, "link");
try {
await fs.writeFile(path.join(stateDir, "openclaw.json"), JSON.stringify({}), "utf8");
await fs.writeFile(path.join(stateDir, "state.txt"), "state\n", "utf8");
tarCreateMock.mockImplementationOnce(async ({ file }: { file: string }) => {
await fs.writeFile(file, "archive-bytes", "utf8");
});
linkSpy.mockRejectedValueOnce(
Object.assign(new Error("hard links not supported"), { code: "EOPNOTSUPP" }),
);
const runtime = {
log: vi.fn(),
error: vi.fn(),
exit: vi.fn(),
};
const outputPath = path.join(archiveDir, "backup.tar.gz");
const result = await backupCreateCommand(runtime, {
output: outputPath,
});
expect(result.archivePath).toBe(outputPath);
expect(await fs.readFile(outputPath, "utf8")).toBe("archive-bytes");
} finally {
linkSpy.mockRestore();
await fs.rm(archiveDir, { recursive: true, force: true });
}
});
});

View File

@ -1,4 +1,5 @@
import { randomUUID } from "node:crypto";
import { constants as fsConstants } from "node:fs";
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
@ -124,6 +125,10 @@ function buildTempArchivePath(outputPath: string): string {
return `${outputPath}.${randomUUID()}.tmp`;
}
// Error codes that mean the target filesystem refuses hard links entirely,
// in which case archive publication should fall back to copying the file.
function isLinkUnsupportedError(code: string | undefined): boolean {
const linkUnsupportedCodes = ["ENOTSUP", "EOPNOTSUPP", "EPERM"];
return code !== undefined && linkUnsupportedCodes.includes(code);
}
async function publishTempArchive(params: {
tempArchivePath: string;
outputPath: string;
@ -137,7 +142,25 @@ async function publishTempArchive(params: {
cause: err,
});
}
throw err;
if (!isLinkUnsupportedError(code)) {
throw err;
}
try {
// Some backup targets support ordinary files but not hard links.
await fs.copyFile(params.tempArchivePath, params.outputPath, fsConstants.COPYFILE_EXCL);
} catch (copyErr) {
const copyCode = (copyErr as NodeJS.ErrnoException | undefined)?.code;
if (copyCode !== "EEXIST") {
await fs.rm(params.outputPath, { force: true }).catch(() => undefined);
}
if (copyCode === "EEXIST") {
throw new Error(`Refusing to overwrite existing backup archive: ${params.outputPath}`, {
cause: copyErr,
});
}
throw copyErr;
}
}
await fs.rm(params.tempArchivePath, { force: true });
}