mirror of https://github.com/openclaw/openclaw.git
fix(memory): add opt-in reset session archive indexing
This commit is contained in:
parent
136adb4c02
commit
cd22347f40
|
|
@ -284,9 +284,29 @@ describe("memory search config", () => {
|
|||
expect(resolved?.sync.sessions).toEqual({
|
||||
deltaBytes: 100000,
|
||||
deltaMessages: 50,
|
||||
includeResetArchives: false,
|
||||
});
|
||||
});
|
||||
|
||||
it("resolves reset archive indexing flag from sync session config", () => {
|
||||
const cfg = asConfig({
|
||||
agents: {
|
||||
defaults: {
|
||||
memorySearch: {
|
||||
provider: "openai",
|
||||
sync: {
|
||||
sessions: {
|
||||
includeResetArchives: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
const resolved = resolveMemorySearchConfig(cfg, "main");
|
||||
expect(resolved?.sync.sessions.includeResetArchives).toBe(true);
|
||||
});
|
||||
|
||||
it("merges remote defaults with agent overrides", () => {
|
||||
const cfg = asConfig({
|
||||
agents: {
|
||||
|
|
|
|||
|
|
@ -61,6 +61,7 @@ export type ResolvedMemorySearchConfig = {
|
|||
sessions: {
|
||||
deltaBytes: number;
|
||||
deltaMessages: number;
|
||||
includeResetArchives: boolean;
|
||||
};
|
||||
};
|
||||
query: {
|
||||
|
|
@ -97,6 +98,7 @@ const DEFAULT_CHUNK_OVERLAP = 80;
|
|||
const DEFAULT_WATCH_DEBOUNCE_MS = 1500;
|
||||
const DEFAULT_SESSION_DELTA_BYTES = 100_000;
|
||||
const DEFAULT_SESSION_DELTA_MESSAGES = 50;
|
||||
const DEFAULT_SESSION_INCLUDE_RESET_ARCHIVES = false;
|
||||
const DEFAULT_MAX_RESULTS = 6;
|
||||
const DEFAULT_MIN_SCORE = 0.35;
|
||||
const DEFAULT_HYBRID_ENABLED = true;
|
||||
|
|
@ -248,6 +250,10 @@ function mergeConfig(
|
|||
overrides?.sync?.sessions?.deltaMessages ??
|
||||
defaults?.sync?.sessions?.deltaMessages ??
|
||||
DEFAULT_SESSION_DELTA_MESSAGES,
|
||||
includeResetArchives:
|
||||
overrides?.sync?.sessions?.includeResetArchives ??
|
||||
defaults?.sync?.sessions?.includeResetArchives ??
|
||||
DEFAULT_SESSION_INCLUDE_RESET_ARCHIVES,
|
||||
},
|
||||
};
|
||||
const query = {
|
||||
|
|
@ -336,6 +342,7 @@ function mergeConfig(
|
|||
sessions: {
|
||||
deltaBytes,
|
||||
deltaMessages,
|
||||
includeResetArchives: Boolean(sync.sessions.includeResetArchives),
|
||||
},
|
||||
},
|
||||
query: {
|
||||
|
|
|
|||
|
|
@ -56,6 +56,7 @@ const TARGET_KEYS = [
|
|||
"memory.qmd.sessions.enabled",
|
||||
"memory.qmd.sessions.exportDir",
|
||||
"memory.qmd.sessions.retentionDays",
|
||||
"memory.qmd.sessions.includeResetArchives",
|
||||
"memory.qmd.update.interval",
|
||||
"memory.qmd.update.debounceMs",
|
||||
"memory.qmd.update.onBoot",
|
||||
|
|
@ -104,6 +105,7 @@ const TARGET_KEYS = [
|
|||
"agents.defaults.memorySearch.sync.watch",
|
||||
"agents.defaults.memorySearch.sync.sessions.deltaBytes",
|
||||
"agents.defaults.memorySearch.sync.sessions.deltaMessages",
|
||||
"agents.defaults.memorySearch.sync.sessions.includeResetArchives",
|
||||
"models.mode",
|
||||
"models.providers.*.auth",
|
||||
"models.providers.*.authHeader",
|
||||
|
|
|
|||
|
|
@ -890,6 +890,8 @@ export const FIELD_HELP: Record<string, string> = {
|
|||
"Overrides where sanitized session exports are written before QMD indexing. Use this when default state storage is constrained or when exports must land on a managed volume.",
|
||||
"memory.qmd.sessions.retentionDays":
|
||||
"Defines how long exported session files are kept before automatic pruning, in days (default: unlimited). Set a finite value for storage hygiene or compliance retention policies.",
|
||||
"memory.qmd.sessions.includeResetArchives":
|
||||
"Includes reset transcript archives (`*.jsonl.reset.<timestamp>`) in QMD session indexing (default: false). Enable only when reset snapshots should remain searchable.",
|
||||
"memory.qmd.update.interval":
|
||||
"Sets how often QMD refreshes indexes from source content (duration string, default: 5m). Shorter intervals improve freshness but increase background CPU and I/O.",
|
||||
"memory.qmd.update.debounceMs":
|
||||
|
|
@ -930,6 +932,8 @@ export const FIELD_HELP: Record<string, string> = {
|
|||
"Requires at least this many newly appended bytes before session transcript changes trigger reindex (default: 100000). Increase to reduce frequent small reindexes, or lower for faster transcript freshness.",
|
||||
"agents.defaults.memorySearch.sync.sessions.deltaMessages":
|
||||
"Requires at least this many appended transcript messages before reindex is triggered (default: 50). Lower this for near-real-time transcript recall, or raise it to reduce indexing churn.",
|
||||
"agents.defaults.memorySearch.sync.sessions.includeResetArchives":
|
||||
"Includes reset transcript archives (`*.jsonl.reset.<timestamp>`) in builtin session-memory indexing (default: false). Enable only when reset snapshots should remain searchable.",
|
||||
ui: "UI presentation settings for accenting and assistant identity shown in control surfaces. Use this for branding and readability customization without changing runtime behavior.",
|
||||
"ui.seamColor":
|
||||
"Primary accent/seam color used by UI surfaces for emphasis, badges, and visual identity cues. Use high-contrast values that remain readable across light/dark themes.",
|
||||
|
|
|
|||
|
|
@ -354,6 +354,8 @@ export const FIELD_LABELS: Record<string, string> = {
|
|||
"agents.defaults.memorySearch.sync.watchDebounceMs": "Memory Watch Debounce (ms)",
|
||||
"agents.defaults.memorySearch.sync.sessions.deltaBytes": "Session Delta Bytes",
|
||||
"agents.defaults.memorySearch.sync.sessions.deltaMessages": "Session Delta Messages",
|
||||
"agents.defaults.memorySearch.sync.sessions.includeResetArchives":
|
||||
"Include Reset Session Archives",
|
||||
"agents.defaults.memorySearch.query.maxResults": "Memory Search Max Results",
|
||||
"agents.defaults.memorySearch.query.minScore": "Memory Search Min Score",
|
||||
"agents.defaults.memorySearch.query.hybrid.enabled": "Memory Search Hybrid",
|
||||
|
|
@ -385,6 +387,7 @@ export const FIELD_LABELS: Record<string, string> = {
|
|||
"memory.qmd.sessions.enabled": "QMD Session Indexing",
|
||||
"memory.qmd.sessions.exportDir": "QMD Session Export Directory",
|
||||
"memory.qmd.sessions.retentionDays": "QMD Session Retention (days)",
|
||||
"memory.qmd.sessions.includeResetArchives": "QMD Include Reset Session Archives",
|
||||
"memory.qmd.update.interval": "QMD Update Interval",
|
||||
"memory.qmd.update.debounceMs": "QMD Update Debounce (ms)",
|
||||
"memory.qmd.update.onBoot": "QMD Update on Startup",
|
||||
|
|
|
|||
|
|
@ -46,6 +46,8 @@ export type MemoryQmdSessionConfig = {
|
|||
enabled?: boolean;
|
||||
exportDir?: string;
|
||||
retentionDays?: number;
|
||||
/** Include reset transcript archives (`*.jsonl.reset.<timestamp>`) in QMD indexing. */
|
||||
includeResetArchives?: boolean;
|
||||
};
|
||||
|
||||
export type MemoryQmdUpdateConfig = {
|
||||
|
|
|
|||
|
|
@ -402,6 +402,8 @@ export type MemorySearchConfig = {
|
|||
deltaBytes?: number;
|
||||
/** Minimum appended JSONL lines before session transcripts are reindexed. */
|
||||
deltaMessages?: number;
|
||||
/** Include reset transcript archives (`*.jsonl.reset.<timestamp>`) in session indexing. */
|
||||
includeResetArchives?: boolean;
|
||||
};
|
||||
};
|
||||
/** Query behavior. */
|
||||
|
|
|
|||
|
|
@ -649,6 +649,7 @@ export const MemorySearchSchema = z
|
|||
.object({
|
||||
deltaBytes: z.number().int().nonnegative().optional(),
|
||||
deltaMessages: z.number().int().nonnegative().optional(),
|
||||
includeResetArchives: z.boolean().optional(),
|
||||
})
|
||||
.strict()
|
||||
.optional(),
|
||||
|
|
|
|||
|
|
@ -54,6 +54,7 @@ const MemoryQmdSessionSchema = z
|
|||
enabled: z.boolean().optional(),
|
||||
exportDir: z.string().optional(),
|
||||
retentionDays: z.number().int().nonnegative().optional(),
|
||||
includeResetArchives: z.boolean().optional(),
|
||||
})
|
||||
.strict();
|
||||
|
||||
|
|
|
|||
|
|
@ -2,9 +2,13 @@ import fs from "node:fs";
|
|||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterAll, afterEach, beforeAll, describe, expect, test, vi } from "vitest";
|
||||
import { formatSessionArchiveTimestamp } from "../config/sessions.js";
|
||||
import * as transcriptEvents from "../sessions/transcript-events.js";
|
||||
import { createToolSummaryPreviewTranscriptLines } from "./session-preview.test-helpers.js";
|
||||
import {
|
||||
archiveFileOnDisk,
|
||||
archiveSessionTranscripts,
|
||||
cleanupArchivedSessionTranscripts,
|
||||
readFirstUserMessageFromTranscript,
|
||||
readLastMessagePreviewFromTranscript,
|
||||
readSessionMessages,
|
||||
|
|
@ -179,6 +183,67 @@ describe("readFirstUserMessageFromTranscript", () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe("cleanupArchivedSessionTranscripts", () => {
|
||||
let tmpDir: string;
|
||||
let storePath: string;
|
||||
|
||||
registerTempSessionStore("openclaw-cleanup-archive-test-", (nextTmpDir, nextStorePath) => {
|
||||
tmpDir = nextTmpDir;
|
||||
storePath = nextStorePath;
|
||||
});
|
||||
|
||||
beforeAll(() => {
|
||||
vi.stubEnv("OPENCLAW_HOME", tmpDir);
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
test("emits transcript updates when removing expired reset archives", async () => {
|
||||
const emitSpy = vi.spyOn(transcriptEvents, "emitSessionTranscriptUpdate");
|
||||
const now = Date.now();
|
||||
const resetName = `cleanup-reset.jsonl.reset.${formatSessionArchiveTimestamp(now - 10_000)}`;
|
||||
const resetPath = path.join(tmpDir, resetName);
|
||||
fs.writeFileSync(resetPath, '{"type":"session"}\n', "utf-8");
|
||||
|
||||
const result = await cleanupArchivedSessionTranscripts({
|
||||
directories: [tmpDir, path.dirname(storePath)],
|
||||
olderThanMs: 1_000,
|
||||
reason: "reset",
|
||||
nowMs: now,
|
||||
});
|
||||
|
||||
expect(result.removed).toBe(1);
|
||||
expect(fs.existsSync(resetPath)).toBe(false);
|
||||
expect(emitSpy).toHaveBeenCalledTimes(1);
|
||||
expect(emitSpy).toHaveBeenCalledWith(resetPath);
|
||||
});
|
||||
|
||||
test("does not emit transcript updates when removing deleted archives", async () => {
|
||||
const emitSpy = vi.spyOn(transcriptEvents, "emitSessionTranscriptUpdate");
|
||||
const now = Date.now();
|
||||
const deletedName = `cleanup-deleted.jsonl.deleted.${formatSessionArchiveTimestamp(now - 10_000)}`;
|
||||
const deletedPath = path.join(tmpDir, deletedName);
|
||||
fs.writeFileSync(deletedPath, '{"type":"session"}\n', "utf-8");
|
||||
|
||||
const result = await cleanupArchivedSessionTranscripts({
|
||||
directories: [tmpDir],
|
||||
olderThanMs: 1_000,
|
||||
reason: "deleted",
|
||||
nowMs: now,
|
||||
});
|
||||
|
||||
expect(result.removed).toBe(1);
|
||||
expect(fs.existsSync(deletedPath)).toBe(false);
|
||||
expect(emitSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe("readLastMessagePreviewFromTranscript", () => {
|
||||
let tmpDir: string;
|
||||
let storePath: string;
|
||||
|
|
@ -731,6 +796,29 @@ describe("archiveSessionTranscripts", () => {
|
|||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
test("emits transcript updates for reset archives", () => {
|
||||
const emitSpy = vi.spyOn(transcriptEvents, "emitSessionTranscriptUpdate");
|
||||
const resetPath = path.join(tmpDir, "emit-reset.jsonl");
|
||||
fs.writeFileSync(resetPath, '{"type":"session"}\n', "utf-8");
|
||||
|
||||
const archivedReset = archiveFileOnDisk(resetPath, "reset");
|
||||
|
||||
expect(emitSpy).toHaveBeenCalledTimes(1);
|
||||
expect(emitSpy).toHaveBeenNthCalledWith(1, archivedReset);
|
||||
emitSpy.mockRestore();
|
||||
});
|
||||
|
||||
test("does not emit transcript updates for bak archives", () => {
|
||||
const emitSpy = vi.spyOn(transcriptEvents, "emitSessionTranscriptUpdate");
|
||||
const bakPath = path.join(tmpDir, "emit-bak.jsonl");
|
||||
fs.writeFileSync(bakPath, '{"type":"session"}\n', "utf-8");
|
||||
|
||||
archiveFileOnDisk(bakPath, "bak");
|
||||
|
||||
expect(emitSpy).not.toHaveBeenCalled();
|
||||
emitSpy.mockRestore();
|
||||
});
|
||||
|
||||
test("archives transcript from default and explicit sessionFile paths", () => {
|
||||
const cases = [
|
||||
{
|
||||
|
|
@ -760,6 +848,22 @@ describe("archiveSessionTranscripts", () => {
|
|||
}
|
||||
});
|
||||
|
||||
test("does not re-archive already archived transcript files", () => {
|
||||
const ts = formatSessionArchiveTimestamp(Date.now() - 1_000);
|
||||
const resetArchivePath = path.join(tmpDir, `sess-archive-4.jsonl.reset.${ts}`);
|
||||
fs.writeFileSync(resetArchivePath, '{"type":"session"}\n', "utf-8");
|
||||
|
||||
const archived = archiveSessionTranscripts({
|
||||
sessionId: "sess-archive-4",
|
||||
storePath,
|
||||
sessionFile: resetArchivePath,
|
||||
reason: "deleted",
|
||||
});
|
||||
|
||||
expect(archived).toEqual([]);
|
||||
expect(fs.existsSync(resetArchivePath)).toBe(true);
|
||||
});
|
||||
|
||||
test("returns empty array when no transcript files exist", () => {
|
||||
const archived = archiveSessionTranscripts({
|
||||
sessionId: "nonexistent-session",
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import os from "node:os";
|
|||
import path from "node:path";
|
||||
import {
|
||||
formatSessionArchiveTimestamp,
|
||||
isSessionArchiveArtifactName,
|
||||
parseSessionArchiveTimestamp,
|
||||
type SessionArchiveReason,
|
||||
resolveSessionFilePath,
|
||||
|
|
@ -12,6 +13,7 @@ import {
|
|||
import { resolveRequiredHomeDir } from "../infra/home-dir.js";
|
||||
import { jsonUtf8Bytes } from "../infra/json-utf8-bytes.js";
|
||||
import { hasInterSessionUserProvenance } from "../sessions/input-provenance.js";
|
||||
import { emitSessionTranscriptUpdate } from "../sessions/transcript-events.js";
|
||||
import { stripInlineDirectiveTagsForDisplay } from "../utils/directive-tags.js";
|
||||
import { extractToolCallNames, hasToolCall } from "../utils/transcript-tools.js";
|
||||
import { stripEnvelope } from "./chat-sanitize.js";
|
||||
|
|
@ -178,6 +180,9 @@ export function archiveFileOnDisk(filePath: string, reason: ArchiveFileReason):
|
|||
const ts = formatSessionArchiveTimestamp();
|
||||
const archived = `${filePath}.${reason}.${ts}`;
|
||||
fs.renameSync(filePath, archived);
|
||||
if (reason === "reset") {
|
||||
emitSessionTranscriptUpdate(archived);
|
||||
}
|
||||
return archived;
|
||||
}
|
||||
|
||||
|
|
@ -218,6 +223,9 @@ export function archiveSessionTranscripts(opts: {
|
|||
if (!fs.existsSync(candidatePath)) {
|
||||
continue;
|
||||
}
|
||||
if (isSessionArchiveArtifactName(path.basename(candidatePath))) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
archived.push(archiveFileOnDisk(candidatePath, opts.reason));
|
||||
} catch {
|
||||
|
|
@ -260,6 +268,9 @@ export async function cleanupArchivedSessionTranscripts(opts: {
|
|||
}
|
||||
await fs.promises.rm(fullPath).catch(() => undefined);
|
||||
removed += 1;
|
||||
if (reason === "reset") {
|
||||
emitSessionTranscriptUpdate(fullPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@ describe("resolveMemoryBackendConfig", () => {
|
|||
expect(resolved.qmd?.collections.length).toBeGreaterThanOrEqual(3);
|
||||
expect(resolved.qmd?.command).toBe("qmd");
|
||||
expect(resolved.qmd?.searchMode).toBe("search");
|
||||
expect(resolved.qmd?.sessions.includeResetArchives).toBe(false);
|
||||
expect(resolved.qmd?.update.intervalMs).toBeGreaterThan(0);
|
||||
expect(resolved.qmd?.update.waitForBootSync).toBe(false);
|
||||
expect(resolved.qmd?.update.commandTimeoutMs).toBe(30_000);
|
||||
|
|
@ -143,4 +144,21 @@ describe("resolveMemoryBackendConfig", () => {
|
|||
const resolved = resolveMemoryBackendConfig({ cfg, agentId: "main" });
|
||||
expect(resolved.qmd?.searchMode).toBe("vsearch");
|
||||
});
|
||||
|
||||
it("resolves qmd reset archive indexing toggle", () => {
|
||||
const cfg = {
|
||||
agents: { defaults: { workspace: "/tmp/memory-test" } },
|
||||
memory: {
|
||||
backend: "qmd",
|
||||
qmd: {
|
||||
sessions: {
|
||||
enabled: true,
|
||||
includeResetArchives: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
} as OpenClawConfig;
|
||||
const resolved = resolveMemoryBackendConfig({ cfg, agentId: "main" });
|
||||
expect(resolved.qmd?.sessions.includeResetArchives).toBe(true);
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -49,6 +49,7 @@ export type ResolvedQmdSessionConfig = {
|
|||
enabled: boolean;
|
||||
exportDir?: string;
|
||||
retentionDays?: number;
|
||||
includeResetArchives: boolean;
|
||||
};
|
||||
|
||||
export type ResolvedQmdMcporterConfig = {
|
||||
|
|
@ -214,6 +215,7 @@ function resolveSessionConfig(
|
|||
enabled,
|
||||
exportDir,
|
||||
retentionDays,
|
||||
includeResetArchives: cfg?.includeResetArchives === true,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -44,6 +44,7 @@ import {
|
|||
import type { SessionFileEntry } from "./session-files.js";
|
||||
import {
|
||||
buildSessionEntry,
|
||||
isArchivedSessionTranscriptPath,
|
||||
listSessionFilesForAgent,
|
||||
sessionPathForFile,
|
||||
} from "./session-files.js";
|
||||
|
|
@ -56,6 +57,7 @@ type MemoryIndexMeta = {
|
|||
provider: string;
|
||||
providerKey?: string;
|
||||
sources?: MemorySource[];
|
||||
sessionIncludeResetArchives?: boolean;
|
||||
scopeHash?: string;
|
||||
chunkTokens: number;
|
||||
chunkOverlap: number;
|
||||
|
|
@ -466,6 +468,21 @@ export abstract class MemoryManagerSyncOps {
|
|||
this.sessionPendingFiles.clear();
|
||||
let shouldSync = false;
|
||||
for (const sessionFile of pending) {
|
||||
if (
|
||||
isArchivedSessionTranscriptPath(sessionFile, {
|
||||
includeResetArchives: this.settings.sync.sessions.includeResetArchives,
|
||||
})
|
||||
) {
|
||||
this.sessionsDirtyFiles.add(sessionFile);
|
||||
this.sessionsDirty = true;
|
||||
shouldSync = true;
|
||||
continue;
|
||||
}
|
||||
if (isArchivedSessionTranscriptPath(sessionFile, { includeResetArchives: true })) {
|
||||
// Disabled reset archives should not accumulate delta state; cleanup happens on reindex.
|
||||
this.sessionDeltas.delete(sessionFile);
|
||||
continue;
|
||||
}
|
||||
const delta = await this.updateSessionDelta(sessionFile);
|
||||
if (!delta) {
|
||||
continue;
|
||||
|
|
@ -760,8 +777,16 @@ export abstract class MemoryManagerSyncOps {
|
|||
return;
|
||||
}
|
||||
|
||||
const files = await listSessionFilesForAgent(this.agentId);
|
||||
const files = await listSessionFilesForAgent(this.agentId, {
|
||||
includeResetArchives: this.settings.sync.sessions.includeResetArchives,
|
||||
});
|
||||
const activePaths = new Set(files.map((file) => sessionPathForFile(file)));
|
||||
const sessionRowsBefore = params.needsFullReindex
|
||||
? []
|
||||
: (this.db.prepare(`SELECT path FROM files WHERE source = ?`).all("sessions") as Array<{
|
||||
path: string;
|
||||
}>);
|
||||
const knownPaths = new Set(sessionRowsBefore.map((row) => row.path));
|
||||
const indexAll = params.needsFullReindex || this.sessionsDirtyFiles.size === 0;
|
||||
log.debug("memory sync: indexing session files", {
|
||||
files: files.length,
|
||||
|
|
@ -780,7 +805,9 @@ export abstract class MemoryManagerSyncOps {
|
|||
}
|
||||
|
||||
const tasks = files.map((absPath) => async () => {
|
||||
if (!indexAll && !this.sessionsDirtyFiles.has(absPath)) {
|
||||
const sessionPath = sessionPathForFile(absPath);
|
||||
const isKnownPath = knownPaths.has(sessionPath);
|
||||
if (!indexAll && !this.sessionsDirtyFiles.has(absPath) && isKnownPath) {
|
||||
if (params.progress) {
|
||||
params.progress.completed += 1;
|
||||
params.progress.report({
|
||||
|
|
@ -827,10 +854,7 @@ export abstract class MemoryManagerSyncOps {
|
|||
});
|
||||
await runWithConcurrency(tasks, this.getIndexConcurrency());
|
||||
|
||||
const staleRows = this.db
|
||||
.prepare(`SELECT path FROM files WHERE source = ?`)
|
||||
.all("sessions") as Array<{ path: string }>;
|
||||
for (const stale of staleRows) {
|
||||
for (const stale of sessionRowsBefore) {
|
||||
if (activePaths.has(stale.path)) {
|
||||
continue;
|
||||
}
|
||||
|
|
@ -898,6 +922,7 @@ export abstract class MemoryManagerSyncOps {
|
|||
const vectorReady = await this.ensureVectorReady();
|
||||
const meta = this.readMeta();
|
||||
const configuredSources = this.resolveConfiguredSourcesForMeta();
|
||||
const sessionsSourceEnabled = configuredSources.includes("sessions");
|
||||
const configuredScopeHash = this.resolveConfiguredScopeHash();
|
||||
const needsFullReindex =
|
||||
params?.force ||
|
||||
|
|
@ -906,6 +931,11 @@ export abstract class MemoryManagerSyncOps {
|
|||
(this.provider && meta.provider !== this.provider.id) ||
|
||||
meta.providerKey !== this.providerKey ||
|
||||
this.metaSourcesDiffer(meta, configuredSources) ||
|
||||
(sessionsSourceEnabled &&
|
||||
this.metaSessionIncludeResetArchivesDiffers(
|
||||
meta,
|
||||
this.settings.sync.sessions.includeResetArchives,
|
||||
)) ||
|
||||
meta.scopeHash !== configuredScopeHash ||
|
||||
meta.chunkTokens !== this.settings.chunking.tokens ||
|
||||
meta.chunkOverlap !== this.settings.chunking.overlap ||
|
||||
|
|
@ -1120,6 +1150,7 @@ export abstract class MemoryManagerSyncOps {
|
|||
provider: this.provider?.id ?? "none",
|
||||
providerKey: this.providerKey!,
|
||||
sources: this.resolveConfiguredSourcesForMeta(),
|
||||
sessionIncludeResetArchives: this.settings.sync.sessions.includeResetArchives,
|
||||
scopeHash: this.resolveConfiguredScopeHash(),
|
||||
chunkTokens: this.settings.chunking.tokens,
|
||||
chunkOverlap: this.settings.chunking.overlap,
|
||||
|
|
@ -1192,6 +1223,7 @@ export abstract class MemoryManagerSyncOps {
|
|||
provider: this.provider?.id ?? "none",
|
||||
providerKey: this.providerKey!,
|
||||
sources: this.resolveConfiguredSourcesForMeta(),
|
||||
sessionIncludeResetArchives: this.settings.sync.sessions.includeResetArchives,
|
||||
scopeHash: this.resolveConfiguredScopeHash(),
|
||||
chunkTokens: this.settings.chunking.tokens,
|
||||
chunkOverlap: this.settings.chunking.overlap,
|
||||
|
|
@ -1293,4 +1325,12 @@ export abstract class MemoryManagerSyncOps {
|
|||
}
|
||||
return metaSources.some((source, index) => source !== configuredSources[index]);
|
||||
}
|
||||
|
||||
private metaSessionIncludeResetArchivesDiffers(
|
||||
meta: MemoryIndexMeta,
|
||||
configuredIncludeResetArchives: boolean,
|
||||
): boolean {
|
||||
const metaValue = meta.sessionIncludeResetArchives === true;
|
||||
return metaValue !== configuredIncludeResetArchives;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,310 @@
|
|||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import type { OpenClawConfig } from "../config/config.js";
|
||||
import { getMemorySearchManager, type MemoryIndexManager } from "./index.js";
|
||||
import { buildSessionEntry, sessionPathForFile } from "./session-files.js";
|
||||
|
||||
const { watchMock } = vi.hoisted(() => ({
|
||||
watchMock: vi.fn(() => ({
|
||||
on: vi.fn(),
|
||||
close: vi.fn(async () => undefined),
|
||||
})),
|
||||
}));
|
||||
|
||||
vi.mock("chokidar", () => ({
|
||||
default: { watch: watchMock },
|
||||
watch: watchMock,
|
||||
}));
|
||||
|
||||
vi.mock("./sqlite-vec.js", () => ({
|
||||
loadSqliteVecExtension: async () => ({ ok: false, error: "sqlite-vec disabled in tests" }),
|
||||
}));
|
||||
|
||||
vi.mock("./embeddings.js", () => ({
|
||||
createEmbeddingProvider: async () => ({
|
||||
requestedProvider: "openai",
|
||||
provider: {
|
||||
id: "mock",
|
||||
model: "mock-embed",
|
||||
embedQuery: async () => [1, 0],
|
||||
embedBatch: async (texts: string[]) => texts.map(() => [1, 0]),
|
||||
},
|
||||
}),
|
||||
}));
|
||||
|
||||
function createMemoryConfig(
|
||||
workspaceDir: string,
|
||||
opts?: {
|
||||
includeResetArchives?: boolean;
|
||||
},
|
||||
): OpenClawConfig {
|
||||
return {
|
||||
agents: {
|
||||
defaults: {
|
||||
workspace: workspaceDir,
|
||||
memorySearch: {
|
||||
experimental: { sessionMemory: true },
|
||||
sources: ["sessions"],
|
||||
provider: "openai",
|
||||
model: "mock-embed",
|
||||
store: { path: path.join(workspaceDir, "index.sqlite"), vector: { enabled: false } },
|
||||
sync: {
|
||||
watch: false,
|
||||
onSessionStart: false,
|
||||
onSearch: false,
|
||||
sessions: {
|
||||
deltaBytes: 999_999,
|
||||
deltaMessages: 999_999,
|
||||
includeResetArchives: opts?.includeResetArchives ?? true,
|
||||
},
|
||||
},
|
||||
query: { minScore: 0, hybrid: { enabled: false } },
|
||||
},
|
||||
},
|
||||
list: [{ id: "main", default: true }],
|
||||
},
|
||||
} as OpenClawConfig;
|
||||
}
|
||||
|
||||
describe("memory session delta archived paths", () => {
|
||||
let manager: MemoryIndexManager | null = null;
|
||||
let workspaceDir = "";
|
||||
|
||||
afterEach(async () => {
|
||||
watchMock.mockClear();
|
||||
vi.unstubAllEnvs();
|
||||
if (manager) {
|
||||
await manager.close();
|
||||
manager = null;
|
||||
}
|
||||
if (workspaceDir) {
|
||||
await fs.rm(workspaceDir, { recursive: true, force: true });
|
||||
workspaceDir = "";
|
||||
}
|
||||
});
|
||||
|
||||
it("marks archived transcripts dirty and syncs without delta-threshold checks", async () => {
|
||||
workspaceDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-memory-archived-delta-"));
|
||||
const stateDir = path.join(workspaceDir, "state");
|
||||
vi.stubEnv("OPENCLAW_STATE_DIR", stateDir);
|
||||
const sessionsDir = path.join(stateDir, "agents", "main", "sessions");
|
||||
await fs.mkdir(sessionsDir, { recursive: true });
|
||||
|
||||
const cfg = createMemoryConfig(workspaceDir);
|
||||
|
||||
const result = await getMemorySearchManager({ cfg, agentId: "main" });
|
||||
expect(result.manager).not.toBeNull();
|
||||
if (!result.manager) {
|
||||
throw new Error("manager missing");
|
||||
}
|
||||
manager = result.manager as unknown as MemoryIndexManager;
|
||||
|
||||
const archivedPath = path.join(sessionsDir, "session-1.jsonl.reset.2026-02-18T10-00-00.000Z");
|
||||
const inner = manager as unknown as {
|
||||
sessionPendingFiles: Set<string>;
|
||||
sessionsDirtyFiles: Set<string>;
|
||||
sessionsDirty: boolean;
|
||||
updateSessionDelta: (sessionFile: string) => Promise<unknown>;
|
||||
processSessionDeltaBatch: () => Promise<void>;
|
||||
sync: (params?: { reason?: string; force?: boolean }) => Promise<void>;
|
||||
};
|
||||
|
||||
inner.sessionPendingFiles.add(archivedPath);
|
||||
const updateDeltaSpy = vi.spyOn(inner, "updateSessionDelta");
|
||||
const syncSpy = vi.fn(async () => undefined);
|
||||
inner.sync = syncSpy;
|
||||
|
||||
await inner.processSessionDeltaBatch();
|
||||
|
||||
expect(updateDeltaSpy).not.toHaveBeenCalled();
|
||||
expect(inner.sessionsDirtyFiles.has(archivedPath)).toBe(true);
|
||||
expect(inner.sessionsDirty).toBe(true);
|
||||
expect(syncSpy).toHaveBeenCalledWith({ reason: "session-delta" });
|
||||
});
|
||||
|
||||
it("skips archived transcript deltas entirely when reset archives are disabled", async () => {
|
||||
workspaceDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-memory-archived-skip-"));
|
||||
const stateDir = path.join(workspaceDir, "state");
|
||||
vi.stubEnv("OPENCLAW_STATE_DIR", stateDir);
|
||||
const sessionsDir = path.join(stateDir, "agents", "main", "sessions");
|
||||
await fs.mkdir(sessionsDir, { recursive: true });
|
||||
|
||||
const cfg = createMemoryConfig(workspaceDir, { includeResetArchives: false });
|
||||
|
||||
const result = await getMemorySearchManager({ cfg, agentId: "main" });
|
||||
expect(result.manager).not.toBeNull();
|
||||
if (!result.manager) {
|
||||
throw new Error("manager missing");
|
||||
}
|
||||
manager = result.manager as unknown as MemoryIndexManager;
|
||||
|
||||
const archivedPath = path.join(sessionsDir, "session-1.jsonl.reset.2026-02-18T10-00-00.000Z");
|
||||
const inner = manager as unknown as {
|
||||
sessionPendingFiles: Set<string>;
|
||||
sessionDeltas: Map<
|
||||
string,
|
||||
{ lastSize: number; pendingBytes: number; pendingMessages: number }
|
||||
>;
|
||||
sessionsDirtyFiles: Set<string>;
|
||||
sessionsDirty: boolean;
|
||||
updateSessionDelta: (sessionFile: string) => Promise<unknown>;
|
||||
processSessionDeltaBatch: () => Promise<void>;
|
||||
sync: (params?: { reason?: string; force?: boolean }) => Promise<void>;
|
||||
};
|
||||
|
||||
inner.sessionPendingFiles.add(archivedPath);
|
||||
inner.sessionDeltas.set(archivedPath, { lastSize: 12, pendingBytes: 12, pendingMessages: 1 });
|
||||
const updateDeltaSpy = vi.spyOn(inner, "updateSessionDelta");
|
||||
const syncSpy = vi.fn(async () => undefined);
|
||||
inner.sync = syncSpy;
|
||||
|
||||
await inner.processSessionDeltaBatch();
|
||||
|
||||
expect(updateDeltaSpy).not.toHaveBeenCalled();
|
||||
expect(inner.sessionDeltas.has(archivedPath)).toBe(false);
|
||||
expect(inner.sessionsDirtyFiles.has(archivedPath)).toBe(false);
|
||||
expect(inner.sessionsDirty).toBe(false);
|
||||
expect(syncSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("indexes newly discovered archived transcripts during dirty sync and skips unchanged reruns", async () => {
|
||||
workspaceDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-memory-archived-sync-"));
|
||||
const stateDir = path.join(workspaceDir, "state");
|
||||
vi.stubEnv("OPENCLAW_STATE_DIR", stateDir);
|
||||
const sessionsDir = path.join(stateDir, "agents", "main", "sessions");
|
||||
await fs.mkdir(sessionsDir, { recursive: true });
|
||||
|
||||
const activePath = path.join(sessionsDir, "active.jsonl");
|
||||
const archivedPath = path.join(sessionsDir, "active.jsonl.reset.2026-02-18T10-00-00.000Z");
|
||||
await fs.writeFile(
|
||||
activePath,
|
||||
'{"type":"message","message":{"role":"user","content":"active message"}}\n',
|
||||
"utf-8",
|
||||
);
|
||||
await fs.writeFile(
|
||||
archivedPath,
|
||||
'{"type":"message","message":{"role":"assistant","content":"archived snapshot"}}\n',
|
||||
"utf-8",
|
||||
);
|
||||
|
||||
const cfg = createMemoryConfig(workspaceDir);
|
||||
const result = await getMemorySearchManager({ cfg, agentId: "main" });
|
||||
expect(result.manager).not.toBeNull();
|
||||
if (!result.manager) {
|
||||
throw new Error("manager missing");
|
||||
}
|
||||
manager = result.manager as unknown as MemoryIndexManager;
|
||||
|
||||
const activeEntry = await buildSessionEntry(activePath);
|
||||
expect(activeEntry).not.toBeNull();
|
||||
if (!activeEntry) {
|
||||
throw new Error("active session entry missing");
|
||||
}
|
||||
|
||||
const inner = manager as unknown as {
|
||||
db: {
|
||||
prepare: (sql: string) => {
|
||||
run: (...params: unknown[]) => unknown;
|
||||
all: (...params: unknown[]) => Array<{ path: string }>;
|
||||
};
|
||||
};
|
||||
sessionsDirtyFiles: Set<string>;
|
||||
syncSessionFiles: (params: { needsFullReindex: boolean }) => Promise<void>;
|
||||
indexFile: (
|
||||
entry: { path: string },
|
||||
options: { source: string; content?: string },
|
||||
) => Promise<void>;
|
||||
};
|
||||
|
||||
inner.db
|
||||
.prepare("INSERT INTO files (path, source, hash, mtime, size) VALUES (?, ?, ?, ?, ?)")
|
||||
.run(
|
||||
activeEntry.path,
|
||||
"sessions",
|
||||
activeEntry.hash,
|
||||
Math.floor(activeEntry.mtimeMs),
|
||||
activeEntry.size,
|
||||
);
|
||||
|
||||
inner.sessionsDirtyFiles.add(activePath);
|
||||
const indexSpy = vi.spyOn(inner, "indexFile");
|
||||
const prepareSpy = vi.spyOn(inner.db, "prepare");
|
||||
|
||||
await inner.syncSessionFiles({ needsFullReindex: false });
|
||||
|
||||
const firstRunIndexedPaths = indexSpy.mock.calls.map(([entry]) => entry.path);
|
||||
expect(firstRunIndexedPaths).toContain(sessionPathForFile(archivedPath));
|
||||
expect(
|
||||
prepareSpy.mock.calls.filter(([sql]) => sql === "SELECT path FROM files WHERE source = ?"),
|
||||
).toHaveLength(1);
|
||||
|
||||
await inner.syncSessionFiles({ needsFullReindex: false });
|
||||
expect(indexSpy.mock.calls.length).toBe(firstRunIndexedPaths.length);
|
||||
});
|
||||
|
||||
it("triggers a full session reindex when includeResetArchives toggles", async () => {
  // Fixture: a workspace with one active transcript and one reset-archive
  // transcript sitting side by side in the agent's sessions directory.
  workspaceDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-memory-archived-toggle-"));
  const stateDir = path.join(workspaceDir, "state");
  vi.stubEnv("OPENCLAW_STATE_DIR", stateDir);
  const sessionsDir = path.join(stateDir, "agents", "main", "sessions");
  await fs.mkdir(sessionsDir, { recursive: true });

  const activePath = path.join(sessionsDir, "active.jsonl");
  const archivedPath = path.join(sessionsDir, "active.jsonl.reset.2026-02-18T10-00-00.000Z");
  await fs.writeFile(
    activePath,
    '{"type":"message","message":{"role":"user","content":"active message"}}\n',
    "utf-8",
  );
  await fs.writeFile(
    archivedPath,
    '{"type":"message","message":{"role":"assistant","content":"archived snapshot"}}\n',
    "utf-8",
  );

  // Pass 1: archives enabled — both the active transcript and the reset
  // archive should be indexed under the "sessions" source.
  const cfgWithArchives = createMemoryConfig(workspaceDir, { includeResetArchives: true });
  const withArchives = await getMemorySearchManager({ cfg: cfgWithArchives, agentId: "main" });
  expect(withArchives.manager).not.toBeNull();
  if (!withArchives.manager) {
    throw new Error("manager missing");
  }
  const withArchivesManager = withArchives.manager as unknown as MemoryIndexManager;
  manager = withArchivesManager;

  await withArchivesManager.sync({ force: true });
  const withArchivesFiles =
    withArchivesManager.status().sourceCounts?.find((entry) => entry.source === "sessions")
      ?.files ?? 0;
  // Active + archived transcript.
  expect(withArchivesFiles).toBe(2);
  await withArchivesManager.close();
  manager = null;

  // Pass 2: same workspace, archives disabled. The flag flip must be
  // detected and force a full session reindex (not an incremental delta),
  // dropping the archived transcript from the index.
  const cfgWithoutArchives = createMemoryConfig(workspaceDir, { includeResetArchives: false });
  const withoutArchives = await getMemorySearchManager({
    cfg: cfgWithoutArchives,
    agentId: "main",
  });
  expect(withoutArchives.manager).not.toBeNull();
  if (!withoutArchives.manager) {
    throw new Error("manager missing");
  }
  const withoutArchivesManager = withoutArchives.manager as unknown as MemoryIndexManager;
  manager = withoutArchivesManager;

  // Spy on the private session-sync entry point to observe the reindex mode.
  const inner = withoutArchivesManager as unknown as {
    syncSessionFiles: (params: { needsFullReindex: boolean }) => Promise<void>;
  };
  const syncSessionsSpy = vi.spyOn(inner, "syncSessionFiles");

  await withoutArchivesManager.sync({ reason: "search" });

  expect(syncSessionsSpy).toHaveBeenCalled();
  // The toggle (true -> false) must escalate to a full reindex.
  expect(syncSessionsSpy.mock.calls.at(-1)?.[0]).toMatchObject({ needsFullReindex: true });
  const withoutArchivesFiles =
    withoutArchivesManager.status().sourceCounts?.find((entry) => entry.source === "sessions")
      ?.files ?? 0;
  // Only the active transcript remains indexed.
  expect(withoutArchivesFiles).toBe(1);
});
|
||||
});
|
||||
|
|
@ -1080,7 +1080,7 @@ describe("QmdMemoryManager", () => {
|
|||
const platformSpy = vi.spyOn(process, "platform", "get").mockReturnValue("win32");
|
||||
try {
|
||||
const { manager } = await createManager({ mode: "status" });
|
||||
await manager.sync({ reason: "manual" });
|
||||
await manager.sync({ reason: "manual", force: true });
|
||||
|
||||
const qmdCalls = spawnMock.mock.calls.filter((call: unknown[]) => {
|
||||
const args = call[1] as string[] | undefined;
|
||||
|
|
@ -2202,16 +2202,33 @@ describe("QmdMemoryManager", () => {
|
|||
}
|
||||
});
|
||||
|
||||
it("reuses exported session markdown files when inputs are unchanged", async () => {
|
||||
it("reuses exported session markdown files for active and reset sessions", async () => {
|
||||
const sessionsDir = path.join(stateDir, "agents", agentId, "sessions");
|
||||
await fs.mkdir(sessionsDir, { recursive: true });
|
||||
const sessionFile = path.join(sessionsDir, "session-1.jsonl");
|
||||
const resetSessionFile = path.join(
|
||||
sessionsDir,
|
||||
"session-1.jsonl.reset.2026-02-14T19-44-05.473Z",
|
||||
);
|
||||
const exportFile = path.join(stateDir, "agents", agentId, "qmd", "sessions", "session-1.md");
|
||||
const resetExportFile = path.join(
|
||||
stateDir,
|
||||
"agents",
|
||||
agentId,
|
||||
"qmd",
|
||||
"sessions",
|
||||
"session-1.jsonl.reset.2026-02-14T19-44-05.473Z.md",
|
||||
);
|
||||
await fs.writeFile(
|
||||
sessionFile,
|
||||
'{"type":"message","message":{"role":"user","content":"hello"}}\n',
|
||||
"utf-8",
|
||||
);
|
||||
await fs.writeFile(
|
||||
resetSessionFile,
|
||||
'{"type":"message","message":{"role":"assistant","content":"reset snapshot"}}\n',
|
||||
"utf-8",
|
||||
);
|
||||
|
||||
const currentMemory = cfg.memory;
|
||||
cfg = {
|
||||
|
|
@ -2222,6 +2239,7 @@ describe("QmdMemoryManager", () => {
|
|||
...currentMemory?.qmd,
|
||||
sessions: {
|
||||
enabled: true,
|
||||
includeResetArchives: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
@ -2230,13 +2248,38 @@ describe("QmdMemoryManager", () => {
|
|||
const { manager } = await createManager();
|
||||
|
||||
try {
|
||||
await manager.sync({ reason: "manual" });
|
||||
await manager.sync({ reason: "manual", force: true });
|
||||
const firstExport = await fs.readFile(exportFile, "utf-8");
|
||||
const firstResetExport = await fs.readFile(resetExportFile, "utf-8");
|
||||
expect(firstExport).toContain("hello");
|
||||
expect(firstResetExport).toContain("reset snapshot");
|
||||
const firstSessionMtimeMs = (await fs.stat(exportFile)).mtimeMs;
|
||||
const firstResetMtimeMs = (await fs.stat(resetExportFile)).mtimeMs;
|
||||
|
||||
await manager.sync({ reason: "manual" });
|
||||
const secondExport = await fs.readFile(exportFile, "utf-8");
|
||||
const secondResetExport = await fs.readFile(resetExportFile, "utf-8");
|
||||
expect(secondExport).toBe(firstExport);
|
||||
expect(secondResetExport).toBe(firstResetExport);
|
||||
const secondSessionMtimeMs = (await fs.stat(exportFile)).mtimeMs;
|
||||
const secondResetMtimeMs = (await fs.stat(resetExportFile)).mtimeMs;
|
||||
expect(secondSessionMtimeMs).toBe(firstSessionMtimeMs);
|
||||
expect(secondResetMtimeMs).toBe(firstResetMtimeMs);
|
||||
|
||||
await fs.writeFile(
|
||||
resetSessionFile,
|
||||
'{"type":"message","message":{"role":"assistant","content":"follow-up update"}}\n',
|
||||
"utf-8",
|
||||
);
|
||||
await manager.sync({ reason: "manual", force: true });
|
||||
const thirdExport = await fs.readFile(exportFile, "utf-8");
|
||||
const thirdResetExport = await fs.readFile(resetExportFile, "utf-8");
|
||||
expect(thirdExport).toBe(secondExport);
|
||||
expect(thirdResetExport).toContain("follow-up update");
|
||||
const thirdSessionMtimeMs = (await fs.stat(exportFile)).mtimeMs;
|
||||
const thirdResetMtimeMs = (await fs.stat(resetExportFile)).mtimeMs;
|
||||
expect(thirdSessionMtimeMs).toBe(secondSessionMtimeMs);
|
||||
expect(thirdResetMtimeMs).toBeGreaterThan(secondResetMtimeMs);
|
||||
} finally {
|
||||
await manager.close();
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1435,7 +1435,9 @@ export class QmdMemoryManager implements MemorySearchManager {
|
|||
}
|
||||
const exportDir = this.sessionExporter.dir;
|
||||
await fs.mkdir(exportDir, { recursive: true });
|
||||
const files = await listSessionFilesForAgent(this.agentId);
|
||||
const files = await listSessionFilesForAgent(this.agentId, {
|
||||
includeResetArchives: this.qmd.sessions.includeResetArchives,
|
||||
});
|
||||
const keep = new Set<string>();
|
||||
const tracked = new Set<string>();
|
||||
const cutoff = this.sessionExporter.retentionMs
|
||||
|
|
|
|||
|
|
@ -1,8 +1,12 @@
|
|||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
import { buildSessionEntry } from "./session-files.js";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import {
|
||||
buildSessionEntry,
|
||||
isIndexableSessionTranscriptFileName,
|
||||
listSessionFilesForAgent,
|
||||
} from "./session-files.js";
|
||||
|
||||
describe("buildSessionEntry", () => {
|
||||
let tmpDir: string;
|
||||
|
|
@ -12,6 +16,7 @@ describe("buildSessionEntry", () => {
|
|||
});
|
||||
|
||||
afterEach(async () => {
|
||||
vi.unstubAllEnvs();
|
||||
await fs.rm(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
|
|
@ -84,4 +89,96 @@ describe("buildSessionEntry", () => {
|
|||
expect(entry).not.toBeNull();
|
||||
expect(entry!.lineMap).toEqual([3, 5]);
|
||||
});
|
||||
|
||||
it("identifies indexable transcript file names", () => {
|
||||
expect(isIndexableSessionTranscriptFileName("abc.jsonl")).toBe(true);
|
||||
expect(isIndexableSessionTranscriptFileName("abc.jsonl.reset.2026-02-14T19-44-05.473Z")).toBe(
|
||||
false,
|
||||
);
|
||||
expect(
|
||||
isIndexableSessionTranscriptFileName("abc.jsonl.reset.2026-02-14T19-44-05.473Z", {
|
||||
includeResetArchives: true,
|
||||
}),
|
||||
).toBe(true);
|
||||
expect(
|
||||
isIndexableSessionTranscriptFileName(
|
||||
"abc.jsonl.reset.2026-02-14T19-44-05.473Z.deleted.2026-02-16T10-46-16.541Z",
|
||||
),
|
||||
).toBe(false);
|
||||
expect(
|
||||
isIndexableSessionTranscriptFileName(
|
||||
"abc.jsonl.reset.2026-02-14T19-44-05.473Z.bak.2026-02-16T10-46-16.541Z",
|
||||
),
|
||||
).toBe(false);
|
||||
expect(isIndexableSessionTranscriptFileName("abc.jsonl.deleted.2026-02-16T10-46-16.541Z")).toBe(
|
||||
false,
|
||||
);
|
||||
expect(isIndexableSessionTranscriptFileName("abc.jsonl.bak.2026-02-16T10-46-16.541Z")).toBe(
|
||||
false,
|
||||
);
|
||||
expect(isIndexableSessionTranscriptFileName("abc.md")).toBe(false);
|
||||
});
|
||||
|
||||
it("lists only active transcript files by default", async () => {
|
||||
const stateDir = path.join(tmpDir, "state");
|
||||
vi.stubEnv("OPENCLAW_STATE_DIR", stateDir);
|
||||
const sessionsDir = path.join(stateDir, "agents", "main", "sessions");
|
||||
await fs.mkdir(sessionsDir, { recursive: true });
|
||||
await fs.writeFile(path.join(sessionsDir, "active.jsonl"), "");
|
||||
await fs.writeFile(path.join(sessionsDir, "archived.jsonl.reset.2026-02-14T19-44-05.473Z"), "");
|
||||
await fs.writeFile(
|
||||
path.join(
|
||||
sessionsDir,
|
||||
"archived.jsonl.reset.2026-02-14T19-44-05.473Z.deleted.2026-02-16T10-46-16.541Z",
|
||||
),
|
||||
"",
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(
|
||||
sessionsDir,
|
||||
"archived.jsonl.reset.2026-02-14T19-44-05.473Z.bak.2026-02-16T10-46-16.541Z",
|
||||
),
|
||||
"",
|
||||
);
|
||||
await fs.writeFile(path.join(sessionsDir, "pruned.jsonl.deleted.2026-02-16T10-46-16.541Z"), "");
|
||||
await fs.writeFile(path.join(sessionsDir, "ignored.jsonl.bak.2026-02-16T10-46-16.541Z"), "");
|
||||
await fs.writeFile(path.join(sessionsDir, "ignored.md"), "");
|
||||
|
||||
const files = await listSessionFilesForAgent("main");
|
||||
expect(files.map((file) => path.basename(file)).toSorted()).toEqual(["active.jsonl"]);
|
||||
});
|
||||
|
||||
it("lists active and reset transcript files when reset archives are enabled", async () => {
|
||||
const stateDir = path.join(tmpDir, "state");
|
||||
vi.stubEnv("OPENCLAW_STATE_DIR", stateDir);
|
||||
const sessionsDir = path.join(stateDir, "agents", "main", "sessions");
|
||||
await fs.mkdir(sessionsDir, { recursive: true });
|
||||
await fs.writeFile(path.join(sessionsDir, "active.jsonl"), "");
|
||||
await fs.writeFile(path.join(sessionsDir, "archived.jsonl.reset.2026-02-14T19-44-05.473Z"), "");
|
||||
await fs.writeFile(
|
||||
path.join(
|
||||
sessionsDir,
|
||||
"archived.jsonl.reset.2026-02-14T19-44-05.473Z.deleted.2026-02-16T10-46-16.541Z",
|
||||
),
|
||||
"",
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(
|
||||
sessionsDir,
|
||||
"archived.jsonl.reset.2026-02-14T19-44-05.473Z.bak.2026-02-16T10-46-16.541Z",
|
||||
),
|
||||
"",
|
||||
);
|
||||
await fs.writeFile(path.join(sessionsDir, "pruned.jsonl.deleted.2026-02-16T10-46-16.541Z"), "");
|
||||
await fs.writeFile(path.join(sessionsDir, "ignored.jsonl.bak.2026-02-16T10-46-16.541Z"), "");
|
||||
await fs.writeFile(path.join(sessionsDir, "ignored.md"), "");
|
||||
|
||||
const files = await listSessionFilesForAgent("main", {
|
||||
includeResetArchives: true,
|
||||
});
|
||||
expect(files.map((file) => path.basename(file)).toSorted()).toEqual([
|
||||
"active.jsonl",
|
||||
"archived.jsonl.reset.2026-02-14T19-44-05.473Z",
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { parseSessionArchiveTimestamp } from "../config/sessions/artifacts.js";
|
||||
import { resolveSessionTranscriptsDirForAgent } from "../config/sessions/paths.js";
|
||||
import { redactSensitiveText } from "../logging/redact.js";
|
||||
import { createSubsystemLogger } from "../logging/subsystem.js";
|
||||
|
|
@ -7,6 +8,16 @@ import { hashText } from "./internal.js";
|
|||
|
||||
const log = createSubsystemLogger("memory");
|
||||
|
||||
function isResetArchiveTranscriptFileName(fileName: string): boolean {
|
||||
return (
|
||||
fileName.includes(".jsonl.reset.") && parseSessionArchiveTimestamp(fileName, "reset") !== null
|
||||
);
|
||||
}
|
||||
|
||||
/** Filtering options shared by the session-transcript helpers in this module. */
type SessionTranscriptFilterOptions = {
  // Opt-in flag: when true, ".jsonl.reset.<timestamp>" archive transcripts
  // are treated as indexable alongside active ".jsonl" transcripts.
  includeResetArchives?: boolean;
};
|
||||
|
||||
export type SessionFileEntry = {
|
||||
path: string;
|
||||
absPath: string;
|
||||
|
|
@ -18,14 +29,45 @@ export type SessionFileEntry = {
|
|||
lineMap: number[];
|
||||
};
|
||||
|
||||
export async function listSessionFilesForAgent(agentId: string): Promise<string[]> {
|
||||
export function isIndexableSessionTranscriptFileName(
|
||||
fileName: string,
|
||||
opts?: SessionTranscriptFilterOptions,
|
||||
): boolean {
|
||||
const normalized = fileName.trim();
|
||||
if (!normalized) {
|
||||
return false;
|
||||
}
|
||||
if (normalized.endsWith(".jsonl")) {
|
||||
return true;
|
||||
}
|
||||
// Reset archives can include sensitive historical content; keep them opt-in.
|
||||
if (opts?.includeResetArchives !== true) {
|
||||
return false;
|
||||
}
|
||||
return isResetArchiveTranscriptFileName(normalized);
|
||||
}
|
||||
|
||||
export function isArchivedSessionTranscriptPath(
|
||||
filePath: string,
|
||||
opts?: SessionTranscriptFilterOptions,
|
||||
): boolean {
|
||||
if (opts?.includeResetArchives !== true) {
|
||||
return false;
|
||||
}
|
||||
return isResetArchiveTranscriptFileName(path.basename(filePath).trim());
|
||||
}
|
||||
|
||||
export async function listSessionFilesForAgent(
|
||||
agentId: string,
|
||||
opts?: SessionTranscriptFilterOptions,
|
||||
): Promise<string[]> {
|
||||
const dir = resolveSessionTranscriptsDirForAgent(agentId);
|
||||
try {
|
||||
const entries = await fs.readdir(dir, { withFileTypes: true });
|
||||
return entries
|
||||
.filter((entry) => entry.isFile())
|
||||
.map((entry) => entry.name)
|
||||
.filter((name) => name.endsWith(".jsonl"))
|
||||
.filter((name) => isIndexableSessionTranscriptFileName(name, opts))
|
||||
.map((name) => path.join(dir, name));
|
||||
} catch {
|
||||
return [];
|
||||
|
|
|
|||
Loading…
Reference in New Issue