refactor: share auto-reply reply helpers

This commit is contained in:
Peter Steinberger 2026-03-26 16:47:48 +00:00
parent 615fe4a06b
commit e1f0a85128
8 changed files with 117 additions and 189 deletions

View File

@ -13,30 +13,9 @@ import { buildContextReply } from "./commands-context-report.js";
import { buildExportSessionReply } from "./commands-export-session.js";
import { buildStatusReply } from "./commands-status.js";
import type { CommandHandler } from "./commands-types.js";
import { extractExplicitGroupId } from "./group-id.js";
import { resolveReplyToMode } from "./reply-threading.js";
/**
 * Pulls an explicitly-marked group/channel id out of a raw identifier string.
 * Recognizes "<channel>:group:<id>" / "<channel>:channel:<id>", bare
 * "group:<id>" / "channel:<id>", and WhatsApp JIDs ("whatsapp:<id>@g.us").
 * Returns undefined when no explicit group marker is present.
 */
function extractGroupId(raw: string | undefined | null): string | undefined {
  const value = (raw ?? "").trim();
  if (!value) {
    return undefined;
  }
  const segments = value.split(":").filter(Boolean);
  const [head, second] = segments;
  // Re-join from a given segment; an empty result means "no id".
  const joinFrom = (start: number): string | undefined =>
    segments.slice(start).join(":") || undefined;
  if (segments.length >= 3 && (second === "group" || second === "channel")) {
    return joinFrom(2);
  }
  const isWhatsAppGroupJid =
    segments.length >= 2 &&
    head?.toLowerCase() === "whatsapp" &&
    value.toLowerCase().includes("@g.us");
  if (isWhatsAppGroupJid) {
    return joinFrom(1);
  }
  if (segments.length >= 2 && (head === "group" || head === "channel")) {
    return joinFrom(1);
  }
  return undefined;
}
export const handleHelpCommand: CommandHandler = async (params, allowTextCommands) => {
if (!allowTextCommands) {
return null;
@ -166,7 +145,7 @@ export const handleToolsCommand: CommandHandler = async (params, allowTextComman
? String(params.ctx.MessageThreadId)
: undefined,
currentMessageId: threadingContext.currentMessageId,
groupId: params.sessionEntry?.groupId ?? extractGroupId(params.ctx.From),
groupId: params.sessionEntry?.groupId ?? extractExplicitGroupId(params.ctx.From),
groupChannel:
params.sessionEntry?.groupChannel ?? params.ctx.GroupChannel ?? params.ctx.GroupSubject,
groupSpace: params.sessionEntry?.space ?? params.ctx.GroupSpace,

View File

@ -7,6 +7,37 @@ import type { OpenClawConfig } from "../../config/config.js";
import { buildStatusReply } from "./commands-status.js";
import { buildCommandTestParams } from "./commands.test-harness.js";
// Minimal config shared by every test in this file. The `as` cast deliberately
// treats this partial literal as a full OpenClawConfig; only the fields read by
// the status-reply code paths are populated — TODO confirm no other fields are
// touched if buildStatusReply grows.
const baseCfg = {
commands: { text: true },
channels: { whatsapp: { allowFrom: ["*"] } },
session: { mainKey: "main", scope: "per-sender" },
} as OpenClawConfig;
/**
 * Builds a /status reply through the shared command test harness.
 * Optional overrides: `sessionKey` replaces both session and parent key;
 * `verbose` forces the resolved verbose level to "on".
 */
async function buildStatusReplyForTest(params: { sessionKey?: string; verbose?: boolean }) {
  const harness = buildCommandTestParams("/status", baseCfg);
  const effectiveKey = params.sessionKey ?? harness.sessionKey;
  const verboseLevel = params.verbose ? "on" : harness.resolvedVerboseLevel;
  return await buildStatusReply({
    cfg: baseCfg,
    command: harness.command,
    sessionEntry: harness.sessionEntry,
    sessionKey: effectiveKey,
    parentSessionKey: effectiveKey,
    sessionScope: harness.sessionScope,
    storePath: harness.storePath,
    provider: "anthropic",
    model: "claude-opus-4-5",
    contextTokens: 0,
    resolvedThinkLevel: harness.resolvedThinkLevel,
    resolvedFastMode: false,
    resolvedVerboseLevel: verboseLevel,
    resolvedReasoningLevel: harness.resolvedReasoningLevel,
    resolvedElevatedLevel: harness.resolvedElevatedLevel,
    resolveDefaultThinkingLevel: harness.resolveDefaultThinkingLevel,
    isGroup: harness.isGroup,
    defaultGroupActivation: harness.defaultGroupActivation,
  });
}
describe("buildStatusReply subagent summary", () => {
beforeEach(() => {
resetSubagentRegistryForTests();
@ -41,32 +72,7 @@ describe("buildStatusReply subagent summary", () => {
startedAt: Date.now() - 60_000,
});
const cfg = {
commands: { text: true },
channels: { whatsapp: { allowFrom: ["*"] } },
session: { mainKey: "main", scope: "per-sender" },
} as OpenClawConfig;
const params = buildCommandTestParams("/status", cfg);
const reply = await buildStatusReply({
cfg,
command: params.command,
sessionEntry: params.sessionEntry,
sessionKey: params.sessionKey,
parentSessionKey: params.sessionKey,
sessionScope: params.sessionScope,
storePath: params.storePath,
provider: "anthropic",
model: "claude-opus-4-5",
contextTokens: 0,
resolvedThinkLevel: params.resolvedThinkLevel,
resolvedFastMode: false,
resolvedVerboseLevel: params.resolvedVerboseLevel,
resolvedReasoningLevel: params.resolvedReasoningLevel,
resolvedElevatedLevel: params.resolvedElevatedLevel,
resolveDefaultThinkingLevel: params.resolveDefaultThinkingLevel,
isGroup: params.isGroup,
defaultGroupActivation: params.defaultGroupActivation,
});
const reply = await buildStatusReplyForTest({});
expect(reply?.text).toContain("🤖 Subagents: 1 active");
});
@ -96,32 +102,7 @@ describe("buildStatusReply subagent summary", () => {
outcome: { status: "ok" },
});
const cfg = {
commands: { text: true },
channels: { whatsapp: { allowFrom: ["*"] } },
session: { mainKey: "main", scope: "per-sender" },
} as OpenClawConfig;
const params = buildCommandTestParams("/status", cfg);
const reply = await buildStatusReply({
cfg,
command: params.command,
sessionEntry: params.sessionEntry,
sessionKey: params.sessionKey,
parentSessionKey: params.sessionKey,
sessionScope: params.sessionScope,
storePath: params.storePath,
provider: "anthropic",
model: "claude-opus-4-5",
contextTokens: 0,
resolvedThinkLevel: params.resolvedThinkLevel,
resolvedFastMode: false,
resolvedVerboseLevel: "on",
resolvedReasoningLevel: params.resolvedReasoningLevel,
resolvedElevatedLevel: params.resolvedElevatedLevel,
resolveDefaultThinkingLevel: params.resolveDefaultThinkingLevel,
isGroup: params.isGroup,
defaultGroupActivation: params.defaultGroupActivation,
});
const reply = await buildStatusReplyForTest({ verbose: true });
expect(reply?.text).toContain("🤖 Subagents: 1 active");
expect(reply?.text).not.toContain("· 1 done");
@ -174,32 +155,7 @@ describe("buildStatusReply subagent summary", () => {
startedAt: Date.now() - 30_000,
});
const cfg = {
commands: { text: true },
channels: { whatsapp: { allowFrom: ["*"] } },
session: { mainKey: "main", scope: "per-sender" },
} as OpenClawConfig;
const params = buildCommandTestParams("/status", cfg);
const reply = await buildStatusReply({
cfg,
command: params.command,
sessionEntry: params.sessionEntry,
sessionKey: oldParentKey,
parentSessionKey: oldParentKey,
sessionScope: params.sessionScope,
storePath: params.storePath,
provider: "anthropic",
model: "claude-opus-4-5",
contextTokens: 0,
resolvedThinkLevel: params.resolvedThinkLevel,
resolvedFastMode: false,
resolvedVerboseLevel: "on",
resolvedReasoningLevel: params.resolvedReasoningLevel,
resolvedElevatedLevel: params.resolvedElevatedLevel,
resolveDefaultThinkingLevel: params.resolveDefaultThinkingLevel,
isGroup: params.isGroup,
defaultGroupActivation: params.defaultGroupActivation,
});
const reply = await buildStatusReplyForTest({ sessionKey: oldParentKey, verbose: true });
expect(reply?.text).not.toContain("🤖 Subagents: 1 active");
expect(reply?.text).not.toContain("stale old parent child");

View File

@ -1,21 +1,12 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { OpenClawConfig } from "../../config/config.js";
import type { MsgContext } from "../templating.js";
import { registerGetReplyCommonMocks } from "./get-reply.test-mocks.js";
import "./get-reply.test-runtime-mocks.js";
const mocks = vi.hoisted(() => ({
resolveReplyDirectives: vi.fn(),
initSessionState: vi.fn(),
}));
registerGetReplyCommonMocks();
vi.mock("../../link-understanding/apply.runtime.js", () => ({
applyLinkUnderstanding: vi.fn(async () => undefined),
}));
vi.mock("../../media-understanding/apply.runtime.js", () => ({
applyMediaUnderstanding: vi.fn(async () => undefined),
}));
vi.mock("./directive-handling.defaults.js", () => ({
resolveDefaultModel: vi.fn(() => ({
defaultProvider: "openai",

View File

@ -1,6 +1,6 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { MsgContext } from "../templating.js";
import { registerGetReplyCommonMocks } from "./get-reply.test-mocks.js";
import "./get-reply.test-runtime-mocks.js";
const mocks = vi.hoisted(() => ({
resolveReplyDirectives: vi.fn(),
@ -8,15 +8,6 @@ const mocks = vi.hoisted(() => ({
emitResetCommandHooks: vi.fn(),
initSessionState: vi.fn(),
}));
registerGetReplyCommonMocks();
vi.mock("../../link-understanding/apply.runtime.js", () => ({
applyLinkUnderstanding: vi.fn(async () => undefined),
}));
vi.mock("../../media-understanding/apply.runtime.js", () => ({
applyMediaUnderstanding: vi.fn(async () => undefined),
}));
vi.mock("./commands-core.js", () => ({
emitResetCommandHooks: (...args: unknown[]) => mocks.emitResetCommandHooks(...args),
}));

View File

@ -0,0 +1,12 @@
// Shared side-effect module for get-reply test suites: importing this file
// registers the common get-reply mocks and stubs out the link/media
// understanding runtime hooks, so individual suites don't repeat the same
// vi.mock boilerplate. (vi.mock calls are hoisted by Vitest, so import order
// in consuming files does not matter.)
import { vi } from "vitest";
import { registerGetReplyCommonMocks } from "./get-reply.test-mocks.js";
registerGetReplyCommonMocks();
// Link understanding becomes a no-op in tests.
vi.mock("../../link-understanding/apply.runtime.js", () => ({
applyLinkUnderstanding: vi.fn(async () => undefined),
}));
// Media understanding becomes a no-op in tests.
vi.mock("../../media-understanding/apply.runtime.js", () => ({
applyMediaUnderstanding: vi.fn(async () => undefined),
}));

View File

@ -0,0 +1,21 @@
/**
 * Extracts an explicitly-addressed group/channel id from a raw "From"-style
 * identifier. Supported shapes: "<channel>:group:<id>" / "<channel>:channel:<id>",
 * bare "group:<id>" / "channel:<id>", and WhatsApp group JIDs
 * ("whatsapp:<id>@g.us"). Returns undefined when no explicit group marker exists.
 */
export function extractExplicitGroupId(raw: string | undefined | null): string | undefined {
  const input = (raw ?? "").trim();
  if (input.length === 0) {
    return undefined;
  }
  const pieces = input.split(":").filter((piece) => piece.length > 0);
  const tail = (from: number): string | undefined => pieces.slice(from).join(":") || undefined;
  // "<channel>:group:<id>" or "<channel>:channel:<id>"
  if (pieces.length > 2 && (pieces[1] === "group" || pieces[1] === "channel")) {
    return tail(2);
  }
  // "whatsapp:<jid>" where the JID carries the @g.us group suffix
  if (
    pieces.length > 1 &&
    pieces[0]?.toLowerCase() === "whatsapp" &&
    input.toLowerCase().includes("@g.us")
  ) {
    return tail(1);
  }
  // bare "group:<id>" or "channel:<id>"
  if (pieces.length > 1 && (pieces[0] === "group" || pieces[0] === "channel")) {
    return tail(1);
  }
  return undefined;
}

View File

@ -12,27 +12,11 @@ import type { GroupKeyResolution, SessionEntry } from "../../config/sessions.js"
import { isInternalMessageChannel } from "../../utils/message-channel.js";
import { normalizeGroupActivation } from "../group-activation.js";
import type { TemplateContext } from "../templating.js";
import { extractExplicitGroupId } from "./group-id.js";
function extractGroupId(raw: string | undefined | null): string | undefined {
function resolveGroupId(raw: string | undefined | null): string | undefined {
const trimmed = (raw ?? "").trim();
if (!trimmed) {
return undefined;
}
const parts = trimmed.split(":").filter(Boolean);
if (parts.length >= 3 && (parts[1] === "group" || parts[1] === "channel")) {
return parts.slice(2).join(":") || undefined;
}
if (
parts.length >= 2 &&
parts[0]?.toLowerCase() === "whatsapp" &&
trimmed.toLowerCase().includes("@g.us")
) {
return parts.slice(1).join(":") || undefined;
}
if (parts.length >= 2 && (parts[0] === "group" || parts[0] === "channel")) {
return parts.slice(1).join(":") || undefined;
}
return trimmed;
return extractExplicitGroupId(trimmed) ?? (trimmed || undefined);
}
function resolveDockChannelId(raw?: string | null): ChannelId | null {
@ -83,7 +67,7 @@ export function resolveGroupRequireMention(params: {
if (!channel) {
return true;
}
const groupId = groupResolution?.id ?? extractGroupId(ctx.From);
const groupId = groupResolution?.id ?? resolveGroupId(ctx.From);
const groupChannel = ctx.GroupChannel?.trim() ?? ctx.GroupSubject?.trim();
const groupSpace = ctx.GroupSpace?.trim();
let requireMention: boolean | undefined;
@ -184,7 +168,7 @@ export function buildGroupIntro(params: {
activation === "always"
? "Activation: always-on (you receive every group message)."
: "Activation: trigger-only (you are invoked only when explicitly mentioned; recent context may be included).";
const groupId = params.sessionEntry?.groupId ?? extractGroupId(params.sessionCtx.From);
const groupId = params.sessionEntry?.groupId ?? resolveGroupId(params.sessionCtx.From);
const groupChannel =
params.sessionCtx.GroupChannel?.trim() ?? params.sessionCtx.GroupSubject?.trim();
const groupSpace = params.sessionCtx.GroupSpace?.trim();

View File

@ -170,6 +170,42 @@ export function resolveMemoryFlushContextWindowTokens(params: {
);
}
/**
 * Normalizes an optional token count: finite values greater than zero are
 * floored to an integer; anything else (undefined, NaN, Infinity, <= 0)
 * resolves to undefined.
 */
function resolvePositiveTokenCount(value: number | undefined): number | undefined {
  if (typeof value !== "number" || !Number.isFinite(value) || value <= 0) {
    return undefined;
  }
  return Math.floor(value);
}
/**
 * Shared gate computation for the memory-flush / preflight-compaction checks.
 * Resolves the effective total token count (explicit override wins over the
 * fresh session total) and the flush threshold derived from the context
 * window minus reserve and soft-threshold budgets. Returns null when the
 * entry is missing, no positive token total is available, or the computed
 * threshold is not positive.
 */
function resolveMemoryFlushGateState<
  TEntry extends Pick<SessionEntry, "totalTokens" | "totalTokensFresh">,
>(params: {
  entry?: TEntry;
  tokenCount?: number;
  contextWindowTokens: number;
  reserveTokensFloor: number;
  softThresholdTokens: number;
}): { entry: TEntry; totalTokens: number; threshold: number } | null {
  const { entry } = params;
  if (!entry) {
    return null;
  }
  // Caller-supplied override takes precedence over the session's fresh total.
  const override = resolvePositiveTokenCount(params.tokenCount);
  const totalTokens = override ?? resolveFreshSessionTotalTokens(entry);
  if (!totalTokens || totalTokens <= 0) {
    return null;
  }
  const window = Math.max(1, Math.floor(params.contextWindowTokens));
  const reserve = Math.max(0, Math.floor(params.reserveTokensFloor));
  const soft = Math.max(0, Math.floor(params.softThresholdTokens));
  const threshold = Math.max(0, window - reserve - soft);
  return threshold > 0 ? { entry, totalTokens, threshold } : null;
}
export function shouldRunMemoryFlush(params: {
entry?: Pick<
SessionEntry,
@ -185,32 +221,12 @@ export function shouldRunMemoryFlush(params: {
reserveTokensFloor: number;
softThresholdTokens: number;
}): boolean {
if (!params.entry) {
const state = resolveMemoryFlushGateState(params);
if (!state || state.totalTokens < state.threshold) {
return false;
}
const override = params.tokenCount;
const overrideTokens =
typeof override === "number" && Number.isFinite(override) && override > 0
? Math.floor(override)
: undefined;
const totalTokens = overrideTokens ?? resolveFreshSessionTotalTokens(params.entry);
if (!totalTokens || totalTokens <= 0) {
return false;
}
const contextWindow = Math.max(1, Math.floor(params.contextWindowTokens));
const reserveTokens = Math.max(0, Math.floor(params.reserveTokensFloor));
const softThreshold = Math.max(0, Math.floor(params.softThresholdTokens));
const threshold = Math.max(0, contextWindow - reserveTokens - softThreshold);
if (threshold <= 0) {
return false;
}
if (totalTokens < threshold) {
return false;
}
if (hasAlreadyFlushedForCurrentCompaction(params.entry)) {
if (hasAlreadyFlushedForCurrentCompaction(state.entry)) {
return false;
}
@ -229,30 +245,8 @@ export function shouldRunPreflightCompaction(params: {
reserveTokensFloor: number;
softThresholdTokens: number;
}): boolean {
if (!params.entry) {
return false;
}
const override = params.tokenCount;
const overrideTokens =
typeof override === "number" && Number.isFinite(override) && override > 0
? Math.floor(override)
: undefined;
const totalTokens = overrideTokens ?? resolveFreshSessionTotalTokens(params.entry);
if (!totalTokens || totalTokens <= 0) {
return false;
}
const contextWindow = Math.max(1, Math.floor(params.contextWindowTokens));
const reserveTokens = Math.max(0, Math.floor(params.reserveTokensFloor));
const softThreshold = Math.max(0, Math.floor(params.softThresholdTokens));
const threshold = Math.max(0, contextWindow - reserveTokens - softThreshold);
if (threshold <= 0) {
return false;
}
return totalTokens >= threshold;
const state = resolveMemoryFlushGateState(params);
return Boolean(state && state.totalTokens >= state.threshold);
}
/**