mirror of https://github.com/openclaw/openclaw.git
fix(runtime): prevent duplicate messages by sharing singleton state across bundled chunks (#43683)
* Tests: add fresh module import helper * Process: share command queue runtime state * Agents: share embedded run runtime state * Reply: share followup queue runtime state * Reply: share followup drain callback state * Reply: share queued message dedupe state * Reply: share inbound dedupe state * Tests: cover shared command queue runtime state * Tests: cover shared embedded run runtime state * Tests: cover shared followup queue runtime state * Tests: cover shared inbound dedupe state * Tests: cover shared Slack thread participation state * Slack: share sent thread participation state * Tests: document fresh import helper * Telegram: share draft stream runtime state * Tests: cover shared Telegram draft stream state * Telegram: share sent message cache state * Tests: cover shared Telegram sent message cache * Telegram: share thread binding runtime state * Tests: cover shared Telegram thread binding state * Tests: avoid duplicate shared queue reset * refactor(runtime): centralize global singleton access * refactor(runtime): preserve undefined global singleton values * test(runtime): cover undefined global singleton values --------- Co-authored-by: Nimrod Gutman <nimrod.gutman@gmail.com>
This commit is contained in:
parent
08aa57a3de
commit
4ca84acf24
|
|
@ -1,4 +1,5 @@
|
|||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { importFreshModule } from "../../../test/helpers/import-fresh.js";
|
||||
import {
|
||||
__testing,
|
||||
abortEmbeddedPiRun,
|
||||
|
|
@ -105,4 +106,35 @@ describe("pi-embedded runner run registry", () => {
|
|||
vi.useRealTimers();
|
||||
}
|
||||
});
|
||||
|
||||
it("shares active run state across distinct module instances", async () => {
|
||||
const runsA = await importFreshModule<typeof import("./runs.js")>(
|
||||
import.meta.url,
|
||||
"./runs.js?scope=shared-a",
|
||||
);
|
||||
const runsB = await importFreshModule<typeof import("./runs.js")>(
|
||||
import.meta.url,
|
||||
"./runs.js?scope=shared-b",
|
||||
);
|
||||
const handle = {
|
||||
queueMessage: async () => {},
|
||||
isStreaming: () => true,
|
||||
isCompacting: () => false,
|
||||
abort: vi.fn(),
|
||||
};
|
||||
|
||||
runsA.__testing.resetActiveEmbeddedRuns();
|
||||
runsB.__testing.resetActiveEmbeddedRuns();
|
||||
|
||||
try {
|
||||
runsA.setActiveEmbeddedRun("session-shared", handle);
|
||||
expect(runsB.isEmbeddedPiRunActive("session-shared")).toBe(true);
|
||||
|
||||
runsB.clearActiveEmbeddedRun("session-shared", handle);
|
||||
expect(runsA.isEmbeddedPiRunActive("session-shared")).toBe(false);
|
||||
} finally {
|
||||
runsA.__testing.resetActiveEmbeddedRuns();
|
||||
runsB.__testing.resetActiveEmbeddedRuns();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import {
|
|||
logMessageQueued,
|
||||
logSessionStateChange,
|
||||
} from "../../logging/diagnostic.js";
|
||||
import { resolveGlobalSingleton } from "../../shared/global-singleton.js";
|
||||
|
||||
type EmbeddedPiQueueHandle = {
|
||||
queueMessage: (text: string) => Promise<void>;
|
||||
|
|
@ -11,12 +12,23 @@ type EmbeddedPiQueueHandle = {
|
|||
abort: () => void;
|
||||
};
|
||||
|
||||
const ACTIVE_EMBEDDED_RUNS = new Map<string, EmbeddedPiQueueHandle>();
|
||||
type EmbeddedRunWaiter = {
|
||||
resolve: (ended: boolean) => void;
|
||||
timer: NodeJS.Timeout;
|
||||
};
|
||||
const EMBEDDED_RUN_WAITERS = new Map<string, Set<EmbeddedRunWaiter>>();
|
||||
|
||||
/**
|
||||
* Use global singleton state so busy/streaming checks stay consistent even
|
||||
* when the bundler emits multiple copies of this module into separate chunks.
|
||||
*/
|
||||
const EMBEDDED_RUN_STATE_KEY = Symbol.for("openclaw.embeddedRunState");
|
||||
|
||||
const embeddedRunState = resolveGlobalSingleton(EMBEDDED_RUN_STATE_KEY, () => ({
|
||||
activeRuns: new Map<string, EmbeddedPiQueueHandle>(),
|
||||
waiters: new Map<string, Set<EmbeddedRunWaiter>>(),
|
||||
}));
|
||||
const ACTIVE_EMBEDDED_RUNS = embeddedRunState.activeRuns;
|
||||
const EMBEDDED_RUN_WAITERS = embeddedRunState.waiters;
|
||||
|
||||
export function queueEmbeddedPiMessage(sessionId: string, text: string): boolean {
|
||||
const handle = ACTIVE_EMBEDDED_RUNS.get(sessionId);
|
||||
|
|
|
|||
|
|
@ -0,0 +1,43 @@
|
|||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { importFreshModule } from "../../../test/helpers/import-fresh.js";
|
||||
import type { MsgContext } from "../templating.js";
|
||||
import { resetInboundDedupe } from "./inbound-dedupe.js";
|
||||
|
||||
const sharedInboundContext: MsgContext = {
|
||||
Provider: "discord",
|
||||
Surface: "discord",
|
||||
From: "discord:user-1",
|
||||
To: "channel:c1",
|
||||
OriginatingChannel: "discord",
|
||||
OriginatingTo: "channel:c1",
|
||||
SessionKey: "agent:main:discord:channel:c1",
|
||||
MessageSid: "msg-1",
|
||||
};
|
||||
|
||||
describe("inbound dedupe", () => {
|
||||
afterEach(() => {
|
||||
resetInboundDedupe();
|
||||
});
|
||||
|
||||
it("shares dedupe state across distinct module instances", async () => {
|
||||
const inboundA = await importFreshModule<typeof import("./inbound-dedupe.js")>(
|
||||
import.meta.url,
|
||||
"./inbound-dedupe.js?scope=shared-a",
|
||||
);
|
||||
const inboundB = await importFreshModule<typeof import("./inbound-dedupe.js")>(
|
||||
import.meta.url,
|
||||
"./inbound-dedupe.js?scope=shared-b",
|
||||
);
|
||||
|
||||
inboundA.resetInboundDedupe();
|
||||
inboundB.resetInboundDedupe();
|
||||
|
||||
try {
|
||||
expect(inboundA.shouldSkipDuplicateInbound(sharedInboundContext)).toBe(false);
|
||||
expect(inboundB.shouldSkipDuplicateInbound(sharedInboundContext)).toBe(true);
|
||||
} finally {
|
||||
inboundA.resetInboundDedupe();
|
||||
inboundB.resetInboundDedupe();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
@ -1,15 +1,24 @@
|
|||
import { logVerbose, shouldLogVerbose } from "../../globals.js";
|
||||
import { createDedupeCache, type DedupeCache } from "../../infra/dedupe.js";
|
||||
import { parseAgentSessionKey } from "../../sessions/session-key-utils.js";
|
||||
import { resolveGlobalSingleton } from "../../shared/global-singleton.js";
|
||||
import type { MsgContext } from "../templating.js";
|
||||
|
||||
const DEFAULT_INBOUND_DEDUPE_TTL_MS = 20 * 60_000;
|
||||
const DEFAULT_INBOUND_DEDUPE_MAX = 5000;
|
||||
|
||||
const inboundDedupeCache = createDedupeCache({
|
||||
ttlMs: DEFAULT_INBOUND_DEDUPE_TTL_MS,
|
||||
maxSize: DEFAULT_INBOUND_DEDUPE_MAX,
|
||||
});
|
||||
/**
|
||||
* Keep inbound dedupe shared across bundled chunks so the same provider
|
||||
* message cannot bypass dedupe by entering through a different chunk copy.
|
||||
*/
|
||||
const INBOUND_DEDUPE_CACHE_KEY = Symbol.for("openclaw.inboundDedupeCache");
|
||||
|
||||
const inboundDedupeCache = resolveGlobalSingleton<DedupeCache>(INBOUND_DEDUPE_CACHE_KEY, () =>
|
||||
createDedupeCache({
|
||||
ttlMs: DEFAULT_INBOUND_DEDUPE_TTL_MS,
|
||||
maxSize: DEFAULT_INBOUND_DEDUPE_MAX,
|
||||
}),
|
||||
);
|
||||
|
||||
const normalizeProvider = (value?: string | null) => value?.trim().toLowerCase() || "";
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { defaultRuntime } from "../../../runtime.js";
|
||||
import { resolveGlobalMap } from "../../../shared/global-singleton.js";
|
||||
import {
|
||||
buildCollectPrompt,
|
||||
beginQueueDrain,
|
||||
|
|
@ -15,7 +16,11 @@ import type { FollowupRun } from "./types.js";
|
|||
|
||||
// Persists the most recent runFollowup callback per queue key so that
|
||||
// enqueueFollowupRun can restart a drain that finished and deleted the queue.
|
||||
const FOLLOWUP_RUN_CALLBACKS = new Map<string, (run: FollowupRun) => Promise<void>>();
|
||||
const FOLLOWUP_DRAIN_CALLBACKS_KEY = Symbol.for("openclaw.followupDrainCallbacks");
|
||||
|
||||
const FOLLOWUP_RUN_CALLBACKS = resolveGlobalMap<string, (run: FollowupRun) => Promise<void>>(
|
||||
FOLLOWUP_DRAIN_CALLBACKS_KEY,
|
||||
);
|
||||
|
||||
export function clearFollowupDrainCallback(key: string): void {
|
||||
FOLLOWUP_RUN_CALLBACKS.delete(key);
|
||||
|
|
|
|||
|
|
@ -1,13 +1,22 @@
|
|||
import { createDedupeCache } from "../../../infra/dedupe.js";
|
||||
import { resolveGlobalSingleton } from "../../../shared/global-singleton.js";
|
||||
import { applyQueueDropPolicy, shouldSkipQueueItem } from "../../../utils/queue-helpers.js";
|
||||
import { kickFollowupDrainIfIdle } from "./drain.js";
|
||||
import { getExistingFollowupQueue, getFollowupQueue } from "./state.js";
|
||||
import type { FollowupRun, QueueDedupeMode, QueueSettings } from "./types.js";
|
||||
|
||||
const RECENT_QUEUE_MESSAGE_IDS = createDedupeCache({
|
||||
ttlMs: 5 * 60 * 1000,
|
||||
maxSize: 10_000,
|
||||
});
|
||||
/**
|
||||
* Keep queued message-id dedupe shared across bundled chunks so redeliveries
|
||||
* are rejected no matter which chunk receives the enqueue call.
|
||||
*/
|
||||
const RECENT_QUEUE_MESSAGE_IDS_KEY = Symbol.for("openclaw.recentQueueMessageIds");
|
||||
|
||||
const RECENT_QUEUE_MESSAGE_IDS = resolveGlobalSingleton(RECENT_QUEUE_MESSAGE_IDS_KEY, () =>
|
||||
createDedupeCache({
|
||||
ttlMs: 5 * 60 * 1000,
|
||||
maxSize: 10_000,
|
||||
}),
|
||||
);
|
||||
|
||||
function buildRecentMessageIdKey(run: FollowupRun, queueKey: string): string | undefined {
|
||||
const messageId = run.messageId?.trim();
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import { resolveGlobalMap } from "../../../shared/global-singleton.js";
|
||||
import { applyQueueRuntimeSettings } from "../../../utils/queue-helpers.js";
|
||||
import type { FollowupRun, QueueDropPolicy, QueueMode, QueueSettings } from "./types.js";
|
||||
|
||||
|
|
@ -18,7 +19,13 @@ export const DEFAULT_QUEUE_DEBOUNCE_MS = 1000;
|
|||
export const DEFAULT_QUEUE_CAP = 20;
|
||||
export const DEFAULT_QUEUE_DROP: QueueDropPolicy = "summarize";
|
||||
|
||||
export const FOLLOWUP_QUEUES = new Map<string, FollowupQueueState>();
|
||||
/**
|
||||
* Share followup queues across bundled chunks so busy-session enqueue/drain
|
||||
* logic observes one queue registry per process.
|
||||
*/
|
||||
const FOLLOWUP_QUEUES_KEY = Symbol.for("openclaw.followupQueues");
|
||||
|
||||
export const FOLLOWUP_QUEUES = resolveGlobalMap<string, FollowupQueueState>(FOLLOWUP_QUEUES_KEY);
|
||||
|
||||
export function getExistingFollowupQueue(key: string): FollowupQueueState | undefined {
|
||||
const cleaned = key.trim();
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { afterAll, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { importFreshModule } from "../../../test/helpers/import-fresh.js";
|
||||
import { expectInboundContextContract } from "../../../test/helpers/inbound-contract.js";
|
||||
import type { OpenClawConfig } from "../../config/config.js";
|
||||
import { defaultRuntime } from "../../runtime.js";
|
||||
|
|
@ -743,6 +744,71 @@ describe("followup queue deduplication", () => {
|
|||
expect(calls).toHaveLength(1);
|
||||
});
|
||||
|
||||
it("deduplicates same message_id across distinct enqueue module instances", async () => {
|
||||
const enqueueA = await importFreshModule<typeof import("./queue/enqueue.js")>(
|
||||
import.meta.url,
|
||||
"./queue/enqueue.js?scope=dedupe-a",
|
||||
);
|
||||
const enqueueB = await importFreshModule<typeof import("./queue/enqueue.js")>(
|
||||
import.meta.url,
|
||||
"./queue/enqueue.js?scope=dedupe-b",
|
||||
);
|
||||
const { clearSessionQueues } = await import("./queue.js");
|
||||
const key = `test-dedup-cross-module-${Date.now()}`;
|
||||
const calls: FollowupRun[] = [];
|
||||
const done = createDeferred<void>();
|
||||
const runFollowup = async (run: FollowupRun) => {
|
||||
calls.push(run);
|
||||
done.resolve();
|
||||
};
|
||||
const settings: QueueSettings = {
|
||||
mode: "collect",
|
||||
debounceMs: 0,
|
||||
cap: 50,
|
||||
dropPolicy: "summarize",
|
||||
};
|
||||
|
||||
enqueueA.resetRecentQueuedMessageIdDedupe();
|
||||
enqueueB.resetRecentQueuedMessageIdDedupe();
|
||||
|
||||
try {
|
||||
expect(
|
||||
enqueueA.enqueueFollowupRun(
|
||||
key,
|
||||
createRun({
|
||||
prompt: "first",
|
||||
messageId: "same-id",
|
||||
originatingChannel: "signal",
|
||||
originatingTo: "+10000000000",
|
||||
}),
|
||||
settings,
|
||||
),
|
||||
).toBe(true);
|
||||
|
||||
scheduleFollowupDrain(key, runFollowup);
|
||||
await done.promise;
|
||||
await new Promise<void>((resolve) => setImmediate(resolve));
|
||||
|
||||
expect(
|
||||
enqueueB.enqueueFollowupRun(
|
||||
key,
|
||||
createRun({
|
||||
prompt: "first-redelivery",
|
||||
messageId: "same-id",
|
||||
originatingChannel: "signal",
|
||||
originatingTo: "+10000000000",
|
||||
}),
|
||||
settings,
|
||||
),
|
||||
).toBe(false);
|
||||
expect(calls).toHaveLength(1);
|
||||
} finally {
|
||||
clearSessionQueues([key]);
|
||||
enqueueA.resetRecentQueuedMessageIdDedupe();
|
||||
enqueueB.resetRecentQueuedMessageIdDedupe();
|
||||
}
|
||||
});
|
||||
|
||||
it("does not collide recent message-id keys when routing contains delimiters", async () => {
|
||||
const key = `test-dedup-key-collision-${Date.now()}`;
|
||||
const calls: FollowupRun[] = [];
|
||||
|
|
@ -1264,6 +1330,55 @@ describe("followup queue drain restart after idle window", () => {
|
|||
expect(calls[1]?.prompt).toBe("after-idle");
|
||||
});
|
||||
|
||||
it("restarts an idle drain across distinct enqueue and drain module instances", async () => {
|
||||
const drainA = await importFreshModule<typeof import("./queue/drain.js")>(
|
||||
import.meta.url,
|
||||
"./queue/drain.js?scope=restart-a",
|
||||
);
|
||||
const enqueueB = await importFreshModule<typeof import("./queue/enqueue.js")>(
|
||||
import.meta.url,
|
||||
"./queue/enqueue.js?scope=restart-b",
|
||||
);
|
||||
const { clearSessionQueues } = await import("./queue.js");
|
||||
const key = `test-idle-window-cross-module-${Date.now()}`;
|
||||
const calls: FollowupRun[] = [];
|
||||
const settings: QueueSettings = { mode: "followup", debounceMs: 0, cap: 50 };
|
||||
const firstProcessed = createDeferred<void>();
|
||||
|
||||
enqueueB.resetRecentQueuedMessageIdDedupe();
|
||||
|
||||
try {
|
||||
const runFollowup = async (run: FollowupRun) => {
|
||||
calls.push(run);
|
||||
if (calls.length === 1) {
|
||||
firstProcessed.resolve();
|
||||
}
|
||||
};
|
||||
|
||||
enqueueB.enqueueFollowupRun(key, createRun({ prompt: "before-idle" }), settings);
|
||||
drainA.scheduleFollowupDrain(key, runFollowup);
|
||||
await firstProcessed.promise;
|
||||
|
||||
await new Promise<void>((resolve) => setImmediate(resolve));
|
||||
|
||||
enqueueB.enqueueFollowupRun(key, createRun({ prompt: "after-idle" }), settings);
|
||||
|
||||
await vi.waitFor(
|
||||
() => {
|
||||
expect(calls).toHaveLength(2);
|
||||
},
|
||||
{ timeout: 1_000 },
|
||||
);
|
||||
|
||||
expect(calls[0]?.prompt).toBe("before-idle");
|
||||
expect(calls[1]?.prompt).toBe("after-idle");
|
||||
} finally {
|
||||
clearSessionQueues([key]);
|
||||
drainA.clearFollowupDrainCallback(key);
|
||||
enqueueB.resetRecentQueuedMessageIdDedupe();
|
||||
}
|
||||
});
|
||||
|
||||
it("does not double-drain when a message arrives while drain is still running", async () => {
|
||||
const key = `test-no-double-drain-${Date.now()}`;
|
||||
const calls: FollowupRun[] = [];
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { importFreshModule } from "../../test/helpers/import-fresh.js";
|
||||
|
||||
const diagnosticMocks = vi.hoisted(() => ({
|
||||
logLaneEnqueue: vi.fn(),
|
||||
|
|
@ -334,4 +335,42 @@ describe("command queue", () => {
|
|||
resetAllLanes();
|
||||
await expect(enqueueCommand(async () => "ok")).resolves.toBe("ok");
|
||||
});
|
||||
|
||||
it("shares lane state across distinct module instances", async () => {
|
||||
const commandQueueA = await importFreshModule<typeof import("./command-queue.js")>(
|
||||
import.meta.url,
|
||||
"./command-queue.js?scope=shared-a",
|
||||
);
|
||||
const commandQueueB = await importFreshModule<typeof import("./command-queue.js")>(
|
||||
import.meta.url,
|
||||
"./command-queue.js?scope=shared-b",
|
||||
);
|
||||
const lane = `shared-state-${Date.now()}-${Math.random().toString(16).slice(2)}`;
|
||||
|
||||
let release!: () => void;
|
||||
const blocker = new Promise<void>((resolve) => {
|
||||
release = resolve;
|
||||
});
|
||||
|
||||
commandQueueA.resetAllLanes();
|
||||
|
||||
try {
|
||||
const task = commandQueueA.enqueueCommandInLane(lane, async () => {
|
||||
await blocker;
|
||||
return "done";
|
||||
});
|
||||
|
||||
await vi.waitFor(() => {
|
||||
expect(commandQueueB.getQueueSize(lane)).toBe(1);
|
||||
expect(commandQueueB.getActiveTaskCount()).toBe(1);
|
||||
});
|
||||
|
||||
release();
|
||||
await expect(task).resolves.toBe("done");
|
||||
expect(commandQueueB.getQueueSize(lane)).toBe(0);
|
||||
} finally {
|
||||
release();
|
||||
commandQueueA.resetAllLanes();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { diagnosticLogger as diag, logLaneDequeue, logLaneEnqueue } from "../logging/diagnostic.js";
|
||||
import { resolveGlobalSingleton } from "../shared/global-singleton.js";
|
||||
import { CommandLane } from "./lanes.js";
|
||||
/**
|
||||
* Dedicated error type thrown when a queued command is rejected because
|
||||
|
|
@ -23,9 +24,6 @@ export class GatewayDrainingError extends Error {
|
|||
}
|
||||
}
|
||||
|
||||
// Set while gateway is draining for restart; new enqueues are rejected.
|
||||
let gatewayDraining = false;
|
||||
|
||||
// Minimal in-process queue to serialize command executions.
|
||||
// Default lane ("main") preserves the existing behavior. Additional lanes allow
|
||||
// low-risk parallelism (e.g. cron jobs) without interleaving stdin / logs for
|
||||
|
|
@ -49,11 +47,20 @@ type LaneState = {
|
|||
generation: number;
|
||||
};
|
||||
|
||||
const lanes = new Map<string, LaneState>();
|
||||
let nextTaskId = 1;
|
||||
/**
|
||||
* Keep queue runtime state on globalThis so every bundled entry/chunk shares
|
||||
* the same lanes, counters, and draining flag in production builds.
|
||||
*/
|
||||
const COMMAND_QUEUE_STATE_KEY = Symbol.for("openclaw.commandQueueState");
|
||||
|
||||
const queueState = resolveGlobalSingleton(COMMAND_QUEUE_STATE_KEY, () => ({
|
||||
gatewayDraining: false,
|
||||
lanes: new Map<string, LaneState>(),
|
||||
nextTaskId: 1,
|
||||
}));
|
||||
|
||||
function getLaneState(lane: string): LaneState {
|
||||
const existing = lanes.get(lane);
|
||||
const existing = queueState.lanes.get(lane);
|
||||
if (existing) {
|
||||
return existing;
|
||||
}
|
||||
|
|
@ -65,7 +72,7 @@ function getLaneState(lane: string): LaneState {
|
|||
draining: false,
|
||||
generation: 0,
|
||||
};
|
||||
lanes.set(lane, created);
|
||||
queueState.lanes.set(lane, created);
|
||||
return created;
|
||||
}
|
||||
|
||||
|
|
@ -105,7 +112,7 @@ function drainLane(lane: string) {
|
|||
);
|
||||
}
|
||||
logLaneDequeue(lane, waitedMs, state.queue.length);
|
||||
const taskId = nextTaskId++;
|
||||
const taskId = queueState.nextTaskId++;
|
||||
const taskGeneration = state.generation;
|
||||
state.activeTaskIds.add(taskId);
|
||||
void (async () => {
|
||||
|
|
@ -148,7 +155,7 @@ function drainLane(lane: string) {
|
|||
* `GatewayDrainingError` instead of being silently killed on shutdown.
|
||||
*/
|
||||
export function markGatewayDraining(): void {
|
||||
gatewayDraining = true;
|
||||
queueState.gatewayDraining = true;
|
||||
}
|
||||
|
||||
export function setCommandLaneConcurrency(lane: string, maxConcurrent: number) {
|
||||
|
|
@ -166,7 +173,7 @@ export function enqueueCommandInLane<T>(
|
|||
onWait?: (waitMs: number, queuedAhead: number) => void;
|
||||
},
|
||||
): Promise<T> {
|
||||
if (gatewayDraining) {
|
||||
if (queueState.gatewayDraining) {
|
||||
return Promise.reject(new GatewayDrainingError());
|
||||
}
|
||||
const cleaned = lane.trim() || CommandLane.Main;
|
||||
|
|
@ -198,7 +205,7 @@ export function enqueueCommand<T>(
|
|||
|
||||
export function getQueueSize(lane: string = CommandLane.Main) {
|
||||
const resolved = lane.trim() || CommandLane.Main;
|
||||
const state = lanes.get(resolved);
|
||||
const state = queueState.lanes.get(resolved);
|
||||
if (!state) {
|
||||
return 0;
|
||||
}
|
||||
|
|
@ -207,7 +214,7 @@ export function getQueueSize(lane: string = CommandLane.Main) {
|
|||
|
||||
export function getTotalQueueSize() {
|
||||
let total = 0;
|
||||
for (const s of lanes.values()) {
|
||||
for (const s of queueState.lanes.values()) {
|
||||
total += s.queue.length + s.activeTaskIds.size;
|
||||
}
|
||||
return total;
|
||||
|
|
@ -215,7 +222,7 @@ export function getTotalQueueSize() {
|
|||
|
||||
export function clearCommandLane(lane: string = CommandLane.Main) {
|
||||
const cleaned = lane.trim() || CommandLane.Main;
|
||||
const state = lanes.get(cleaned);
|
||||
const state = queueState.lanes.get(cleaned);
|
||||
if (!state) {
|
||||
return 0;
|
||||
}
|
||||
|
|
@ -242,9 +249,9 @@ export function clearCommandLane(lane: string = CommandLane.Main) {
|
|||
* `enqueueCommandInLane()` call (which may never come).
|
||||
*/
|
||||
export function resetAllLanes(): void {
|
||||
gatewayDraining = false;
|
||||
queueState.gatewayDraining = false;
|
||||
const lanesToDrain: string[] = [];
|
||||
for (const state of lanes.values()) {
|
||||
for (const state of queueState.lanes.values()) {
|
||||
state.generation += 1;
|
||||
state.activeTaskIds.clear();
|
||||
state.draining = false;
|
||||
|
|
@ -264,7 +271,7 @@ export function resetAllLanes(): void {
|
|||
*/
|
||||
export function getActiveTaskCount(): number {
|
||||
let total = 0;
|
||||
for (const s of lanes.values()) {
|
||||
for (const s of queueState.lanes.values()) {
|
||||
total += s.activeTaskIds.size;
|
||||
}
|
||||
return total;
|
||||
|
|
@ -283,7 +290,7 @@ export function waitForActiveTasks(timeoutMs: number): Promise<{ drained: boolea
|
|||
const POLL_INTERVAL_MS = 50;
|
||||
const deadline = Date.now() + timeoutMs;
|
||||
const activeAtStart = new Set<number>();
|
||||
for (const state of lanes.values()) {
|
||||
for (const state of queueState.lanes.values()) {
|
||||
for (const taskId of state.activeTaskIds) {
|
||||
activeAtStart.add(taskId);
|
||||
}
|
||||
|
|
@ -297,7 +304,7 @@ export function waitForActiveTasks(timeoutMs: number): Promise<{ drained: boolea
|
|||
}
|
||||
|
||||
let hasPending = false;
|
||||
for (const state of lanes.values()) {
|
||||
for (const state of queueState.lanes.values()) {
|
||||
for (const taskId of state.activeTaskIds) {
|
||||
if (activeAtStart.has(taskId)) {
|
||||
hasPending = true;
|
||||
|
|
|
|||
|
|
@ -0,0 +1,39 @@
|
|||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { resolveGlobalMap, resolveGlobalSingleton } from "./global-singleton.js";
|
||||
|
||||
const TEST_KEY = Symbol("global-singleton:test");
|
||||
const TEST_MAP_KEY = Symbol("global-singleton:test-map");
|
||||
|
||||
afterEach(() => {
|
||||
delete (globalThis as Record<PropertyKey, unknown>)[TEST_KEY];
|
||||
delete (globalThis as Record<PropertyKey, unknown>)[TEST_MAP_KEY];
|
||||
});
|
||||
|
||||
describe("resolveGlobalSingleton", () => {
|
||||
it("reuses an initialized singleton", () => {
|
||||
const create = vi.fn(() => ({ value: 1 }));
|
||||
|
||||
const first = resolveGlobalSingleton(TEST_KEY, create);
|
||||
const second = resolveGlobalSingleton(TEST_KEY, create);
|
||||
|
||||
expect(first).toBe(second);
|
||||
expect(create).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("does not re-run the factory when undefined was already stored", () => {
|
||||
const create = vi.fn(() => undefined);
|
||||
|
||||
expect(resolveGlobalSingleton(TEST_KEY, create)).toBeUndefined();
|
||||
expect(resolveGlobalSingleton(TEST_KEY, create)).toBeUndefined();
|
||||
expect(create).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("resolveGlobalMap", () => {
|
||||
it("reuses the same map instance", () => {
|
||||
const first = resolveGlobalMap<string, number>(TEST_MAP_KEY);
|
||||
const second = resolveGlobalMap<string, number>(TEST_MAP_KEY);
|
||||
|
||||
expect(first).toBe(second);
|
||||
});
|
||||
});
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
export function resolveGlobalSingleton<T>(key: symbol, create: () => T): T {
|
||||
const globalStore = globalThis as Record<PropertyKey, unknown>;
|
||||
const existing = globalStore[key] as T | undefined;
|
||||
if (Object.prototype.hasOwnProperty.call(globalStore, key)) {
|
||||
return existing;
|
||||
}
|
||||
const created = create();
|
||||
globalStore[key] = created;
|
||||
return created;
|
||||
}
|
||||
|
||||
export function resolveGlobalMap<TKey, TValue>(key: symbol): Map<TKey, TValue> {
|
||||
return resolveGlobalSingleton(key, () => new Map<TKey, TValue>());
|
||||
}
|
||||
|
|
@ -1,4 +1,5 @@
|
|||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { importFreshModule } from "../../test/helpers/import-fresh.js";
|
||||
import {
|
||||
clearSlackThreadParticipationCache,
|
||||
hasSlackThreadParticipation,
|
||||
|
|
@ -49,6 +50,29 @@ describe("slack sent-thread-cache", () => {
|
|||
expect(hasSlackThreadParticipation("A1", "C456", "1700000000.000002")).toBe(false);
|
||||
});
|
||||
|
||||
it("shares thread participation across distinct module instances", async () => {
|
||||
const cacheA = await importFreshModule<typeof import("./sent-thread-cache.js")>(
|
||||
import.meta.url,
|
||||
"./sent-thread-cache.js?scope=shared-a",
|
||||
);
|
||||
const cacheB = await importFreshModule<typeof import("./sent-thread-cache.js")>(
|
||||
import.meta.url,
|
||||
"./sent-thread-cache.js?scope=shared-b",
|
||||
);
|
||||
|
||||
cacheA.clearSlackThreadParticipationCache();
|
||||
|
||||
try {
|
||||
cacheA.recordSlackThreadParticipation("A1", "C123", "1700000000.000001");
|
||||
expect(cacheB.hasSlackThreadParticipation("A1", "C123", "1700000000.000001")).toBe(true);
|
||||
|
||||
cacheB.clearSlackThreadParticipationCache();
|
||||
expect(cacheA.hasSlackThreadParticipation("A1", "C123", "1700000000.000001")).toBe(false);
|
||||
} finally {
|
||||
cacheA.clearSlackThreadParticipationCache();
|
||||
}
|
||||
});
|
||||
|
||||
it("expired entries return false and are cleaned up on read", () => {
|
||||
recordSlackThreadParticipation("A1", "C123", "1700000000.000001");
|
||||
// Advance time past the 24-hour TTL
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import { resolveGlobalMap } from "../shared/global-singleton.js";
|
||||
|
||||
/**
|
||||
* In-memory cache of Slack threads the bot has participated in.
|
||||
* Used to auto-respond in threads without requiring @mention after the first reply.
|
||||
|
|
@ -7,7 +9,13 @@
|
|||
const TTL_MS = 24 * 60 * 60 * 1000; // 24 hours
|
||||
const MAX_ENTRIES = 5000;
|
||||
|
||||
const threadParticipation = new Map<string, number>();
|
||||
/**
|
||||
* Keep Slack thread participation shared across bundled chunks so thread
|
||||
* auto-reply gating does not diverge between prepare/dispatch call paths.
|
||||
*/
|
||||
const SLACK_THREAD_PARTICIPATION_KEY = Symbol.for("openclaw.slackThreadParticipation");
|
||||
|
||||
const threadParticipation = resolveGlobalMap<string, number>(SLACK_THREAD_PARTICIPATION_KEY);
|
||||
|
||||
function makeKey(accountId: string, channelId: string, threadTs: string): string {
|
||||
return `${accountId}:${channelId}:${threadTs}`;
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import type { Bot } from "grammy";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { createTelegramDraftStream } from "./draft-stream.js";
|
||||
import { importFreshModule } from "../../test/helpers/import-fresh.js";
|
||||
import { __testing, createTelegramDraftStream } from "./draft-stream.js";
|
||||
|
||||
type TelegramDraftStreamParams = Parameters<typeof createTelegramDraftStream>[0];
|
||||
|
||||
|
|
@ -65,6 +66,10 @@ function createForceNewMessageHarness(params: { throttleMs?: number } = {}) {
|
|||
}
|
||||
|
||||
describe("createTelegramDraftStream", () => {
|
||||
afterEach(() => {
|
||||
__testing.resetTelegramDraftStreamForTests();
|
||||
});
|
||||
|
||||
it("sends stream preview message with message_thread_id when provided", async () => {
|
||||
const api = createMockDraftApi();
|
||||
const stream = createForumDraftStream(api);
|
||||
|
|
@ -355,6 +360,46 @@ describe("createTelegramDraftStream", () => {
|
|||
expect(api.editMessageText).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("shares draft-id allocation across distinct module instances", async () => {
|
||||
const draftA = await importFreshModule<typeof import("./draft-stream.js")>(
|
||||
import.meta.url,
|
||||
"./draft-stream.js?scope=shared-a",
|
||||
);
|
||||
const draftB = await importFreshModule<typeof import("./draft-stream.js")>(
|
||||
import.meta.url,
|
||||
"./draft-stream.js?scope=shared-b",
|
||||
);
|
||||
const apiA = createMockDraftApi();
|
||||
const apiB = createMockDraftApi();
|
||||
|
||||
draftA.__testing.resetTelegramDraftStreamForTests();
|
||||
|
||||
try {
|
||||
const streamA = draftA.createTelegramDraftStream({
|
||||
api: apiA as unknown as Bot["api"],
|
||||
chatId: 123,
|
||||
thread: { id: 42, scope: "dm" },
|
||||
previewTransport: "draft",
|
||||
});
|
||||
const streamB = draftB.createTelegramDraftStream({
|
||||
api: apiB as unknown as Bot["api"],
|
||||
chatId: 123,
|
||||
thread: { id: 42, scope: "dm" },
|
||||
previewTransport: "draft",
|
||||
});
|
||||
|
||||
streamA.update("Message A");
|
||||
await streamA.flush();
|
||||
streamB.update("Message B");
|
||||
await streamB.flush();
|
||||
|
||||
expect(apiA.sendMessageDraft.mock.calls[0]?.[1]).toBe(1);
|
||||
expect(apiB.sendMessageDraft.mock.calls[0]?.[1]).toBe(2);
|
||||
} finally {
|
||||
draftA.__testing.resetTelegramDraftStreamForTests();
|
||||
}
|
||||
});
|
||||
|
||||
it("creates new message after forceNewMessage is called", async () => {
|
||||
const { api, stream } = createForceNewMessageHarness();
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import type { Bot } from "grammy";
|
||||
import { createFinalizableDraftLifecycle } from "../channels/draft-stream-controls.js";
|
||||
import { resolveGlobalSingleton } from "../shared/global-singleton.js";
|
||||
import { buildTelegramThreadParams, type TelegramThreadSpec } from "./bot/helpers.js";
|
||||
import { isSafeToRetrySendError, isTelegramClientRejection } from "./network-errors.js";
|
||||
|
||||
|
|
@ -21,11 +22,20 @@ type TelegramSendMessageDraft = (
|
|||
},
|
||||
) => Promise<unknown>;
|
||||
|
||||
let nextDraftId = 0;
|
||||
/**
|
||||
* Keep draft-id allocation shared across bundled chunks so concurrent preview
|
||||
* lanes do not accidentally reuse draft ids when code-split entries coexist.
|
||||
*/
|
||||
const TELEGRAM_DRAFT_STREAM_STATE_KEY = Symbol.for("openclaw.telegramDraftStreamState");
|
||||
|
||||
const draftStreamState = resolveGlobalSingleton(TELEGRAM_DRAFT_STREAM_STATE_KEY, () => ({
|
||||
nextDraftId: 0,
|
||||
}));
|
||||
|
||||
function allocateTelegramDraftId(): number {
|
||||
nextDraftId = nextDraftId >= TELEGRAM_DRAFT_ID_MAX ? 1 : nextDraftId + 1;
|
||||
return nextDraftId;
|
||||
draftStreamState.nextDraftId =
|
||||
draftStreamState.nextDraftId >= TELEGRAM_DRAFT_ID_MAX ? 1 : draftStreamState.nextDraftId + 1;
|
||||
return draftStreamState.nextDraftId;
|
||||
}
|
||||
|
||||
function resolveSendMessageDraftApi(api: Bot["api"]): TelegramSendMessageDraft | undefined {
|
||||
|
|
@ -441,3 +451,9 @@ export function createTelegramDraftStream(params: {
|
|||
sendMayHaveLanded: () => messageSendAttempted && typeof streamMessageId !== "number",
|
||||
};
|
||||
}
|
||||
|
||||
export const __testing = {
|
||||
resetTelegramDraftStreamForTests() {
|
||||
draftStreamState.nextDraftId = 0;
|
||||
},
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import type { Bot } from "grammy";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { importFreshModule } from "../../test/helpers/import-fresh.js";
|
||||
import {
|
||||
getTelegramSendTestMocks,
|
||||
importTelegramSendModule,
|
||||
|
|
@ -88,6 +89,29 @@ describe("sent-message-cache", () => {
|
|||
clearSentMessageCache();
|
||||
expect(wasSentByBot(123, 1)).toBe(false);
|
||||
});
|
||||
|
||||
it("shares sent-message state across distinct module instances", async () => {
|
||||
const cacheA = await importFreshModule<typeof import("./sent-message-cache.js")>(
|
||||
import.meta.url,
|
||||
"./sent-message-cache.js?scope=shared-a",
|
||||
);
|
||||
const cacheB = await importFreshModule<typeof import("./sent-message-cache.js")>(
|
||||
import.meta.url,
|
||||
"./sent-message-cache.js?scope=shared-b",
|
||||
);
|
||||
|
||||
cacheA.clearSentMessageCache();
|
||||
|
||||
try {
|
||||
cacheA.recordSentMessage(123, 1);
|
||||
expect(cacheB.wasSentByBot(123, 1)).toBe(true);
|
||||
|
||||
cacheB.clearSentMessageCache();
|
||||
expect(cacheA.wasSentByBot(123, 1)).toBe(false);
|
||||
} finally {
|
||||
cacheA.clearSentMessageCache();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("buildInlineKeyboard", () => {
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import { resolveGlobalMap } from "../shared/global-singleton.js";
|
||||
|
||||
/**
|
||||
* In-memory cache of sent message IDs per chat.
|
||||
* Used to identify bot's own messages for reaction filtering ("own" mode).
|
||||
|
|
@ -9,7 +11,13 @@ type CacheEntry = {
|
|||
timestamps: Map<number, number>;
|
||||
};
|
||||
|
||||
const sentMessages = new Map<string, CacheEntry>();
|
||||
/**
|
||||
* Keep sent-message tracking shared across bundled chunks so Telegram reaction
|
||||
* filters see the same sent-message history regardless of which chunk recorded it.
|
||||
*/
|
||||
const TELEGRAM_SENT_MESSAGES_KEY = Symbol.for("openclaw.telegramSentMessages");
|
||||
|
||||
const sentMessages = resolveGlobalMap<string, CacheEntry>(TELEGRAM_SENT_MESSAGES_KEY);
|
||||
|
||||
function getChatKey(chatId: number | string): string {
|
||||
return String(chatId);
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import fs from "node:fs";
|
|||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { importFreshModule } from "../../test/helpers/import-fresh.js";
|
||||
import { resolveStateDir } from "../config/paths.js";
|
||||
import { getSessionBindingService } from "../infra/outbound/session-binding-service.js";
|
||||
import {
|
||||
|
|
@ -79,6 +80,53 @@ describe("telegram thread bindings", () => {
|
|||
});
|
||||
});
|
||||
|
||||
it("shares binding state across distinct module instances", async () => {
|
||||
const bindingsA = await importFreshModule<typeof import("./thread-bindings.js")>(
|
||||
import.meta.url,
|
||||
"./thread-bindings.js?scope=shared-a",
|
||||
);
|
||||
const bindingsB = await importFreshModule<typeof import("./thread-bindings.js")>(
|
||||
import.meta.url,
|
||||
"./thread-bindings.js?scope=shared-b",
|
||||
);
|
||||
|
||||
bindingsA.__testing.resetTelegramThreadBindingsForTests();
|
||||
|
||||
try {
|
||||
const managerA = bindingsA.createTelegramThreadBindingManager({
|
||||
accountId: "shared-runtime",
|
||||
persist: false,
|
||||
enableSweeper: false,
|
||||
});
|
||||
const managerB = bindingsB.createTelegramThreadBindingManager({
|
||||
accountId: "shared-runtime",
|
||||
persist: false,
|
||||
enableSweeper: false,
|
||||
});
|
||||
|
||||
expect(managerB).toBe(managerA);
|
||||
|
||||
await getSessionBindingService().bind({
|
||||
targetSessionKey: "agent:main:subagent:child-shared",
|
||||
targetKind: "subagent",
|
||||
conversation: {
|
||||
channel: "telegram",
|
||||
accountId: "shared-runtime",
|
||||
conversationId: "-100200300:topic:44",
|
||||
},
|
||||
placement: "current",
|
||||
});
|
||||
|
||||
expect(
|
||||
bindingsB
|
||||
.getTelegramThreadBindingManager("shared-runtime")
|
||||
?.getByConversationId("-100200300:topic:44")?.targetSessionKey,
|
||||
).toBe("agent:main:subagent:child-shared");
|
||||
} finally {
|
||||
bindingsA.__testing.resetTelegramThreadBindingsForTests();
|
||||
}
|
||||
});
|
||||
|
||||
it("updates lifecycle windows by session key", async () => {
|
||||
vi.useFakeTimers();
|
||||
vi.setSystemTime(new Date("2026-03-06T10:00:00.000Z"));
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@ import {
|
|||
type SessionBindingRecord,
|
||||
} from "../infra/outbound/session-binding-service.js";
|
||||
import { normalizeAccountId } from "../routing/session-key.js";
|
||||
import { resolveGlobalSingleton } from "../shared/global-singleton.js";
|
||||
|
||||
const DEFAULT_THREAD_BINDING_IDLE_TIMEOUT_MS = 24 * 60 * 60 * 1000;
|
||||
const DEFAULT_THREAD_BINDING_MAX_AGE_MS = 0;
|
||||
|
|
@ -62,8 +63,26 @@ export type TelegramThreadBindingManager = {
|
|||
stop: () => void;
|
||||
};
|
||||
|
||||
const MANAGERS_BY_ACCOUNT_ID = new Map<string, TelegramThreadBindingManager>();
|
||||
const BINDINGS_BY_ACCOUNT_CONVERSATION = new Map<string, TelegramThreadBindingRecord>();
|
||||
type TelegramThreadBindingsState = {
|
||||
managersByAccountId: Map<string, TelegramThreadBindingManager>;
|
||||
bindingsByAccountConversation: Map<string, TelegramThreadBindingRecord>;
|
||||
};
|
||||
|
||||
/**
|
||||
* Keep Telegram thread binding state shared across bundled chunks so routing,
|
||||
* binding lookups, and binding mutations all observe the same live registry.
|
||||
*/
|
||||
const TELEGRAM_THREAD_BINDINGS_STATE_KEY = Symbol.for("openclaw.telegramThreadBindingsState");
|
||||
|
||||
const threadBindingsState = resolveGlobalSingleton<TelegramThreadBindingsState>(
|
||||
TELEGRAM_THREAD_BINDINGS_STATE_KEY,
|
||||
() => ({
|
||||
managersByAccountId: new Map<string, TelegramThreadBindingManager>(),
|
||||
bindingsByAccountConversation: new Map<string, TelegramThreadBindingRecord>(),
|
||||
}),
|
||||
);
|
||||
const MANAGERS_BY_ACCOUNT_ID = threadBindingsState.managersByAccountId;
|
||||
const BINDINGS_BY_ACCOUNT_CONVERSATION = threadBindingsState.bindingsByAccountConversation;
|
||||
|
||||
function normalizeDurationMs(raw: unknown, fallback: number): number {
|
||||
if (typeof raw !== "number" || !Number.isFinite(raw)) {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,8 @@
|
|||
export async function importFreshModule<TModule>(
|
||||
from: string,
|
||||
specifier: string,
|
||||
): Promise<TModule> {
|
||||
// Vitest keys module instances by the full URL string, including the query
|
||||
// suffix. These tests rely on that behavior to emulate code-split chunks.
|
||||
return (await import(/* @vite-ignore */ new URL(specifier, from).href)) as TModule;
|
||||
}
|
||||
Loading…
Reference in New Issue