fix: preserve deferred TUI history sync (#53130) (thanks @joelnishanth)

This commit is contained in:
Peter Steinberger 2026-03-23 23:15:20 -07:00
parent cc8ed8d25b
commit ff2e9a52ff
4 changed files with 43 additions and 8 deletions

View File

@ -12,6 +12,7 @@ Docs: https://docs.openclaw.ai
- Feishu/docx block ordering: preserve the document tree order from `docx.document.convert` when inserting blocks, fixing heading/paragraph/list misordering in newly written Feishu documents. (#40524) Thanks @TaoXieSZ.
- Agents/cron: suppress the default heartbeat system prompt for cron-triggered embedded runs even when they target non-cron session keys, so cron tasks stop reading `HEARTBEAT.md` and polluting unrelated threads. (#53152) Thanks @Protocol-zero-0.
- TUI/chat: preserve pending user messages when a slow local run emits an empty final event, deferring the needed history reload and flushing it once the newer active run finishes, so silent/tool-only runs no longer stay incomplete. (#53130) Thanks @joelnishanth.
## 2026.3.23

View File

@ -607,4 +607,27 @@ describe("tui-event-handlers: handleAgentEvent", () => {
expect(state.activeChatRunId).toBe("run-main");
expect(loadHistory).not.toHaveBeenCalled();
});
// Regression test for #53130 (deferred TUI history sync): a local run that
// ends with an empty final event while a DIFFERENT run is active must defer
// its history reload rather than dropping it, and flush it exactly once when
// the newer active run finishes.
it("flushes deferred history reload after the newer local run finishes", () => {
const { state, loadHistory, noteLocalRunId, handleChatEvent } = createHandlersHarness({
state: { activeChatRunId: "run-main" },
});
// Stale local run finishes with no displayable output ("final" without a
// message) while "run-main" is still the active chat run: reload is deferred.
noteLocalRunId("run-local-empty");
handleChatEvent({
runId: "run-local-empty",
sessionKey: state.currentSessionKey,
state: "final",
});
// The newer active run then finishes with displayable text content.
noteLocalRunId("run-main");
handleChatEvent({
runId: "run-main",
sessionKey: state.currentSessionKey,
state: "final",
message: { content: [{ type: "text", text: "done" }] },
});
// Exactly one reload: the deferred refresh is flushed, not duplicated.
expect(loadHistory).toHaveBeenCalledTimes(1);
});
});

View File

@ -61,6 +61,7 @@ export function createEventHandlers(context: EventHandlerContext) {
const sessionRuns = new Map<string, number>();
let streamAssembler = new TuiStreamAssembler();
let lastSessionKey = state.currentSessionKey;
let pendingHistoryRefresh = false;
const pruneRunMap = (runs: Map<string, number>) => {
if (runs.size <= 200) {
@ -93,11 +94,20 @@ export function createEventHandlers(context: EventHandlerContext) {
finalizedRuns.clear();
sessionRuns.clear();
streamAssembler = new TuiStreamAssembler();
pendingHistoryRefresh = false;
clearLocalRunIds?.();
clearLocalBtwRunIds?.();
btw.clear();
};
// Runs the postponed history reload, but only once no chat run is active.
// No-op when nothing was deferred or a run still owns the session; clears
// the pending flag before kicking off the (fire-and-forget) reload.
const flushPendingHistoryRefreshIfIdle = () => {
const idleWithPendingWork = pendingHistoryRefresh && !state.activeChatRunId;
if (idleWithPendingWork) {
pendingHistoryRefresh = false;
void loadHistory?.();
}
};
const noteSessionRun = (runId: string) => {
sessionRuns.set(runId, Date.now());
pruneRunMap(sessionRuns);
@ -123,6 +133,7 @@ export function createEventHandlers(context: EventHandlerContext) {
}) => {
noteFinalizedRun(params.runId);
clearActiveRunIfMatch(params.runId);
flushPendingHistoryRefreshIfIdle();
if (params.wasActiveRun) {
setActivityStatus(params.status);
}
@ -137,6 +148,7 @@ export function createEventHandlers(context: EventHandlerContext) {
streamAssembler.drop(params.runId);
sessionRuns.delete(params.runId);
clearActiveRunIfMatch(params.runId);
flushPendingHistoryRefreshIfIdle();
if (params.wasActiveRun) {
setActivityStatus(params.status);
}
@ -158,22 +170,21 @@ export function createEventHandlers(context: EventHandlerContext) {
const isLocalRun = isLocalRunId?.(runId) ?? false;
if (isLocalRun) {
forgetLocalRunId?.(runId);
// Never reload history for local runs that ended without displayable output.
// This prevents the user's message from disappearing when the backend is slow
// (e.g., Ollama) and sends an empty final event before the response is ready.
// Local runs with displayable output do not need a history reload.
if (!opts?.allowLocalWithoutDisplayableFinal) {
return;
}
// Skip history reload if a DIFFERENT run is still active.
// This prevents clearing the user's pending message when a stale/concurrent
// empty final event arrives while a new message is being processed.
// Defer the reload if a newer run is active so we preserve the pending
// user message, then flush once that active run finishes.
if (state.activeChatRunId && state.activeChatRunId !== runId) {
pendingHistoryRefresh = true;
return;
}
}
if (hasConcurrentActiveRun(runId)) {
return;
}
pendingHistoryRefresh = false;
void loadHistory?.();
};

View File

@ -149,8 +149,8 @@ describe("handleSendChat", () => {
model: "gpt-5-mini",
});
expect(host.chatModelOverrides.main).toEqual({
kind: "qualified",
value: "openai/gpt-5-mini",
kind: "raw",
value: "gpt-5-mini",
});
});
});