fix(agents): strip unsupported responses store payloads (#39219, thanks @ademczuk)

Co-authored-by: ademczuk <andrew.demczuk@gmail.com>
This commit is contained in:
Peter Steinberger 2026-03-07 22:47:41 +00:00
parent ab704b7aca
commit 3a761fbcf8
3 changed files with 35 additions and 3 deletions

View File

@ -295,6 +295,7 @@ Docs: https://docs.openclaw.ai
- Gateway/webchat route safety: block webchat/control-ui clients from inheriting stored external delivery routes on channel-scoped sessions (while preserving route inheritance for UI/TUI clients), preventing cross-channel leakage from scoped chats. (#39175) Thanks @widingmarcus-cyber.
- Telegram error-surface resilience: return a user-visible fallback reply when dispatch/debounce processing fails instead of going silent, while preserving draft-stream cleanup and best-effort thread-scoped fallback delivery. (#39209) Thanks @riftzen-bit.
- Gateway/password auth startup diagnostics: detect unresolved provider-reference objects in `gateway.auth.password` and fail with a specific bootstrap-secrets error message instead of generic misconfiguration output. (#39230) Thanks @ademczuk.
- Agents/OpenAI-responses compatibility: strip unsupported `store` payload fields when `supportsStore=false` (including OpenAI-compatible non-OpenAI providers) while preserving server-compaction payload behavior. (#39219) Thanks @ademczuk.
## 2026.3.2

View File

@ -1387,7 +1387,7 @@ describe("applyExtraParamsToAgent", () => {
expect(payload.store).toBe(false);
});
it("does not force store for models that declare supportsStore=false", () => {
it("strips store from payload for models that declare supportsStore=false", () => {
const payload = runResponsesPayloadMutationCase({
applyProvider: "azure-openai-responses",
applyModelId: "gpt-4o",
@ -1405,7 +1405,28 @@ describe("applyExtraParamsToAgent", () => {
compat: { supportsStore: false },
} as unknown as Model<"openai-responses">,
});
expect(payload.store).toBe(false);
expect(payload).not.toHaveProperty("store");
});
// Regression test: an OpenAI-compatible but non-OpenAI Responses provider that
// declares compat.supportsStore=false must not receive a `store` field at all.
// Per the fix's own rationale, the pi-ai upstream hardcodes `store: false` for
// the Responses API, and strict OpenAI-compatible endpoints (e.g. Gemini via a
// Cloudflare AI Gateway) reject that field — so it has to be stripped, not set.
it("strips store from payload for non-OpenAI responses providers with supportsStore=false", () => {
const payload = runResponsesPayloadMutationCase({
applyProvider: "custom-openai-responses",
applyModelId: "gemini-2.5-pro",
// Minimal model fixture; the double cast is needed because the fixture omits
// fields the full Model<"openai-responses"> type requires — TODO confirm.
model: {
api: "openai-responses",
provider: "custom-openai-responses",
id: "gemini-2.5-pro",
name: "gemini-2.5-pro",
baseUrl: "https://gateway.ai.cloudflare.com/v1/account/gateway/openai",
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 1_000_000,
maxTokens: 65_536,
// The flag under test: marks the provider as unable to accept `store`.
compat: { supportsStore: false },
} as unknown as Model<"openai-responses">,
});
// The `store` key must be entirely absent — `store: false` is not enough.
expect(payload).not.toHaveProperty("store");
});
it("auto-injects OpenAI Responses context_management compaction for direct OpenAI models", () => {

View File

@ -305,7 +305,14 @@ function createOpenAIResponsesContextManagementWrapper(
return (model, context, options) => {
const forceStore = shouldForceResponsesStore(model);
const useServerCompaction = shouldEnableOpenAIResponsesServerCompaction(model, extraParams);
if (!forceStore && !useServerCompaction) {
// Strip `store` from the payload when the model declares supportsStore=false.
// pi-ai upstream hardcodes `store: false` for Responses API; strict
// OpenAI-compatible endpoints (e.g. Gemini via Cloudflare) reject it.
const stripStore =
!forceStore &&
OPENAI_RESPONSES_APIS.has(String(model.api ?? "")) &&
(model as { compat?: { supportsStore?: boolean } }).compat?.supportsStore === false;
if (!forceStore && !useServerCompaction && !stripStore) {
return underlying(model, context, options);
}
@ -321,6 +328,9 @@ function createOpenAIResponsesContextManagementWrapper(
if (forceStore) {
payloadObj.store = true;
}
if (stripStore) {
delete payloadObj.store;
}
if (useServerCompaction && payloadObj.context_management === undefined) {
payloadObj.context_management = [
{