fix: route Copilot Claude through Anthropic

This commit is contained in:
Peter Steinberger 2026-04-04 02:57:06 +09:00
parent e588a363f9
commit 5d20c73e05
No known key found for this signature in database
10 changed files with 213 additions and 7 deletions

View File

@ -39,6 +39,7 @@ Docs: https://docs.openclaw.ai
- Plugins/manifest registry: stop warning when an explicit manifest `id` intentionally differs from the discovery hint. (#59185) Thanks @samzong.
- WhatsApp/streaming: honor `channels.whatsapp.blockStreaming` again for inbound auto-replies so progressive block replies can be enabled explicitly instead of being forced to final-only delivery. Thanks @mcaxtr.
- Auth/failover: shorten `auth_permanent` lockouts, add dedicated config knobs for permanent-auth backoff, and downgrade ambiguous auth-ish upstream incidents to retryable auth failures so providers recover automatically after transient outages. (#60404) Thanks @extrasmall0.
- Providers/GitHub Copilot: route Claude models through Anthropic Messages with Copilot-compatible headers and Anthropic prompt-cache markers instead of forcing the OpenAI Responses transport.
- Plugins/runtime: reuse compatible active registries for `web_search` and `web_fetch` provider snapshot resolution so repeated runtime reads do not re-import the same bundled plugin set on each agent message. Related #48380.
- Infra/tailscale: ignore `OPENCLAW_TEST_TAILSCALE_BINARY` outside explicit test environments and block it from workspace `.env`, so test-only binary overrides cannot be injected through trusted repository state. (#58468) Thanks @eleqtrizit.
- Plugins/OpenAI: enable reference-image edits for `gpt-image-1` by routing edit calls to `/images/edits` with multipart image uploads, and update image-generation capability/docs metadata accordingly. Thanks @steipete.

View File

@ -68,5 +68,7 @@ openclaw models set github-copilot/gpt-4o
- Requires an interactive TTY; run it directly in a terminal.
- Copilot model availability depends on your plan; if a model is rejected, try
another ID (for example `github-copilot/gpt-4.1`).
- Claude model IDs use the Anthropic Messages transport automatically; GPT, o-series,
and Gemini models keep the OpenAI Responses transport.
- The login stores a GitHub token in the auth profile store and exchanges it for a
Copilot API token when OpenClaw runs.

View File

@ -6,6 +6,7 @@ import {
} from "openclaw/plugin-sdk/provider-auth";
import { githubCopilotLoginCommand } from "openclaw/plugin-sdk/provider-auth-login";
import { PROVIDER_ID, resolveCopilotForwardCompatModel } from "./models.js";
import { wrapCopilotAnthropicStream } from "./stream.js";
import { DEFAULT_COPILOT_API_BASE_URL, resolveCopilotApiToken } from "./token.js";
import { fetchCopilotUsage } from "./usage.js";
@ -152,6 +153,7 @@ export default definePluginEntry({
},
},
resolveDynamicModel: (ctx) => resolveCopilotForwardCompatModel(ctx),
wrapStreamFn: (ctx) => wrapCopilotAnthropicStream(ctx.streamFn),
buildReplayPolicy: ({ modelId }) => buildGithubCopilotReplayPolicy(modelId),
supportsXHighThinking: ({ modelId }) =>
COPILOT_XHIGH_MODEL_IDS.includes(modelId.trim().toLowerCase() as never),

View File

@ -1,4 +1,5 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { resolveCopilotTransportApi } from "./models.js";
const DEFAULT_CONTEXT_WINDOW = 128_000;
const DEFAULT_MAX_TOKENS = 8192;
@ -30,10 +31,7 @@ export function buildCopilotModelDefinition(modelId: string): ModelDefinitionCon
return {
id,
name: id,
// pi-coding-agent's registry schema doesn't know about a "github-copilot" API.
// We use OpenAI-compatible responses API, while keeping the provider id as
// "github-copilot" (pi-ai uses that to attach Copilot-specific headers).
api: "openai-responses",
api: resolveCopilotTransportApi(id),
reasoning: false,
input: ["text", "image"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },

View File

@ -83,12 +83,13 @@ describe("github-copilot model defaults", () => {
// Claude ids must resolve to the Anthropic Messages transport (matches
// resolveCopilotTransportApi). The duplicated, contradictory expectation of
// "openai-responses" (stale diff residue) is removed: def.api cannot equal
// both values, so the first assertion would always fail.
it("builds a valid definition for claude-sonnet-4.6", () => {
const def = buildCopilotModelDefinition("claude-sonnet-4.6");
expect(def.id).toBe("claude-sonnet-4.6");
expect(def.api).toBe("anthropic-messages");
});
// Whitespace around the id is trimmed before the definition is built;
// non-Claude ids stay on the OpenAI Responses transport.
it("trims whitespace from model id", () => {
const def = buildCopilotModelDefinition(" gpt-4o ");
expect(def.id).toBe("gpt-4o");
expect(def.api).toBe("openai-responses");
});
it("throws on empty model id", () => {

View File

@ -11,6 +11,14 @@ const CODEX_TEMPLATE_MODEL_IDS = ["gpt-5.2-codex"] as const;
const DEFAULT_CONTEXT_WINDOW = 128_000;
const DEFAULT_MAX_TOKENS = 8192;
/**
 * Picks the wire transport for a GitHub Copilot model id.
 *
 * Claude-family ids (anything containing "claude", case-insensitively, after
 * trimming) are routed through Anthropic Messages; every other model keeps
 * the OpenAI Responses transport.
 */
export function resolveCopilotTransportApi(
  modelId: string,
): "anthropic-messages" | "openai-responses" {
  const normalized = modelId.trim().toLowerCase();
  if (normalized.includes("claude")) {
    return "anthropic-messages";
  }
  return "openai-responses";
}
export function resolveCopilotForwardCompatModel(
ctx: ProviderResolveDynamicModelContext,
): ProviderRuntimeModel | undefined {
@ -56,7 +64,7 @@ export function resolveCopilotForwardCompatModel(
id: trimmedModelId,
name: trimmedModelId,
provider: PROVIDER_ID,
api: "openai-responses",
api: resolveCopilotTransportApi(trimmedModelId),
reasoning,
// Optimistic: most Copilot models support images, and the API rejects
// image payloads for text-only models rather than failing silently.

View File

@ -0,0 +1,87 @@
import { describe, expect, it, vi } from "vitest";
import { wrapCopilotAnthropicStream } from "./stream.js";
// Tests for the Copilot→Anthropic stream wrapper: Claude payloads must get
// Copilot-specific dynamic headers plus Anthropic prompt-cache markers, while
// other Copilot models pass through to the base stream function untouched.
describe("wrapCopilotAnthropicStream", () => {
it("adds Copilot headers and Anthropic cache markers for Claude payloads", async () => {
const payloads: Array<Record<string, unknown>> = [];
// Stub stream fn: invokes onPayload (as the real transport would) so the
// wrapper's payload patch runs, then records the patched payload.
const baseStreamFn = vi.fn((model, _context, options) => {
const payload = {
messages: [
{ role: "system", content: "system prompt" },
{
role: "assistant",
content: [{ type: "thinking", text: "draft", cache_control: { type: "ephemeral" } }],
},
],
};
options?.onPayload?.(payload, model);
payloads.push(payload as Record<string, unknown>);
return {
async *[Symbol.asyncIterator]() {},
} as never;
});
const wrapped = wrapCopilotAnthropicStream(baseStreamFn);
// Context includes an image part so the vision opt-in header is expected.
const context = {
messages: [
{
role: "user",
content: [
{ type: "text", text: "look" },
{ type: "image", image: "data:image/png;base64,abc" },
],
},
],
} as never;
wrapped(
{
provider: "github-copilot",
api: "anthropic-messages",
id: "claude-sonnet-4.6",
} as never,
context,
{
headers: { "X-Test": "1" },
},
);
expect(baseStreamFn).toHaveBeenCalledOnce();
// Dynamic headers are merged in while caller-supplied headers are kept.
expect(baseStreamFn.mock.calls[0]?.[2]).toMatchObject({
headers: {
"X-Initiator": "user",
"Openai-Intent": "conversation-edits",
"Copilot-Vision-Request": "true",
"X-Test": "1",
},
});
// The system string prompt is normalized into a cached text block, and the
// assistant thinking block has its cache_control marker stripped.
expect(payloads[0]?.messages).toEqual([
{
role: "system",
content: [{ type: "text", text: "system prompt", cache_control: { type: "ephemeral" } }],
},
{
role: "assistant",
content: [{ type: "thinking", text: "draft" }],
},
]);
});
it("leaves non-Anthropic Copilot models untouched", () => {
const baseStreamFn = vi.fn(() => ({ async *[Symbol.asyncIterator]() {} }) as never);
const wrapped = wrapCopilotAnthropicStream(baseStreamFn);
const options = { headers: { Existing: "1" } };
wrapped(
{
provider: "github-copilot",
api: "openai-responses",
id: "gpt-4.1",
} as never,
{ messages: [{ role: "user", content: "hi" }] } as never,
options as never,
);
// The exact same options object must be forwarded — no header injection.
expect(baseStreamFn).toHaveBeenCalledWith(expect.anything(), expect.anything(), options);
});
});

View File

@ -0,0 +1,96 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import { streamSimple } from "@mariozechner/pi-ai";
import { streamWithPayloadPatch } from "openclaw/plugin-sdk/provider-stream";
type StreamContext = Parameters<StreamFn>[1];
type StreamMessage = StreamContext["messages"][number];
// Copilot's X-Initiator header wants to know whether the request was kicked
// off by the human or by the agent loop; we infer it from whoever spoke last
// in the transcript. An empty transcript counts as user-initiated.
function inferCopilotInitiator(messages: StreamContext["messages"]): "agent" | "user" {
  const lastIndex = messages.length - 1;
  const lastMessage = lastIndex >= 0 ? messages[lastIndex] : undefined;
  if (lastMessage && lastMessage.role !== "user") {
    return "agent";
  }
  return "user";
}
// True when any user turn or tool result carries an image part; Copilot
// requires an explicit opt-in header before accepting vision payloads.
function hasCopilotVisionInput(messages: StreamContext["messages"]): boolean {
  return messages.some((message: StreamMessage) => {
    if (message.role !== "user" && message.role !== "toolResult") {
      return false;
    }
    if (!Array.isArray(message.content)) {
      return false;
    }
    return message.content.some((part) => part.type === "image");
  });
}
// Per-request Copilot headers derived from the outgoing transcript:
// who initiated the turn, the request intent, and (only when image input is
// present) the vision opt-in flag.
function buildCopilotDynamicHeaders(params: {
  messages: StreamContext["messages"];
}): Record<string, string> {
  const headers: Record<string, string> = {
    "X-Initiator": inferCopilotInitiator(params.messages),
    "Openai-Intent": "conversation-edits",
  };
  if (hasCopilotVisionInput(params.messages)) {
    headers["Copilot-Vision-Request"] = "true";
  }
  return headers;
}
/**
 * Mutates an Anthropic Messages payload in place to manage prompt-cache markers.
 *
 * System/developer prompts get an ephemeral `cache_control` breakpoint on their
 * final content block (a bare string prompt is first normalized into a single
 * text block), while assistant thinking/redacted_thinking blocks have any
 * `cache_control` stripped. A payload without a `messages` array is untouched.
 */
function applyAnthropicPromptCacheMarkers(payloadObj: Record<string, unknown>): void {
  const messages = payloadObj.messages;
  if (!Array.isArray(messages)) {
    return;
  }
  const isThinkingType = (type: unknown): boolean =>
    type === "thinking" || type === "redacted_thinking";
  for (const message of messages as Array<{ role?: string; content?: unknown }>) {
    const role = message.role;
    const content = message.content;
    if (role === "system" || role === "developer") {
      if (typeof content === "string") {
        // Normalize a bare string prompt into a single cached text block.
        message.content = [
          { type: "text", text: content, cache_control: { type: "ephemeral" } },
        ];
      } else if (Array.isArray(content) && content.length > 0) {
        // Mark the final block as a cache breakpoint, unless it is a thinking
        // block (cache markers are not applied to those).
        const tail = content[content.length - 1];
        if (tail && typeof tail === "object") {
          const block = tail as Record<string, unknown>;
          if (!isThinkingType(block.type)) {
            block.cache_control = { type: "ephemeral" };
          }
        }
      }
    } else if (role === "assistant" && Array.isArray(content)) {
      // Thinking blocks must not carry cache markers; drop any that slipped in.
      for (const item of content) {
        if (item && typeof item === "object") {
          const block = item as Record<string, unknown>;
          if (isThinkingType(block.type)) {
            delete block.cache_control;
          }
        }
      }
    }
  }
}
/**
 * Wraps a stream function so Copilot-hosted Claude models get the
 * Copilot-specific request headers and Anthropic prompt-cache markers.
 *
 * Models that are not github-copilot/anthropic-messages are delegated to the
 * underlying stream function with their options untouched. For Claude models,
 * dynamic headers are merged in with caller-supplied headers taking
 * precedence, and the outgoing payload is patched via streamWithPayloadPatch.
 *
 * @param baseStreamFn Stream function to wrap; falls back to streamSimple.
 */
export function wrapCopilotAnthropicStream(baseStreamFn: StreamFn | undefined): StreamFn {
  const delegate = baseStreamFn ?? streamSimple;
  return (model, context, options) => {
    const isCopilotClaude =
      model.provider === "github-copilot" && model.api === "anthropic-messages";
    if (!isCopilotClaude) {
      return delegate(model, context, options);
    }
    const mergedHeaders = {
      ...buildCopilotDynamicHeaders({ messages: context.messages }),
      ...(options?.headers ?? {}),
    };
    return streamWithPayloadPatch(
      delegate,
      model,
      context,
      { ...options, headers: mergedHeaders },
      applyAnthropicPromptCacheMarkers,
    );
  };
}

View File

@ -171,7 +171,7 @@ function buildDynamicModel(
id: modelId,
name: modelId,
provider: "github-copilot",
api: "openai-responses",
api: lower.includes("claude") ? "anthropic-messages" : "openai-responses",
reasoning: /^o[13](\b|$)/.test(lower),
input: ["text", "image"],
cost: OPENROUTER_FALLBACK_COST,

View File

@ -992,6 +992,17 @@ describe("resolveModel", () => {
);
});
// Dynamic Copilot Claude models (ids not in the static registry) should
// default to the Anthropic Messages transport when resolved.
it("resolves github-copilot Claude dynamic models to anthropic-messages by default", () => {
const result = resolveModelForTest("github-copilot", "claude-sonnet-4.6", "/tmp/agent");
expect(result.error).toBeUndefined();
expect(result.model).toMatchObject({
provider: "github-copilot",
id: "claude-sonnet-4.6",
api: "anthropic-messages",
});
});
it("builds an openai fallback for gpt-5.4 mini from the gpt-5-mini template", () => {
mockDiscoveredModel(discoverModels, {
provider: "openai",