refactor: generalize provider transport hooks

This commit is contained in:
Peter Steinberger 2026-03-29 23:05:18 +09:00
parent 8109195ad8
commit edc58a6864
No known key found for this signature in database
12 changed files with 396 additions and 95 deletions

View File

@ -49,9 +49,27 @@ const OPENAI_MODERN_MODEL_IDS = [
const OPENAI_DIRECT_SPARK_MODEL_ID = "gpt-5.3-codex-spark";
const SUPPRESSED_SPARK_PROVIDERS = new Set(["openai", "azure-openai-responses"]);
function shouldUseOpenAIResponsesTransport(params: {
provider: string;
api?: string | null;
baseUrl?: string;
}): boolean {
if (params.api !== "openai-completions") {
return false;
}
const isOwnerProvider = normalizeProviderId(params.provider) === PROVIDER_ID;
if (isOwnerProvider) {
return !params.baseUrl || isOpenAIApiBaseUrl(params.baseUrl);
}
return typeof params.baseUrl === "string" && isOpenAIApiBaseUrl(params.baseUrl);
}
function normalizeOpenAITransport(model: ProviderRuntimeModel): ProviderRuntimeModel {
const useResponsesTransport =
model.api === "openai-completions" && (!model.baseUrl || isOpenAIApiBaseUrl(model.baseUrl));
const useResponsesTransport = shouldUseOpenAIResponsesTransport({
provider: model.provider,
api: model.api,
baseUrl: model.baseUrl,
});
if (!useResponsesTransport) {
return model;
@ -168,6 +186,10 @@ export function buildOpenAIProvider(): ProviderPlugin {
}
return normalizeOpenAITransport(ctx.model);
},
normalizeTransport: ({ provider, api, baseUrl }) =>
shouldUseOpenAIResponsesTransport({ provider, api, baseUrl })
? { api: "openai-responses", baseUrl }
: undefined,
capabilities: {
providerFamily: "openai",
},

View File

@ -13,7 +13,6 @@ import {
createOpenRouterWrapper,
isProxyReasoningUnsupported,
} from "openclaw/plugin-sdk/provider-stream";
import { applyXaiModelCompat } from "openclaw/plugin-sdk/xai";
import { openrouterMediaUnderstandingProvider } from "./media-understanding-provider.js";
import { applyOpenrouterConfig, OPENROUTER_DEFAULT_MODEL_REF } from "./onboard.js";
import { buildOpenrouterProvider } from "./provider-catalog.js";
@ -75,10 +74,6 @@ function isOpenRouterCacheTtlModel(modelId: string): boolean {
return OPENROUTER_CACHE_TTL_MODEL_PREFIXES.some((prefix) => modelId.startsWith(prefix));
}
/** True when an OpenRouter model id lives in xAI's "x-ai/" namespace. */
function isXaiOpenRouterModel(modelId: string): boolean {
  const normalized = modelId.trim().toLowerCase();
  return normalized.startsWith("x-ai/");
}
export default definePluginEntry({
id: "openrouter",
name: "OpenRouter Provider",
@ -135,8 +130,6 @@ export default definePluginEntry({
geminiThoughtSignatureSanitization: true,
geminiThoughtSignatureModelHints: ["gemini"],
},
normalizeResolvedModel: ({ modelId, model }) =>
isXaiOpenRouterModel(modelId) ? applyXaiModelCompat(model) : undefined,
isModernModelRef: () => true,
wrapStreamFn: (ctx) => {
let streamFn = ctx.streamFn;

View File

@ -1,4 +1,7 @@
import { applyModelCompatPatch } from "openclaw/plugin-sdk/provider-model-shared";
import {
applyModelCompatPatch,
normalizeProviderId,
} from "openclaw/plugin-sdk/provider-model-shared";
import type { ModelCompatConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { XAI_UNSUPPORTED_SCHEMA_KEYWORDS } from "openclaw/plugin-sdk/provider-tools";
@ -35,3 +38,56 @@ export function applyXaiModelCompat<T extends { compat?: unknown }>(model: T): T
resolveXaiModelCompatPatch(),
) as T;
}
/**
 * Detects whether a configured base URL points at the official xAI API host.
 *
 * Accepts unknown input: non-strings and blank strings are rejected. Strings
 * that fail URL parsing fall back to a loose substring match so entries
 * without a scheme (e.g. "api.x.ai/v1") are still recognized.
 */
function isXaiBaseUrl(baseUrl: unknown): boolean {
  if (typeof baseUrl !== "string") {
    return false;
  }
  if (baseUrl.trim() === "") {
    return false;
  }
  let hostname: string;
  try {
    hostname = new URL(baseUrl).hostname;
  } catch {
    // Not a parseable URL — fall back to a substring check.
    return baseUrl.toLowerCase().includes("api.x.ai");
  }
  return hostname.toLowerCase() === "api.x.ai";
}
/** Heuristic: model ids prefixed "x-ai/" are treated as xAI models. */
function isXaiModelHint(modelId: string): boolean {
  const id = modelId.trim().toLowerCase();
  return id.startsWith("x-ai/");
}
/**
 * Decides whether a chat-completions model should be switched to the xAI
 * Responses transport: either the base URL targets api.x.ai, or the provider
 * is xAI itself with no base URL configured.
 */
function shouldUseXaiResponsesTransport(params: {
  provider: string;
  api?: unknown;
  baseUrl?: unknown;
}): boolean {
  const { provider, api, baseUrl } = params;
  if (api !== "openai-completions") {
    return false;
  }
  if (isXaiBaseUrl(baseUrl)) {
    return true;
  }
  // No explicit host: only the xAI provider itself defaults to api.x.ai.
  const isXaiProvider = normalizeProviderId(provider) === "xai";
  return isXaiProvider && !baseUrl;
}
/**
 * Returns true when xAI compat flags should be contributed for a resolved
 * model: it must use the chat-completions API and either target the xAI host
 * or carry an "x-ai/"-prefixed model id.
 */
export function shouldContributeXaiCompat(params: {
  modelId: string;
  model: { api?: unknown; baseUrl?: unknown };
}): boolean {
  const { modelId, model } = params;
  if (model.api !== "openai-completions") {
    return false;
  }
  if (isXaiBaseUrl(model.baseUrl)) {
    return true;
  }
  return isXaiModelHint(modelId);
}
/**
 * Computes the transport override for xAI-compatible models.
 *
 * @returns an "openai-responses" transport (preserving any string baseUrl)
 *   when the model qualifies, otherwise undefined to leave it untouched.
 */
export function resolveXaiTransport(params: {
  provider: string;
  api?: unknown;
  baseUrl?: unknown;
}): { api: "openai-responses"; baseUrl?: string } | undefined {
  if (!shouldUseXaiResponsesTransport(params)) {
    return undefined;
  }
  const baseUrl = typeof params.baseUrl === "string" ? params.baseUrl : undefined;
  return { api: "openai-responses", baseUrl };
}

View File

@ -6,7 +6,13 @@ import { defineSingleProviderPluginEntry } from "openclaw/plugin-sdk/provider-en
import { createToolStreamWrapper } from "openclaw/plugin-sdk/provider-stream";
import { resolveProviderWebSearchPluginConfig } from "openclaw/plugin-sdk/provider-web-search";
import { normalizeSecretInputString } from "openclaw/plugin-sdk/secret-input";
import { applyXaiModelCompat, normalizeXaiModelId } from "./api.js";
import {
applyXaiModelCompat,
normalizeXaiModelId,
resolveXaiTransport,
resolveXaiModelCompatPatch,
shouldContributeXaiCompat,
} from "./api.js";
import { createCodeExecutionTool } from "./code-execution.js";
import { applyXaiConfig, XAI_DEFAULT_MODEL_REF } from "./onboard.js";
import { buildXaiProvider } from "./provider-catalog.js";
@ -132,6 +138,10 @@ export default defineSingleProviderPluginEntry({
};
},
normalizeResolvedModel: ({ model }) => applyXaiModelCompat(model),
normalizeTransport: ({ provider, api, baseUrl }) =>
resolveXaiTransport({ provider, api, baseUrl }),
contributeResolvedModelCompat: ({ modelId, model }) =>
shouldContributeXaiCompat({ modelId, model }) ? resolveXaiModelCompatPatch() : undefined,
normalizeModelId: ({ modelId }) => normalizeXaiModelId(modelId),
resolveDynamicModel: (ctx) => resolveXaiForwardCompatModel({ providerId: PROVIDER_ID, ctx }),
isModernModelRef: ({ modelId }) => isModernXaiModel(modelId),

View File

@ -1,74 +1,9 @@
import type { Api, Model } from "@mariozechner/pi-ai";
import { normalizeModelCompat } from "../../plugins/provider-model-compat.js";
import { normalizeProviderId } from "../model-selection.js";
/** True when baseUrl is the official OpenAI API endpoint (optionally /v1). */
function isOpenAIApiBaseUrl(baseUrl?: string): boolean {
  if (baseUrl === undefined) {
    return false;
  }
  const candidate = baseUrl.trim();
  if (candidate === "") {
    return false;
  }
  // http(s)://api.openai.com with optional /v1 path and trailing slash.
  return /^https?:\/\/api\.openai\.com(?:\/v1)?\/?$/i.test(candidate);
}
/** True when baseUrl is the official xAI API endpoint (optionally /v1). */
function isXaiApiBaseUrl(baseUrl?: string): boolean {
  const candidate = baseUrl?.trim() ?? "";
  if (candidate.length === 0) {
    return false;
  }
  // http(s)://api.x.ai with optional /v1 path and trailing slash.
  return /^https?:\/\/api\.x\.ai(?:\/v1)?\/?$/i.test(candidate);
}
/**
 * Upgrades OpenAI-provider models from the chat-completions API to the
 * Responses API when they target the default or official OpenAI host.
 * Other providers and non-qualifying models pass through unchanged.
 */
function normalizeOpenAITransport(params: { provider: string; model: Model<Api> }): Model<Api> {
  const { provider, model } = params;
  if (normalizeProviderId(provider) !== "openai") {
    return model;
  }
  if (model.api !== "openai-completions") {
    return model;
  }
  // An unset baseUrl means the default (official) OpenAI endpoint.
  const targetsOpenAIHost = !model.baseUrl || isOpenAIApiBaseUrl(model.baseUrl);
  if (!targetsOpenAIHost) {
    return model;
  }
  return { ...model, api: "openai-responses" } as Model<Api>;
}
/**
 * Upgrades xAI-provider models from the chat-completions API to the
 * Responses API when they target the default or official xAI host.
 * Other providers and non-qualifying models pass through unchanged.
 */
function normalizeXaiTransport(params: { provider: string; model: Model<Api> }): Model<Api> {
  const { provider, model } = params;
  if (normalizeProviderId(provider) !== "xai") {
    return model;
  }
  if (model.api !== "openai-completions") {
    return model;
  }
  // An unset baseUrl means the default (official) xAI endpoint.
  const targetsXaiHost = !model.baseUrl || isXaiApiBaseUrl(model.baseUrl);
  if (!targetsXaiHost) {
    return model;
  }
  return { ...model, api: "openai-responses" } as Model<Api>;
}
/**
 * Applies the built-in transport normalizations (OpenAI first, then xAI)
 * to a resolved provider model.
 */
export function applyBuiltInResolvedProviderTransportNormalization(params: {
  provider: string;
  model: Model<Api>;
}): Model<Api> {
  const afterOpenAI = normalizeOpenAITransport(params);
  return normalizeXaiTransport({ provider: params.provider, model: afterOpenAI });
}
export function normalizeResolvedProviderModel(params: {
provider: string;
model: Model<Api>;
}): Model<Api> {
return normalizeModelCompat(applyBuiltInResolvedProviderTransportNormalization(params));
return normalizeModelCompat(params.model);
}

View File

@ -4,6 +4,7 @@ const OPENAI_BASE_URL = "https://api.openai.com/v1";
const OPENAI_CODEX_BASE_URL = "https://chatgpt.com/backend-api";
const OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1";
const ANTHROPIC_BASE_URL = "https://api.anthropic.com";
const XAI_BASE_URL = "https://api.x.ai/v1";
const ZAI_BASE_URL = "https://api.z.ai/api/paas/v4";
const GOOGLE_GENERATIVE_AI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta";
const DEFAULT_CONTEXT_WINDOW = 200_000;
@ -58,12 +59,6 @@ function cloneTemplate(
}
function normalizeDynamicModel(params: { provider: string; model: ResolvedModelLike }) {
if (params.provider === "openai") {
const baseUrl = typeof params.model.baseUrl === "string" ? params.model.baseUrl : undefined;
if (params.model.api === "openai-completions" && (!baseUrl || baseUrl === OPENAI_BASE_URL)) {
return { ...params.model, api: "openai-responses" };
}
}
if (params.provider !== "openai-codex") {
return undefined;
}
@ -306,6 +301,7 @@ export function createProviderRuntimeTestMock(options: ProviderRuntimeTestMockOp
"github-copilot",
"openai-codex",
"openai",
"xai",
"anthropic",
"zai",
],
@ -389,14 +385,37 @@ export function createProviderRuntimeTestMock(options: ProviderRuntimeTestMockOp
})
: undefined,
normalizeProviderTransportWithPlugin: (params: {
provider: string;
context: { api?: string | null; baseUrl?: string };
}) =>
params.context.api === "google-generative-ai" &&
params.context.baseUrl === "https://generativelanguage.googleapis.com"
? {
api: params.context.api,
baseUrl: GOOGLE_GENERATIVE_AI_BASE_URL,
}
: undefined,
}) => {
if (
params.context.api === "google-generative-ai" &&
params.context.baseUrl === "https://generativelanguage.googleapis.com"
) {
return {
api: params.context.api,
baseUrl: GOOGLE_GENERATIVE_AI_BASE_URL,
};
}
if (
params.context.api === "openai-completions" &&
(params.provider === "openai" || params.context.baseUrl === OPENAI_BASE_URL)
) {
return {
api: "openai-responses",
baseUrl: params.context.baseUrl,
};
}
if (
params.context.api === "openai-completions" &&
(params.provider === "xai" || params.context.baseUrl === XAI_BASE_URL)
) {
return {
api: "openai-responses",
baseUrl: params.context.baseUrl,
};
}
return undefined;
},
};
}

View File

@ -351,6 +351,64 @@ describe("resolveModel", () => {
expect(result.model?.baseUrl).toBe("https://generativelanguage.googleapis.com/v1beta");
});
// A custom provider pointed at api.openai.com with the completions API is
// upgraded to the Responses transport — presumably via the openai plugin's
// normalizeTransport hook (verify against the plugin entry).
it("normalizes custom api.openai.com providers to responses transport", () => {
const cfg = {
models: {
providers: {
"custom-openai": {
baseUrl: "https://api.openai.com/v1",
api: "openai-completions",
models: [
{
...makeModel("gpt-5.4"),
provider: "custom-openai",
},
],
},
},
},
} as unknown as OpenClawConfig;
const result = resolveModelForTest("custom-openai", "gpt-5.4", "/tmp/agent", cfg);
expect(result.error).toBeUndefined();
// The transport flips to openai-responses while the baseUrl is preserved.
expect(result.model).toMatchObject({
provider: "custom-openai",
id: "gpt-5.4",
api: "openai-responses",
baseUrl: "https://api.openai.com/v1",
});
});
// A custom provider pointed at api.x.ai with the completions API is upgraded
// to the Responses transport — presumably via the xai plugin's
// normalizeTransport hook (verify against the plugin entry).
it("normalizes custom api.x.ai providers to responses transport", () => {
const cfg = {
models: {
providers: {
"custom-xai": {
baseUrl: "https://api.x.ai/v1",
api: "openai-completions",
models: [
{
...makeModel("grok-4.1-fast"),
provider: "custom-xai",
},
],
},
},
},
} as unknown as OpenClawConfig;
const result = resolveModelForTest("custom-xai", "grok-4.1-fast", "/tmp/agent", cfg);
expect(result.error).toBeUndefined();
// The transport flips to openai-responses while the baseUrl is preserved.
expect(result.model).toMatchObject({
provider: "custom-xai",
id: "grok-4.1-fast",
api: "openai-responses",
baseUrl: "https://api.x.ai/v1",
});
});
it("includes provider headers in provider fallback model", () => {
const cfg = {
models: {

View File

@ -4,6 +4,7 @@ import type { OpenClawConfig } from "../../config/config.js";
import type { ModelDefinitionConfig } from "../../config/types.js";
import {
applyProviderResolvedModelCompatWithPlugins,
applyProviderResolvedTransportWithPlugin,
buildProviderUnknownModelHintWithPlugin,
clearProviderRuntimeHookCache,
normalizeProviderTransportWithPlugin,
@ -40,6 +41,9 @@ type ProviderRuntimeHooks = {
applyProviderResolvedModelCompatWithPlugins?: (
params: Parameters<typeof applyProviderResolvedModelCompatWithPlugins>[0],
) => unknown;
applyProviderResolvedTransportWithPlugin?: (
params: Parameters<typeof applyProviderResolvedTransportWithPlugin>[0],
) => unknown;
buildProviderUnknownModelHintWithPlugin: (
params: Parameters<typeof buildProviderUnknownModelHintWithPlugin>[0],
) => string | undefined;
@ -57,6 +61,7 @@ type ProviderRuntimeHooks = {
const DEFAULT_PROVIDER_RUNTIME_HOOKS: ProviderRuntimeHooks = {
applyProviderResolvedModelCompatWithPlugins,
applyProviderResolvedTransportWithPlugin,
buildProviderUnknownModelHintWithPlugin,
prepareProviderDynamicModel,
runProviderDynamicModel,
@ -137,9 +142,20 @@ function normalizeResolvedModel(params: {
model: (pluginNormalized ?? normalizedInputModel) as never,
},
}) as Model<Api> | undefined;
const transportNormalized = runtimeHooks.applyProviderResolvedTransportWithPlugin?.({
provider: params.provider,
config: params.cfg,
context: {
config: params.cfg,
agentDir: params.agentDir,
provider: params.provider,
modelId: normalizedInputModel.id,
model: (compatNormalized ?? pluginNormalized ?? normalizedInputModel) as never,
},
}) as Model<Api> | undefined;
return normalizeResolvedProviderModel({
provider: params.provider,
model: compatNormalized ?? pluginNormalized ?? normalizedInputModel,
model: transportNormalized ?? compatNormalized ?? pluginNormalized ?? normalizedInputModel,
});
}

View File

@ -380,4 +380,101 @@ describe("discoverAuthStorage", () => {
expect(model?.compat?.maxTokensField).toBe("max_tokens");
});
});
// Discovery path: an OpenRouter model with an "x-ai/"-prefixed id should pick
// up xAI compat flags (schema profile, native web search, HTML-entity tool
// argument decoding) even though the provider itself is OpenRouter.
it("normalizes discovered xAI compat flags for OpenRouter x-ai model ids", async () => {
await withAgentDir(async (agentDir) => {
// Seed an API-key auth profile so the openrouter provider is usable.
saveAuthProfileStore(
{
version: 1,
profiles: {
"openrouter:default": {
type: "api_key",
provider: "openrouter",
key: "sk-or-v1-runtime",
},
},
},
agentDir,
);
await writeModelsJson(agentDir, {
providers: {
openrouter: {
api: "openai-completions",
baseUrl: "https://openrouter.ai/api/v1",
apiKey: "OPENROUTER_API_KEY",
models: [
{
id: "x-ai/grok-4.1-fast",
name: "Grok via OpenRouter",
reasoning: true,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 256000,
maxTokens: 8192,
},
],
},
},
});
const authStorage = discoverAuthStorage(agentDir);
const modelRegistry = discoverModels(authStorage, agentDir);
const model = modelRegistry.find("openrouter", "x-ai/grok-4.1-fast") as {
compat?: {
toolSchemaProfile?: string;
nativeWebSearchTool?: boolean;
toolCallArgumentsEncoding?: string;
};
} | null;
// xAI compat flags applied purely from the model-id hint.
expect(model?.compat?.toolSchemaProfile).toBe("xai");
expect(model?.compat?.nativeWebSearchTool).toBe(true);
expect(model?.compat?.toolCallArgumentsEncoding).toBe("html-entities");
});
});
// Discovery path: a custom provider whose baseUrl is the xAI host should get
// both the Responses transport upgrade and the xAI compat flags, keyed off the
// host rather than the provider id or model id.
it("normalizes discovered custom xAI-compatible providers by host", async () => {
await withAgentDir(async (agentDir) => {
await writeModelsJson(agentDir, {
providers: {
"custom-xai": {
api: "openai-completions",
baseUrl: "https://api.x.ai/v1",
apiKey: "XAI_API_KEY",
models: [
{
id: "grok-4.1-fast",
name: "Custom Grok",
reasoning: true,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 256000,
maxTokens: 8192,
},
],
},
},
});
const authStorage = discoverAuthStorage(agentDir);
const modelRegistry = discoverModels(authStorage, agentDir);
const model = modelRegistry
.getAll()
.find((entry) => entry.provider === "custom-xai" && entry.id === "grok-4.1-fast") as
| {
api?: string;
compat?: {
toolSchemaProfile?: string;
nativeWebSearchTool?: boolean;
toolCallArgumentsEncoding?: string;
};
}
| undefined;
// Transport upgraded and xAI compat flags applied based on the api.x.ai host.
expect(model?.api).toBe("openai-responses");
expect(model?.compat?.toolSchemaProfile).toBe("xai");
expect(model?.compat?.nativeWebSearchTool).toBe(true);
expect(model?.compat?.toolCallArgumentsEncoding).toBe("html-entities");
});
});
});

View File

@ -9,6 +9,7 @@ import type {
import { normalizeModelCompat } from "../plugins/provider-model-compat.js";
import {
applyProviderResolvedModelCompatWithPlugins,
applyProviderResolvedTransportWithPlugin,
normalizeProviderResolvedModelWithPlugin,
} from "../plugins/provider-runtime.js";
import type { ProviderRuntimeModel } from "../plugins/types.js";
@ -88,7 +89,17 @@ function normalizeRegistryModel<T>(value: T, agentDir: string): T {
agentDir,
},
}) ?? pluginNormalized;
return normalizeModelCompat(compatNormalized as Model<Api>) as T;
const transportNormalized =
applyProviderResolvedTransportWithPlugin({
provider: model.provider,
context: {
provider: model.provider,
modelId: model.id,
model: compatNormalized,
agentDir,
},
}) ?? compatNormalized;
return normalizeModelCompat(transportNormalized as Model<Api>) as T;
}
class OpenClawModelRegistry extends PiModelRegistryClass {

View File

@ -30,6 +30,7 @@ let formatProviderAuthProfileApiKeyWithPlugin: typeof import("./provider-runtime
let normalizeProviderConfigWithPlugin: typeof import("./provider-runtime.js").normalizeProviderConfigWithPlugin;
let normalizeProviderModelIdWithPlugin: typeof import("./provider-runtime.js").normalizeProviderModelIdWithPlugin;
let applyProviderResolvedModelCompatWithPlugins: typeof import("./provider-runtime.js").applyProviderResolvedModelCompatWithPlugins;
let applyProviderResolvedTransportWithPlugin: typeof import("./provider-runtime.js").applyProviderResolvedTransportWithPlugin;
let normalizeProviderTransportWithPlugin: typeof import("./provider-runtime.js").normalizeProviderTransportWithPlugin;
let prepareProviderExtraParams: typeof import("./provider-runtime.js").prepareProviderExtraParams;
let resolveProviderConfigApiKeyWithPlugin: typeof import("./provider-runtime.js").resolveProviderConfigApiKeyWithPlugin;
@ -213,6 +214,7 @@ describe("provider-runtime", () => {
buildProviderUnknownModelHintWithPlugin,
applyProviderNativeStreamingUsageCompatWithPlugin,
applyProviderResolvedModelCompatWithPlugins,
applyProviderResolvedTransportWithPlugin,
formatProviderAuthProfileApiKeyWithPlugin,
normalizeProviderConfigWithPlugin,
normalizeProviderModelIdWithPlugin,
@ -910,6 +912,50 @@ describe("provider-runtime", () => {
});
});
// A plugin's normalizeTransport hook can apply to a provider other than the
// plugin's own id ("custom-openai" handled by the "openai" plugin); the
// resolved model must come back with the rewritten transport.
it("applies foreign transport normalization for custom provider hosts", () => {
resolvePluginProvidersMock.mockImplementation((params) => {
const onlyPluginIds = params.onlyPluginIds ?? [];
const plugins: ProviderPlugin[] = [
{
id: "openai",
label: "OpenAI",
auth: [],
// Only rewrite the specific host+api combination under test.
normalizeTransport: ({ provider, api, baseUrl }) =>
provider === "custom-openai" &&
api === "openai-completions" &&
baseUrl === "https://api.openai.com/v1"
? { api: "openai-responses", baseUrl }
: undefined,
},
];
return onlyPluginIds.length > 0
? plugins.filter((plugin) => onlyPluginIds.includes(plugin.id))
: plugins;
});
expect(
applyProviderResolvedTransportWithPlugin({
provider: "custom-openai",
context: createDemoResolvedModelContext({
provider: "custom-openai",
modelId: "gpt-5.4",
model: {
...MODEL,
provider: "custom-openai",
id: "gpt-5.4",
api: "openai-completions",
baseUrl: "https://api.openai.com/v1",
},
}),
}),
).toMatchObject({
provider: "custom-openai",
id: "gpt-5.4",
api: "openai-responses",
baseUrl: "https://api.openai.com/v1",
});
});
it("resolves bundled catalog hooks through provider plugins", async () => {
resolveCatalogHookProviderPluginIdsMock.mockReturnValue(["openai"]);
resolvePluginProvidersMock.mockImplementation((params?: { onlyPluginIds?: string[] }) => {

View File

@ -298,6 +298,41 @@ export function applyProviderResolvedModelCompatWithPlugins(params: {
return changed ? nextModel : undefined;
}
/**
 * Runs plugin transport normalization against a resolved model and returns a
 * copy with the new api/baseUrl applied, or undefined when no plugin matched
 * or the suggested transport is identical to the model's current one.
 */
export function applyProviderResolvedTransportWithPlugin(params: {
  provider: string;
  config?: OpenClawConfig;
  workspaceDir?: string;
  env?: NodeJS.ProcessEnv;
  context: ProviderNormalizeResolvedModelContext;
}): ProviderRuntimeModel | undefined {
  const { model } = params.context;
  const normalized = normalizeProviderTransportWithPlugin({
    provider: params.provider,
    config: params.config,
    workspaceDir: params.workspaceDir,
    env: params.env,
    context: {
      provider: params.context.provider,
      api: model.api,
      baseUrl: model.baseUrl,
    },
  });
  if (!normalized) {
    return undefined;
  }
  // Fall back to the model's existing values for fields the plugin omitted.
  const nextApi = normalized.api ?? model.api;
  const nextBaseUrl = normalized.baseUrl ?? model.baseUrl;
  const unchanged = nextApi === model.api && nextBaseUrl === model.baseUrl;
  if (unchanged) {
    return undefined;
  }
  return {
    ...model,
    api: nextApi as ProviderRuntimeModel["api"],
    baseUrl: nextBaseUrl,
  };
}
function resolveProviderHookPlugin(params: {
provider: string;
config?: OpenClawConfig;
@ -334,9 +369,12 @@ export function normalizeProviderTransportWithPlugin(params: {
env?: NodeJS.ProcessEnv;
context: ProviderNormalizeTransportContext;
}): { api?: string | null; baseUrl?: string } | undefined {
const hasTransportChange = (normalized: { api?: string | null; baseUrl?: string }) =>
(normalized.api ?? params.context.api) !== params.context.api ||
(normalized.baseUrl ?? params.context.baseUrl) !== params.context.baseUrl;
const matchedPlugin = resolveProviderHookPlugin(params);
const normalizedMatched = matchedPlugin?.normalizeTransport?.(params.context);
if (normalizedMatched) {
if (normalizedMatched && hasTransportChange(normalizedMatched)) {
return normalizedMatched;
}
@ -345,7 +383,7 @@ export function normalizeProviderTransportWithPlugin(params: {
continue;
}
const normalized = candidate.normalizeTransport(params.context);
if (normalized) {
if (normalized && hasTransportChange(normalized)) {
return normalized;
}
}