fix: break plugin-sdk provider barrel recursion

This commit is contained in:
Peter Steinberger 2026-03-27 23:45:20 +00:00
parent ac68494dae
commit b39a7e8073
62 changed files with 120 additions and 59 deletions

View File

@ -9,7 +9,7 @@ import type {
BedrockDiscoveryConfig,
ModelDefinitionConfig,
ModelProviderConfig,
} from "openclaw/plugin-sdk/provider-models";
} from "openclaw/plugin-sdk/provider-model-shared";
const log = createSubsystemLogger("bedrock-discovery");

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
export {
ANTHROPIC_VERTEX_DEFAULT_MODEL_ID,
buildAnthropicVertexProvider,

View File

@ -1,7 +1,7 @@
import type {
ModelDefinitionConfig,
ModelProviderConfig,
} from "openclaw/plugin-sdk/provider-models";
} from "openclaw/plugin-sdk/provider-model-shared";
import { resolveAnthropicVertexRegion } from "./region.js";
export const ANTHROPIC_VERTEX_DEFAULT_MODEL_ID = "claude-sonnet-4-6";
const ANTHROPIC_VERTEX_DEFAULT_CONTEXT_WINDOW = 1_000_000;

View File

@ -25,7 +25,7 @@ import {
validateApiKeyInput,
} from "openclaw/plugin-sdk/provider-auth";
import { createProviderApiKeyAuthMethod } from "openclaw/plugin-sdk/provider-auth-api-key";
import { normalizeModelCompat } from "openclaw/plugin-sdk/provider-models";
import { normalizeModelCompat } from "openclaw/plugin-sdk/provider-model-shared";
import { fetchClaudeUsage } from "openclaw/plugin-sdk/provider-usage";
import { buildAnthropicCliBackend } from "./cli-backend.js";
import { buildAnthropicCliMigrationResult, hasClaudeCliAuth } from "./cli-migration.js";

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
type VolcModelCatalogEntry = {
id: string;

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import {
buildBytePlusModelDefinition,
BYTEPLUS_BASE_URL,

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { createSubsystemLogger } from "openclaw/plugin-sdk/runtime-env";
const log = createSubsystemLogger("chutes-models");

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import {
CHUTES_BASE_URL,
CHUTES_MODEL_CATALOG,

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
export const CLOUDFLARE_AI_GATEWAY_PROVIDER_ID = "cloudflare-ai-gateway";
export const CLOUDFLARE_AI_GATEWAY_DEFAULT_MODEL_ID = "claude-sonnet-4-5";

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
export const DEEPSEEK_BASE_URL = "https://api.deepseek.com";

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { buildDeepSeekModelDefinition, DEEPSEEK_BASE_URL, DEEPSEEK_MODEL_CATALOG } from "./api.js";
export function buildDeepSeekProvider(): ModelProviderConfig {

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
const DEFAULT_CONTEXT_WINDOW = 128_000;
const DEFAULT_MAX_TOKENS = 8192;

View File

@ -2,7 +2,7 @@ import type {
ProviderResolveDynamicModelContext,
ProviderRuntimeModel,
} from "openclaw/plugin-sdk/core";
import { normalizeModelCompat } from "openclaw/plugin-sdk/provider-models";
import { normalizeModelCompat } from "openclaw/plugin-sdk/provider-model-shared";
export const PROVIDER_ID = "github-copilot";
const CODEX_GPT_54_MODEL_ID = "gpt-5.4";

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import {
applyAgentDefaultModelPrimary,
type OpenClawConfig,

View File

@ -7,7 +7,7 @@ import {
type ProviderFetchUsageSnapshotContext,
} from "openclaw/plugin-sdk/plugin-entry";
import { createProviderApiKeyAuthMethod } from "openclaw/plugin-sdk/provider-auth-api-key";
import type { ProviderPlugin } from "openclaw/plugin-sdk/provider-models";
import type { ProviderPlugin } from "openclaw/plugin-sdk/provider-model-shared";
import { createGoogleThinkingPayloadWrapper } from "openclaw/plugin-sdk/provider-stream";
import { GOOGLE_GEMINI_DEFAULT_MODEL, applyGoogleGeminiModelDefault } from "./api.js";
import { buildGoogleGeminiCliBackend } from "./cli-backend.js";

View File

@ -2,7 +2,7 @@ import type {
ProviderResolveDynamicModelContext,
ProviderRuntimeModel,
} from "openclaw/plugin-sdk/plugin-entry";
import { cloneFirstTemplateModel } from "openclaw/plugin-sdk/provider-models";
import { cloneFirstTemplateModel } from "openclaw/plugin-sdk/provider-model-shared";
const GEMINI_3_1_PRO_PREFIX = "gemini-3.1-pro";
const GEMINI_3_1_FLASH_PREFIX = "gemini-3.1-flash";

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
export const HUGGINGFACE_BASE_URL = "https://router.huggingface.co/v1";
export const HUGGINGFACE_POLICY_SUFFIXES = ["cheapest", "fastest"] as const;

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import {
buildHuggingfaceModelDefinition,
discoverHuggingfaceModels,

View File

@ -1,4 +1,4 @@
import { type ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import { type ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import {
discoverKilocodeModels,
KILOCODE_BASE_URL as LOCAL_KILOCODE_BASE_URL,

View File

@ -1,5 +1,5 @@
import type { KilocodeModelCatalogEntry } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { KilocodeModelCatalogEntry } from "openclaw/plugin-sdk/provider-model-shared";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { createSubsystemLogger } from "openclaw/plugin-sdk/runtime-env";
const log = createSubsystemLogger("kilocode-models");

View File

@ -9,4 +9,4 @@ export {
KILOCODE_MODEL_CATALOG,
} from "./provider-models.js";
export type { KilocodeModelCatalogEntry } from "openclaw/plugin-sdk/provider-models";
export type { KilocodeModelCatalogEntry } from "openclaw/plugin-sdk/provider-model-shared";

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
export const KIMI_BASE_URL = "https://api.kimi.com/coding/";
const KIMI_CODING_USER_AGENT = "claude-code/0.1.0";

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { buildLitellmModelDefinition, LITELLM_BASE_URL } from "./onboard.js";
export function buildLitellmProvider(): ModelProviderConfig {

View File

@ -1,5 +1,8 @@
import type { ProviderNormalizeResolvedModelContext } from "openclaw/plugin-sdk/core";
import type { ModelProviderConfig, ProviderPlugin } from "openclaw/plugin-sdk/provider-models";
import type {
ModelProviderConfig,
ProviderPlugin,
} from "openclaw/plugin-sdk/provider-model-shared";
import { apiKeyAuthMethod, entraIdAuthMethod } from "./auth.js";
import { prepareFoundryRuntimeAuth } from "./runtime.js";
import {

View File

@ -4,7 +4,7 @@ import {
type ProviderAuthResult,
type SecretInput,
} from "openclaw/plugin-sdk/provider-auth";
import type { ModelApi, ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelApi, ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
export const PROVIDER_ID = "microsoft-foundry";
export const DEFAULT_API = "openai-completions";

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { MINIMAX_DEFAULT_MODEL_ID, MINIMAX_TEXT_MODEL_CATALOG } from "./provider-models.js";
export const DEFAULT_MINIMAX_BASE_URL = "https://api.minimax.io/v1";

View File

@ -1,7 +1,7 @@
import type {
ModelDefinitionConfig,
ModelProviderConfig,
} from "openclaw/plugin-sdk/provider-models";
} from "openclaw/plugin-sdk/provider-model-shared";
import {
MINIMAX_DEFAULT_MODEL_ID,
MINIMAX_TEXT_MODEL_CATALOG,

View File

@ -1,4 +1,4 @@
import { matchesExactOrPrefix } from "openclaw/plugin-sdk/provider-models";
import { matchesExactOrPrefix } from "openclaw/plugin-sdk/provider-model-shared";
export const MINIMAX_DEFAULT_MODEL_ID = "MiniMax-M2.7";
export const MINIMAX_DEFAULT_MODEL_REF = `minimax/${MINIMAX_DEFAULT_MODEL_ID}`;

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
export const MISTRAL_BASE_URL = "https://api.mistral.ai/v1";
export const MISTRAL_DEFAULT_MODEL_ID = "mistral-large-latest";

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { buildMistralCatalogModels, MISTRAL_BASE_URL } from "./model-definitions.js";
export function buildMistralProvider(): ModelProviderConfig {

View File

@ -1,7 +1,7 @@
import type {
ModelDefinitionConfig,
ModelProviderConfig,
} from "openclaw/plugin-sdk/provider-models";
} from "openclaw/plugin-sdk/provider-model-shared";
export const MODELSTUDIO_BASE_URL = "https://coding-intl.dashscope.aliyuncs.com/v1";
export const MODELSTUDIO_GLOBAL_BASE_URL = MODELSTUDIO_BASE_URL;

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { MODELSTUDIO_BASE_URL, MODELSTUDIO_MODEL_CATALOG } from "./models.js";
export function buildModelStudioProvider(): ModelProviderConfig {

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
export const MOONSHOT_BASE_URL = "https://api.moonshot.ai/v1";
export const MOONSHOT_CN_BASE_URL = "https://api.moonshot.cn/v1";

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
const NVIDIA_BASE_URL = "https://integrate.api.nvidia.com/v1";
const NVIDIA_DEFAULT_MODEL_ID = "nvidia/llama-3.1-nemotron-70b-instruct";

View File

@ -15,7 +15,10 @@ import type {
ProviderRuntimeModel,
ProviderWrapStreamFnContext,
} from "openclaw/plugin-sdk/plugin-entry";
import { DEFAULT_CONTEXT_TOKENS, normalizeProviderId } from "openclaw/plugin-sdk/provider-models";
import {
DEFAULT_CONTEXT_TOKENS,
normalizeProviderId,
} from "openclaw/plugin-sdk/provider-model-shared";
import {
createMoonshotThinkingWrapper,
resolveMoonshotThinkingType,

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
export const OPENAI_CODEX_BASE_URL = "https://chatgpt.com/backend-api";

View File

@ -16,7 +16,7 @@ import {
normalizeModelCompat,
normalizeProviderId,
type ProviderPlugin,
} from "openclaw/plugin-sdk/provider-models";
} from "openclaw/plugin-sdk/provider-model-shared";
import { createOpenAIAttributionHeadersWrapper } from "openclaw/plugin-sdk/provider-stream";
import { fetchCodexUsage } from "openclaw/plugin-sdk/provider-usage";
import { OPENAI_CODEX_DEFAULT_MODEL } from "./default-models.js";

View File

@ -8,7 +8,7 @@ import {
normalizeModelCompat,
normalizeProviderId,
type ProviderPlugin,
} from "openclaw/plugin-sdk/provider-models";
} from "openclaw/plugin-sdk/provider-model-shared";
import {
createOpenAIAttributionHeadersWrapper,
createOpenAIDefaultTransportWrapper,

View File

@ -1,4 +1,4 @@
import { cloneFirstTemplateModel } from "openclaw/plugin-sdk/provider-models";
import { cloneFirstTemplateModel } from "openclaw/plugin-sdk/provider-model-shared";
export function findCatalogTemplate(params: {
entries: ReadonlyArray<{ provider: string; id: string }>;

View File

@ -5,7 +5,7 @@ import {
type ProviderRuntimeModel,
} from "openclaw/plugin-sdk/plugin-entry";
import { createProviderApiKeyAuthMethod } from "openclaw/plugin-sdk/provider-auth-api-key";
import { DEFAULT_CONTEXT_TOKENS } from "openclaw/plugin-sdk/provider-models";
import { DEFAULT_CONTEXT_TOKENS } from "openclaw/plugin-sdk/provider-model-shared";
import {
getOpenRouterModelCapabilities,
loadOpenRouterModelCapabilities,

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
const OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1";
const OPENROUTER_DEFAULT_MODEL_ID = "auto";

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
export const QIANFAN_BASE_URL = "https://qianfan.baidubce.com/v2";
export const QIANFAN_DEFAULT_MODEL_ID = "deepseek-v3.2";

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
export const SYNTHETIC_BASE_URL = "https://api.synthetic.new/anthropic";
export const SYNTHETIC_DEFAULT_MODEL_ID = "hf:MiniMaxAI/MiniMax-M2.5";

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import {
buildSyntheticModelDefinition,
SYNTHETIC_BASE_URL,

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
export const TOGETHER_BASE_URL = "https://api.together.xyz/v1";

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { buildTogetherModelDefinition, TOGETHER_BASE_URL, TOGETHER_MODEL_CATALOG } from "./api.js";
export function buildTogetherProvider(): ModelProviderConfig {

View File

@ -1,5 +1,5 @@
import { retryAsync } from "openclaw/plugin-sdk/infra-runtime";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { createSubsystemLogger } from "openclaw/plugin-sdk/runtime-env";
const log = createSubsystemLogger("venice-models");

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { discoverVeniceModels, VENICE_BASE_URL } from "./api.js";
export async function buildVeniceProvider(): Promise<ModelProviderConfig> {

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { createSubsystemLogger } from "openclaw/plugin-sdk/runtime-env";
export const VERCEL_AI_GATEWAY_PROVIDER_ID = "vercel-ai-gateway";

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { discoverVercelAiGatewayModels, VERCEL_AI_GATEWAY_BASE_URL } from "./api.js";
export async function buildVercelAiGatewayProvider(): Promise<ModelProviderConfig> {

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
type VolcModelCatalogEntry = {
id: string;

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import {
buildDoubaoModelDefinition,
DOUBAO_BASE_URL,

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
export const XAI_BASE_URL = "https://api.x.ai/v1";
export const XAI_DEFAULT_MODEL_ID = "grok-4";

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { buildXaiCatalogModels, XAI_BASE_URL } from "./model-definitions.js";
export function buildXaiProvider(

View File

@ -2,7 +2,7 @@ import type {
ProviderResolveDynamicModelContext,
ProviderRuntimeModel,
} from "openclaw/plugin-sdk/core";
import { normalizeModelCompat } from "openclaw/plugin-sdk/provider-models";
import { normalizeModelCompat } from "openclaw/plugin-sdk/provider-model-shared";
import { applyXaiModelCompat } from "./api.js";
import { resolveXaiCatalogEntry, XAI_BASE_URL } from "./model-definitions.js";

View File

@ -1,4 +1,4 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
const XIAOMI_BASE_URL = "https://api.xiaomimimo.com/v1";
export const XIAOMI_DEFAULT_MODEL_ID = "mimo-v2-flash";

View File

@ -16,7 +16,10 @@ import {
upsertAuthProfile,
validateApiKeyInput,
} from "openclaw/plugin-sdk/provider-auth-api-key";
import { DEFAULT_CONTEXT_TOKENS, normalizeModelCompat } from "openclaw/plugin-sdk/provider-models";
import {
DEFAULT_CONTEXT_TOKENS,
normalizeModelCompat,
} from "openclaw/plugin-sdk/provider-model-shared";
import { createZaiToolStreamWrapper } from "openclaw/plugin-sdk/provider-stream";
import { fetchZaiUsage, resolveLegacyPiAgentAccessToken } from "openclaw/plugin-sdk/provider-usage";
import { detectZaiEndpoint, type ZaiEndpointId } from "./detect.js";

View File

@ -1,4 +1,4 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-models";
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
export const ZAI_CODING_GLOBAL_BASE_URL = "https://api.z.ai/api/coding/paas/v4";
export const ZAI_CODING_CN_BASE_URL = "https://open.bigmodel.cn/api/coding/paas/v4";

View File

@ -148,6 +148,10 @@
"types": "./dist/plugin-sdk/thread-bindings-runtime.d.ts",
"default": "./dist/plugin-sdk/thread-bindings-runtime.js"
},
"./plugin-sdk/together": {
"types": "./dist/plugin-sdk/together.d.ts",
"default": "./dist/plugin-sdk/together.js"
},
"./plugin-sdk/text-runtime": {
"types": "./dist/plugin-sdk/text-runtime.d.ts",
"default": "./dist/plugin-sdk/text-runtime.js"
@ -712,6 +716,10 @@
"types": "./dist/plugin-sdk/provider-google.d.ts",
"default": "./dist/plugin-sdk/provider-google.js"
},
"./plugin-sdk/provider-model-shared": {
"types": "./dist/plugin-sdk/provider-model-shared.d.ts",
"default": "./dist/plugin-sdk/provider-model-shared.js"
},
"./plugin-sdk/provider-models": {
"types": "./dist/plugin-sdk/provider-models.d.ts",
"default": "./dist/plugin-sdk/provider-models.js"

View File

@ -27,6 +27,7 @@
"matrix-runtime-heavy",
"matrix-runtime-shared",
"thread-bindings-runtime",
"together",
"text-runtime",
"agent-runtime",
"speech-runtime",
@ -168,6 +169,7 @@
"provider-env-vars",
"provider-http",
"provider-google",
"provider-model-shared",
"provider-models",
"provider-moonshot",
"provider-onboard",

View File

@ -0,0 +1,38 @@
// Shared model/catalog helpers for provider plugins.
//
// Keep provider-owned exports out of this subpath so plugin loaders can import it
// without recursing through provider-specific facades.
//
// This module is re-exports only: `export ... from` forwards bindings without
// creating local names, so no `import` statement is needed here. (The previous
// revision also imported BedrockDiscoveryConfig/ModelDefinitionConfig locally;
// that import was unused and is intentionally gone.)

// Core model/provider config types shared by every provider plugin.
export type {
  BedrockDiscoveryConfig,
  ModelApi,
  ModelDefinitionConfig,
  ModelProviderConfig,
} from "../config/types.models.js";
export type { ProviderPlugin } from "../plugins/types.js";
export type { KilocodeModelCatalogEntry } from "../plugins/provider-model-kilocode.js";
// Default context-window size used when a model definition omits one.
export { DEFAULT_CONTEXT_TOKENS } from "../agents/defaults.js";
// Model-compat helpers (tool-call encoding, schema profiles, compat normalization).
export {
  hasNativeWebSearchTool,
  HTML_ENTITY_TOOL_CALL_ARGUMENTS_ENCODING,
  normalizeModelCompat,
  resolveToolCallArgumentsEncoding,
  usesXaiToolSchemaProfile,
  XAI_TOOL_SCHEMA_PROFILE,
} from "../agents/model-compat.js";
export { normalizeProviderId } from "../agents/provider-id.js";
export {
  createMoonshotThinkingWrapper,
  resolveMoonshotThinkingType,
} from "../agents/pi-embedded-runner/moonshot-thinking-stream-wrappers.js";
// Catalog-template helpers shared across provider model builders.
export {
  cloneFirstTemplateModel,
  matchesExactOrPrefix,
} from "../plugins/provider-model-helpers.js";
// MiniMax catalog constants (consumed by the minimax provider and onboarding).
export {
  isMiniMaxModernModelId,
  MINIMAX_DEFAULT_MODEL_ID,
  MINIMAX_DEFAULT_MODEL_REF,
  MINIMAX_TEXT_MODEL_CATALOG,
  MINIMAX_TEXT_MODEL_ORDER,
  MINIMAX_TEXT_MODEL_REFS,
} from "./minimax.js";

View File

@ -540,6 +540,10 @@ describe("plugin-sdk subpath exports", () => {
"resolveZaiBaseUrl",
],
});
expectSourceContract("provider-model-shared", {
mentions: ["DEFAULT_CONTEXT_TOKENS", "normalizeModelCompat", "cloneFirstTemplateModel"],
omits: ["applyOpenAIConfig", "buildKilocodeModelDefinition", "discoverHuggingfaceModels"],
});
expectSourceMentions("setup", [
"DEFAULT_ACCOUNT_ID",