mirror of https://github.com/openclaw/openclaw.git
167 lines
5.5 KiB
TypeScript
167 lines
5.5 KiB
TypeScript
import type { Api, Model } from "@mariozechner/pi-ai";
|
|
import type { OpenClawConfig } from "../../config/config.js";
|
|
import type { ModelDefinitionConfig } from "../../config/types.js";
|
|
import { resolveOpenClawAgentDir } from "../agent-paths.js";
|
|
import { DEFAULT_CONTEXT_TOKENS } from "../defaults.js";
|
|
import { normalizeModelCompat } from "../model-compat.js";
|
|
import { normalizeProviderId } from "../model-selection.js";
|
|
import {
|
|
discoverAuthStorage,
|
|
discoverModels,
|
|
type AuthStorage,
|
|
type ModelRegistry,
|
|
} from "../pi-model-discovery.js";
|
|
|
|
// An inline model definition stamped with the provider it belongs to and an
// optional base URL inherited from the provider-level config entry.
type InlineModelEntry = ModelDefinitionConfig & { provider: string; baseUrl?: string };

// Shape of one entry under cfg.models.providers: optional shared baseUrl/api
// defaults plus the provider's inline model definitions.
type InlineProviderConfig = {
  baseUrl?: string;
  api?: ModelDefinitionConfig["api"];
  models?: ModelDefinitionConfig[];
};

// Forward-compat target: a Codex model id that may not be in the registry yet.
const OPENAI_CODEX_GPT_53_MODEL_ID = "gpt-5.3-codex";

// Registered Codex models whose metadata can be cloned under the 5.3 id.
const OPENAI_CODEX_TEMPLATE_MODEL_IDS = ["gpt-5.2-codex"] as const;
|
|
|
|
function resolveOpenAICodexGpt53FallbackModel(
|
|
provider: string,
|
|
modelId: string,
|
|
modelRegistry: ModelRegistry,
|
|
): Model<Api> | undefined {
|
|
const normalizedProvider = normalizeProviderId(provider);
|
|
const trimmedModelId = modelId.trim();
|
|
if (normalizedProvider !== "openai-codex") {
|
|
return undefined;
|
|
}
|
|
if (trimmedModelId.toLowerCase() !== OPENAI_CODEX_GPT_53_MODEL_ID) {
|
|
return undefined;
|
|
}
|
|
|
|
for (const templateId of OPENAI_CODEX_TEMPLATE_MODEL_IDS) {
|
|
const template = modelRegistry.find(normalizedProvider, templateId) as Model<Api> | null;
|
|
if (!template) {
|
|
continue;
|
|
}
|
|
return normalizeModelCompat({
|
|
...template,
|
|
id: trimmedModelId,
|
|
name: trimmedModelId,
|
|
} as Model<Api>);
|
|
}
|
|
|
|
return normalizeModelCompat({
|
|
id: trimmedModelId,
|
|
name: trimmedModelId,
|
|
api: "openai-codex-responses",
|
|
provider: normalizedProvider,
|
|
baseUrl: "https://chatgpt.com/backend-api",
|
|
reasoning: true,
|
|
input: ["text", "image"],
|
|
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
|
|
contextWindow: DEFAULT_CONTEXT_TOKENS,
|
|
maxTokens: DEFAULT_CONTEXT_TOKENS,
|
|
} as Model<Api>);
|
|
}
|
|
|
|
export function buildInlineProviderModels(
|
|
providers: Record<string, InlineProviderConfig>,
|
|
): InlineModelEntry[] {
|
|
return Object.entries(providers).flatMap(([providerId, entry]) => {
|
|
const trimmed = providerId.trim();
|
|
if (!trimmed) {
|
|
return [];
|
|
}
|
|
return (entry?.models ?? []).map((model) => ({
|
|
...model,
|
|
provider: trimmed,
|
|
baseUrl: entry?.baseUrl,
|
|
api: model.api ?? entry?.api,
|
|
}));
|
|
});
|
|
}
|
|
|
|
export function buildModelAliasLines(cfg?: OpenClawConfig) {
|
|
const models = cfg?.agents?.defaults?.models ?? {};
|
|
const entries: Array<{ alias: string; model: string }> = [];
|
|
for (const [keyRaw, entryRaw] of Object.entries(models)) {
|
|
const model = String(keyRaw ?? "").trim();
|
|
if (!model) {
|
|
continue;
|
|
}
|
|
const alias = String((entryRaw as { alias?: string } | undefined)?.alias ?? "").trim();
|
|
if (!alias) {
|
|
continue;
|
|
}
|
|
entries.push({ alias, model });
|
|
}
|
|
return entries
|
|
.toSorted((a, b) => a.alias.localeCompare(b.alias))
|
|
.map((entry) => `- ${entry.alias}: ${entry.model}`);
|
|
}
|
|
|
|
/**
 * Resolves a provider/model pair to a concrete model definition.
 *
 * Resolution order:
 *   1. The discovered model registry.
 *   2. Inline models declared under cfg.models.providers.
 *   3. The openai-codex gpt-5.3 forward-compat fallback.
 *   4. A generic synthesized fallback when the provider is configured in the
 *      config, or the model id starts with "mock-".
 * If none match, `error` is set instead of `model`.
 *
 * Always returns the auth storage and model registry discovered along the
 * way so callers can reuse them without rediscovering.
 *
 * @param provider Provider id as given by the caller (normalized internally
 *   only where needed).
 * @param modelId Model id to look up.
 * @param agentDir Optional agent directory override; defaults to the
 *   resolved OpenClaw agent dir.
 * @param cfg Optional config supplying inline provider/model definitions.
 */
export function resolveModel(
  provider: string,
  modelId: string,
  agentDir?: string,
  cfg?: OpenClawConfig,
): {
  model?: Model<Api>;
  error?: string;
  authStorage: AuthStorage;
  modelRegistry: ModelRegistry;
} {
  const resolvedAgentDir = agentDir ?? resolveOpenClawAgentDir();
  const authStorage = discoverAuthStorage(resolvedAgentDir);
  const modelRegistry = discoverModels(authStorage, resolvedAgentDir);
  const model = modelRegistry.find(provider, modelId) as Model<Api> | null;
  if (!model) {
    // Registry miss: try inline models declared directly in the config.
    // Matching is provider-normalized but model-id exact.
    const providers = cfg?.models?.providers ?? {};
    const inlineModels = buildInlineProviderModels(providers);
    const normalizedProvider = normalizeProviderId(provider);
    const inlineMatch = inlineModels.find(
      (entry) => normalizeProviderId(entry.provider) === normalizedProvider && entry.id === modelId,
    );
    if (inlineMatch) {
      const normalized = normalizeModelCompat(inlineMatch as Model<Api>);
      return {
        model: normalized,
        authStorage,
        modelRegistry,
      };
    }
    // Codex gpt-5.3 forward-compat fallback must be checked BEFORE the generic providerCfg fallback.
    // Otherwise, if cfg.models.providers["openai-codex"] is configured, the generic fallback fires
    // with api: "openai-responses" instead of the correct "openai-codex-responses".
    const codexForwardCompat = resolveOpenAICodexGpt53FallbackModel(
      provider,
      modelId,
      modelRegistry,
    );
    if (codexForwardCompat) {
      return { model: codexForwardCompat, authStorage, modelRegistry };
    }
    // Generic fallback: synthesize a definition for providers present in the
    // config, or for "mock-" test models; token limits are borrowed from the
    // provider's first inline model when available.
    const providerCfg = providers[provider];
    if (providerCfg || modelId.startsWith("mock-")) {
      const fallbackModel: Model<Api> = normalizeModelCompat({
        id: modelId,
        name: modelId,
        api: providerCfg?.api ?? "openai-responses",
        provider,
        baseUrl: providerCfg?.baseUrl,
        reasoning: false,
        input: ["text"],
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
        contextWindow: providerCfg?.models?.[0]?.contextWindow ?? DEFAULT_CONTEXT_TOKENS,
        maxTokens: providerCfg?.models?.[0]?.maxTokens ?? DEFAULT_CONTEXT_TOKENS,
      } as Model<Api>);
      return { model: fallbackModel, authStorage, modelRegistry };
    }
    // Every fallback exhausted: report the unknown model.
    return {
      error: `Unknown model: ${provider}/${modelId}`,
      authStorage,
      modelRegistry,
    };
  }
  return { model: normalizeModelCompat(model), authStorage, modelRegistry };
}
|