build: update deps and align pi sdk usage

This commit is contained in:
Peter Steinberger 2026-03-31 22:55:44 +09:00
parent cbfeecfab4
commit c1ea0ae9c8
No known key found for this signature in database
20 changed files with 1163 additions and 1140 deletions

View File

@ -4,7 +4,7 @@
"description": "OpenClaw ACP runtime backend via acpx",
"type": "module",
"dependencies": {
"acpx": "0.3.1"
"acpx": "0.4.0"
},
"openclaw": {
"extensions": [

View File

@ -5,7 +5,7 @@
"description": "OpenClaw Amazon Bedrock provider plugin",
"type": "module",
"dependencies": {
"@aws-sdk/client-bedrock": "3.1018.0"
"@aws-sdk/client-bedrock": "3.1020.0"
},
"openclaw": {
"bundle": {

View File

@ -9,7 +9,7 @@
},
"dependencies": {
"@pierre/diffs": "1.1.7",
"@pierre/theme": "0.0.24",
"@pierre/theme": "0.0.26",
"@sinclair/typebox": "0.34.49",
"playwright-core": "1.58.2"
},

View File

@ -8,7 +8,7 @@
"@matrix-org/matrix-sdk-crypto-wasm": "18.0.0",
"fake-indexeddb": "^6.2.5",
"markdown-it": "14.1.1",
"matrix-js-sdk": "41.2.0",
"matrix-js-sdk": "41.3.0-rc.0",
"music-metadata": "^11.12.3"
},
"devDependencies": {

View File

@ -58,7 +58,7 @@ function resolveTemplateModelId(modelId: string) {
}
function createTemplateModelRegistry(modelId: string): ModelRegistry {
const registry = new ModelRegistry(AuthStorage.inMemory());
const registry = ModelRegistry.inMemory(AuthStorage.inMemory());
const template = getModel("openai", resolveTemplateModelId(modelId));
registry.registerProvider("openai", {
apiKey: "test",

View File

@ -32,8 +32,14 @@ import {
const PROVIDER_ID = "openai-codex";
const OPENAI_CODEX_BASE_URL = "https://chatgpt.com/backend-api";
const OPENAI_CODEX_GPT_54_MODEL_ID = "gpt-5.4";
const OPENAI_CODEX_GPT_54_CONTEXT_TOKENS = 1_050_000;
const OPENAI_CODEX_GPT_54_CONTEXT_TOKENS = 272_000;
const OPENAI_CODEX_GPT_54_MAX_TOKENS = 128_000;
const OPENAI_CODEX_GPT_54_COST = {
input: 2.5,
output: 15,
cacheRead: 0.25,
cacheWrite: 0,
} as const;
const OPENAI_CODEX_GPT_54_TEMPLATE_MODEL_IDS = ["gpt-5.3-codex", "gpt-5.2-codex"] as const;
const OPENAI_CODEX_GPT_53_MODEL_ID = "gpt-5.3-codex";
const OPENAI_CODEX_GPT_53_SPARK_MODEL_ID = "gpt-5.3-codex-spark";
@ -95,6 +101,7 @@ function resolveCodexForwardCompatModel(
patch = {
contextWindow: OPENAI_CODEX_GPT_54_CONTEXT_TOKENS,
maxTokens: OPENAI_CODEX_GPT_54_MAX_TOKENS,
cost: OPENAI_CODEX_GPT_54_COST,
};
} else if (lower === OPENAI_CODEX_GPT_53_SPARK_MODEL_ID) {
templateIds = [OPENAI_CODEX_GPT_53_MODEL_ID, ...OPENAI_CODEX_TEMPLATE_MODEL_IDS];
@ -202,6 +209,28 @@ function buildOpenAICodexAuthDoctorHint(ctx: { profileId?: string }) {
return "Deprecated profile. Run `openclaw models auth login --provider openai-codex` or `openclaw configure`.";
}
/**
 * Builds a catalog entry for a forward-compat model by cloning a resolved
 * template and overriding its identity and capability fields.
 *
 * The given model id is stamped as both `id` and display `name`, and the
 * readonly input list is copied into a fresh mutable array.
 *
 * Returns undefined when no template was found, so callers can filter the
 * missing entry out of the catalog list.
 */
function buildSyntheticCatalogEntry(
  template: ReturnType<typeof findCatalogTemplate>,
  entry: {
    id: string;
    reasoning: boolean;
    input: readonly ("text" | "image")[];
    contextWindow: number;
  },
) {
  if (!template) {
    return undefined;
  }
  const { id, reasoning, contextWindow } = entry;
  return {
    ...template,
    id,
    name: id,
    reasoning,
    input: entry.input.slice(),
    contextWindow,
  };
}
export function buildOpenAICodexProviderPlugin(): ProviderPlugin {
return {
id: PROVIDER_ID,
@ -280,20 +309,18 @@ export function buildOpenAICodexProviderPlugin(): ProviderPlugin {
templateIds: [OPENAI_CODEX_GPT_53_MODEL_ID, ...OPENAI_CODEX_TEMPLATE_MODEL_IDS],
});
return [
gpt54Template
? {
...gpt54Template,
id: OPENAI_CODEX_GPT_54_MODEL_ID,
name: OPENAI_CODEX_GPT_54_MODEL_ID,
}
: undefined,
sparkTemplate
? {
...sparkTemplate,
id: OPENAI_CODEX_GPT_53_SPARK_MODEL_ID,
name: OPENAI_CODEX_GPT_53_SPARK_MODEL_ID,
}
: undefined,
buildSyntheticCatalogEntry(gpt54Template, {
id: OPENAI_CODEX_GPT_54_MODEL_ID,
reasoning: true,
input: ["text", "image"],
contextWindow: OPENAI_CODEX_GPT_54_CONTEXT_TOKENS,
}),
buildSyntheticCatalogEntry(sparkTemplate, {
id: OPENAI_CODEX_GPT_53_SPARK_MODEL_ID,
reasoning: true,
input: ["text"],
contextWindow: OPENAI_CODEX_GPT_53_SPARK_CONTEXT_TOKENS,
}),
].filter((entry): entry is NonNullable<typeof entry> => entry !== undefined);
},
};

View File

@ -30,7 +30,7 @@ function resolveLiveModelCase(modelId: string): LiveModelCase {
modelId,
templateId: "gpt-5.2",
templateName: "GPT-5.2",
cost: { input: 1, output: 2, cacheRead: 0, cacheWrite: 0 },
cost: { input: 1.75, output: 14, cacheRead: 0.175, cacheWrite: 0 },
contextWindow: 400_000,
maxTokens: 128_000,
};
@ -39,7 +39,7 @@ function resolveLiveModelCase(modelId: string): LiveModelCase {
modelId,
templateId: "gpt-5.2-pro",
templateName: "GPT-5.2 Pro",
cost: { input: 15, output: 60, cacheRead: 0, cacheWrite: 0 },
cost: { input: 21, output: 168, cacheRead: 0, cacheWrite: 0 },
contextWindow: 400_000,
maxTokens: 128_000,
};
@ -48,7 +48,7 @@ function resolveLiveModelCase(modelId: string): LiveModelCase {
modelId,
templateId: "gpt-5-mini",
templateName: "GPT-5 mini",
cost: { input: 1, output: 2, cacheRead: 0, cacheWrite: 0 },
cost: { input: 0.25, output: 2, cacheRead: 0.025, cacheWrite: 0 },
contextWindow: 400_000,
maxTokens: 128_000,
};
@ -57,9 +57,9 @@ function resolveLiveModelCase(modelId: string): LiveModelCase {
modelId,
templateId: "gpt-5-nano",
templateName: "GPT-5 nano",
cost: { input: 0.5, output: 1, cacheRead: 0, cacheWrite: 0 },
contextWindow: 200_000,
maxTokens: 64_000,
cost: { input: 0.05, output: 0.4, cacheRead: 0.005, cacheWrite: 0 },
contextWindow: 400_000,
maxTokens: 128_000,
};
default:
throw new Error(`Unsupported live OpenAI model: ${modelId}`);
@ -139,8 +139,8 @@ describe("buildOpenAIProvider", () => {
id: "gpt-5.4-nano",
api: "openai-responses",
baseUrl: "https://api.openai.com/v1",
contextWindow: 200_000,
maxTokens: 64_000,
contextWindow: 400_000,
maxTokens: 128_000,
});
});
@ -168,16 +168,26 @@ describe("buildOpenAIProvider", () => {
],
} as never);
expect(entries).toContainEqual({
provider: "openai",
id: "gpt-5.4-mini",
name: "gpt-5.4-mini",
});
expect(entries).toContainEqual({
provider: "openai",
id: "gpt-5.4-nano",
name: "gpt-5.4-nano",
});
expect(entries).toContainEqual(
expect.objectContaining({
provider: "openai",
id: "gpt-5.4-mini",
name: "gpt-5.4-mini",
reasoning: true,
input: ["text", "image"],
contextWindow: 400_000,
}),
);
expect(entries).toContainEqual(
expect.objectContaining({
provider: "openai",
id: "gpt-5.4-nano",
name: "gpt-5.4-nano",
reasoning: true,
input: ["text", "image"],
contextWindow: 400_000,
}),
);
});
it("keeps modern live selection on OpenAI 5.2+ and Codex 5.2+", () => {

View File

@ -26,8 +26,25 @@ const OPENAI_GPT_54_MODEL_ID = "gpt-5.4";
const OPENAI_GPT_54_PRO_MODEL_ID = "gpt-5.4-pro";
const OPENAI_GPT_54_MINI_MODEL_ID = "gpt-5.4-mini";
const OPENAI_GPT_54_NANO_MODEL_ID = "gpt-5.4-nano";
const OPENAI_GPT_54_CONTEXT_TOKENS = 1_050_000;
const OPENAI_GPT_54_CONTEXT_TOKENS = 272_000;
const OPENAI_GPT_54_PRO_CONTEXT_TOKENS = 1_050_000;
const OPENAI_GPT_54_MINI_CONTEXT_TOKENS = 400_000;
const OPENAI_GPT_54_NANO_CONTEXT_TOKENS = 400_000;
const OPENAI_GPT_54_MAX_TOKENS = 128_000;
const OPENAI_GPT_54_COST = { input: 2.5, output: 15, cacheRead: 0.25, cacheWrite: 0 } as const;
const OPENAI_GPT_54_PRO_COST = { input: 30, output: 180, cacheRead: 0, cacheWrite: 0 } as const;
const OPENAI_GPT_54_MINI_COST = {
input: 0.75,
output: 4.5,
cacheRead: 0.075,
cacheWrite: 0,
} as const;
const OPENAI_GPT_54_NANO_COST = {
input: 0.2,
output: 1.25,
cacheRead: 0.02,
cacheWrite: 0,
} as const;
const OPENAI_GPT_54_TEMPLATE_MODEL_IDS = ["gpt-5.2"] as const;
const OPENAI_GPT_54_PRO_TEMPLATE_MODEL_IDS = ["gpt-5.2-pro", "gpt-5.2"] as const;
const OPENAI_GPT_54_MINI_TEMPLATE_MODEL_IDS = ["gpt-5-mini"] as const;
@ -96,6 +113,7 @@ function resolveOpenAIGpt54ForwardCompatModel(
baseUrl: "https://api.openai.com/v1",
reasoning: true,
input: ["text", "image"],
cost: OPENAI_GPT_54_COST,
contextWindow: OPENAI_GPT_54_CONTEXT_TOKENS,
maxTokens: OPENAI_GPT_54_MAX_TOKENS,
};
@ -107,7 +125,8 @@ function resolveOpenAIGpt54ForwardCompatModel(
baseUrl: "https://api.openai.com/v1",
reasoning: true,
input: ["text", "image"],
contextWindow: OPENAI_GPT_54_CONTEXT_TOKENS,
cost: OPENAI_GPT_54_PRO_COST,
contextWindow: OPENAI_GPT_54_PRO_CONTEXT_TOKENS,
maxTokens: OPENAI_GPT_54_MAX_TOKENS,
};
} else if (lower === OPENAI_GPT_54_MINI_MODEL_ID) {
@ -118,6 +137,9 @@ function resolveOpenAIGpt54ForwardCompatModel(
baseUrl: "https://api.openai.com/v1",
reasoning: true,
input: ["text", "image"],
cost: OPENAI_GPT_54_MINI_COST,
contextWindow: OPENAI_GPT_54_MINI_CONTEXT_TOKENS,
maxTokens: OPENAI_GPT_54_MAX_TOKENS,
};
} else if (lower === OPENAI_GPT_54_NANO_MODEL_ID) {
templateIds = OPENAI_GPT_54_NANO_TEMPLATE_MODEL_IDS;
@ -127,6 +149,9 @@ function resolveOpenAIGpt54ForwardCompatModel(
baseUrl: "https://api.openai.com/v1",
reasoning: true,
input: ["text", "image"],
cost: OPENAI_GPT_54_NANO_COST,
contextWindow: OPENAI_GPT_54_NANO_CONTEXT_TOKENS,
maxTokens: OPENAI_GPT_54_MAX_TOKENS,
};
} else {
return undefined;
@ -151,6 +176,28 @@ function resolveOpenAIGpt54ForwardCompatModel(
);
}
/**
 * Clones a catalog template into a synthetic forward-compat entry.
 *
 * Applies the explicit capability overrides on top of the template and uses
 * the model id as both the entry id and its display name; the readonly
 * input modalities are copied into a new mutable array.
 *
 * Yields undefined when the template lookup produced nothing, letting the
 * caller's filter step drop the entry.
 */
function buildSyntheticCatalogEntry(
  template: ReturnType<typeof findCatalogTemplate>,
  entry: {
    id: string;
    reasoning: boolean;
    input: readonly ("text" | "image")[];
    contextWindow: number;
  },
) {
  if (!template) {
    return undefined;
  }
  return Object.assign({}, template, {
    id: entry.id,
    name: entry.id,
    reasoning: entry.reasoning,
    input: Array.from(entry.input),
    contextWindow: entry.contextWindow,
  });
}
export function buildOpenAIProvider(): ProviderPlugin {
return {
id: PROVIDER_ID,
@ -237,34 +284,30 @@ export function buildOpenAIProvider(): ProviderPlugin {
templateIds: OPENAI_GPT_54_NANO_TEMPLATE_MODEL_IDS,
});
return [
openAiGpt54Template
? {
...openAiGpt54Template,
id: OPENAI_GPT_54_MODEL_ID,
name: OPENAI_GPT_54_MODEL_ID,
}
: undefined,
openAiGpt54ProTemplate
? {
...openAiGpt54ProTemplate,
id: OPENAI_GPT_54_PRO_MODEL_ID,
name: OPENAI_GPT_54_PRO_MODEL_ID,
}
: undefined,
openAiGpt54MiniTemplate
? {
...openAiGpt54MiniTemplate,
id: OPENAI_GPT_54_MINI_MODEL_ID,
name: OPENAI_GPT_54_MINI_MODEL_ID,
}
: undefined,
openAiGpt54NanoTemplate
? {
...openAiGpt54NanoTemplate,
id: OPENAI_GPT_54_NANO_MODEL_ID,
name: OPENAI_GPT_54_NANO_MODEL_ID,
}
: undefined,
buildSyntheticCatalogEntry(openAiGpt54Template, {
id: OPENAI_GPT_54_MODEL_ID,
reasoning: true,
input: ["text", "image"],
contextWindow: OPENAI_GPT_54_CONTEXT_TOKENS,
}),
buildSyntheticCatalogEntry(openAiGpt54ProTemplate, {
id: OPENAI_GPT_54_PRO_MODEL_ID,
reasoning: true,
input: ["text", "image"],
contextWindow: OPENAI_GPT_54_PRO_CONTEXT_TOKENS,
}),
buildSyntheticCatalogEntry(openAiGpt54MiniTemplate, {
id: OPENAI_GPT_54_MINI_MODEL_ID,
reasoning: true,
input: ["text", "image"],
contextWindow: OPENAI_GPT_54_MINI_CONTEXT_TOKENS,
}),
buildSyntheticCatalogEntry(openAiGpt54NanoTemplate, {
id: OPENAI_GPT_54_NANO_MODEL_ID,
reasoning: true,
input: ["text", "image"],
contextWindow: OPENAI_GPT_54_NANO_CONTEXT_TOKENS,
}),
].filter((entry): entry is NonNullable<typeof entry> => entry !== undefined);
},
};

View File

@ -41,7 +41,7 @@ describeLive("openrouter plugin live", () => {
const resolved = provider.resolveDynamicModel?.({
provider: "openrouter",
modelId: LIVE_MODEL_ID,
modelRegistry: new ModelRegistry(AuthStorage.inMemory()),
modelRegistry: ModelRegistry.inMemory(AuthStorage.inMemory()),
});
if (!resolved) {
throw new Error(`openrouter provider did not resolve ${LIVE_MODEL_ID}`);

View File

@ -7,10 +7,10 @@
"dependencies": {
"mpg123-decoder": "^1.0.3",
"silk-wasm": "^3.7.1",
"ws": "^8.18.0"
"ws": "^8.20.0"
},
"devDependencies": {
"@types/ws": "^8.5.0",
"@types/ws": "^8.18.1",
"openclaw": "workspace:*"
},
"peerDependencies": {

View File

@ -4,8 +4,8 @@
"description": "OpenClaw Tlon/Urbit channel plugin",
"type": "module",
"dependencies": {
"@aws-sdk/client-s3": "3.1019.0",
"@aws-sdk/s3-request-presigner": "3.1019.0",
"@aws-sdk/client-s3": "3.1020.0",
"@aws-sdk/s3-request-presigner": "3.1020.0",
"@tloncorp/tlon-skill": "0.3.1",
"@urbit/aura": "^3.0.0"
},

View File

@ -1163,15 +1163,15 @@
"@agentclientprotocol/sdk": "0.17.1",
"@anthropic-ai/vertex-sdk": "^0.14.4",
"@clack/prompts": "^1.1.0",
"@homebridge/ciao": "^1.3.5",
"@homebridge/ciao": "^1.3.6",
"@line/bot-sdk": "^10.6.0",
"@lydell/node-pty": "1.2.0-beta.3",
"@mariozechner/pi-agent-core": "0.63.2",
"@mariozechner/pi-ai": "0.63.2",
"@mariozechner/pi-coding-agent": "0.63.2",
"@mariozechner/pi-tui": "0.63.2",
"@mariozechner/pi-agent-core": "0.64.0",
"@mariozechner/pi-ai": "0.64.0",
"@mariozechner/pi-coding-agent": "0.64.0",
"@mariozechner/pi-tui": "0.64.0",
"@matrix-org/matrix-sdk-crypto-wasm": "18.0.0",
"@modelcontextprotocol/sdk": "1.28.0",
"@modelcontextprotocol/sdk": "1.29.0",
"@mozilla/readability": "^0.6.0",
"@sinclair/typebox": "0.34.49",
"ajv": "^8.18.0",
@ -1192,14 +1192,14 @@
"linkedom": "^0.18.12",
"long": "^5.3.2",
"markdown-it": "^14.1.1",
"matrix-js-sdk": "41.2.0",
"matrix-js-sdk": "41.3.0-rc.0",
"node-edge-tts": "^1.2.10",
"osc-progress": "^0.3.0",
"pdfjs-dist": "^5.5.207",
"pdfjs-dist": "^5.6.205",
"playwright-core": "1.58.2",
"qrcode-terminal": "^0.12.0",
"sharp": "^0.34.5",
"sqlite-vec": "0.1.7",
"sqlite-vec": "0.1.9",
"tar": "7.5.13",
"tslog": "^4.10.2",
"undici": "^7.24.6",
@ -1217,13 +1217,13 @@
"@types/node": "^25.5.0",
"@types/qrcode-terminal": "^0.12.2",
"@types/ws": "^8.18.1",
"@typescript/native-preview": "7.0.0-dev.20260330.1",
"@typescript/native-preview": "7.0.0-dev.20260331.1",
"@vitest/coverage-v8": "^4.1.2",
"jscpd": "4.0.8",
"jsdom": "^29.0.1",
"lit": "^3.3.2",
"oxfmt": "0.42.0",
"oxlint": "^1.57.0",
"oxfmt": "0.43.0",
"oxlint": "^1.58.0",
"oxlint-tsgolint": "^0.18.1",
"signal-utils": "0.21.1",
"tsdown": "0.21.7",

File diff suppressed because it is too large Load Diff

View File

@ -28,6 +28,15 @@ type DiscoveredModel = {
};
type PiSdkModule = typeof import("./pi-model-discovery.js");
type PiRegistryInstance =
| Array<DiscoveredModel>
| {
getAll: () => Array<DiscoveredModel>;
};
type PiRegistryClassLike = {
create?: (authStorage: unknown, modelsFile: string) => PiRegistryInstance;
new (authStorage: unknown, modelsFile: string): PiRegistryInstance;
};
let modelCatalogPromise: Promise<ModelCatalogEntry[]> | null = null;
let hasLoggedModelCatalogError = false;
@ -140,6 +149,18 @@ export function __setModelCatalogImportForTest(loader?: () => Promise<PiSdkModul
importPiSdk = loader ?? defaultImportPiSdk;
}
/**
 * Creates a pi-sdk ModelRegistry instance across SDK versions.
 *
 * Newer pi SDK releases expose a static `ModelRegistry.create` factory,
 * while older releases only offer a public constructor; this shim prefers
 * the factory (called on the class, preserving `this`) and falls back to
 * `new` so both versions keep working.
 */
function instantiatePiModelRegistry(
  piSdk: PiSdkModule,
  authStorage: unknown,
  modelsFile: string,
): PiRegistryInstance {
  const RegistryClass = piSdk.ModelRegistry as unknown as PiRegistryClassLike;
  const hasFactory = typeof RegistryClass.create === "function";
  return hasFactory
    ? RegistryClass.create(authStorage, modelsFile)
    : new RegistryClass(authStorage, modelsFile);
}
export async function loadModelCatalog(params?: {
config?: OpenClawConfig;
useCache?: boolean;
@ -186,16 +207,11 @@ export async function loadModelCatalog(params?: {
const { join } = await import("node:path");
const authStorage = piSdk.discoverAuthStorage(agentDir);
logStage("auth-storage-ready");
const registry = new (piSdk.ModelRegistry as unknown as {
new (
authStorage: unknown,
modelsFile: string,
):
| Array<DiscoveredModel>
| {
getAll: () => Array<DiscoveredModel>;
};
})(authStorage, join(agentDir, "models.json"));
const registry = instantiatePiModelRegistry(
piSdk,
authStorage,
join(agentDir, "models.json"),
);
logStage("registry-ready");
const entries = Array.isArray(registry) ? registry : registry.getAll();
logStage("registry-read", `entries=${entries.length}`);

View File

@ -204,7 +204,8 @@ function buildDynamicModel(
provider: "openai-codex",
api: "openai-codex-responses",
baseUrl: OPENAI_CODEX_BASE_URL,
contextWindow: 1_050_000,
cost: { input: 2.5, output: 15, cacheRead: 0.25, cacheWrite: 0 },
contextWindow: 272_000,
maxTokens: 128_000,
},
fallback,
@ -245,23 +246,49 @@ function buildDynamicModel(
}
const template = findTemplate(params, "openai", templateIds);
const patch =
lower === "gpt-5.4" || lower === "gpt-5.4-pro"
lower === "gpt-5.4"
? {
provider: "openai",
api: "openai-responses",
baseUrl: OPENAI_BASE_URL,
reasoning: true,
input: ["text", "image"],
contextWindow: 1_050_000,
cost: { input: 2.5, output: 15, cacheRead: 0.25, cacheWrite: 0 },
contextWindow: 272_000,
maxTokens: 128_000,
}
: {
provider: "openai",
api: "openai-responses",
baseUrl: OPENAI_BASE_URL,
reasoning: true,
input: ["text", "image"],
};
: lower === "gpt-5.4-pro"
? {
provider: "openai",
api: "openai-responses",
baseUrl: OPENAI_BASE_URL,
reasoning: true,
input: ["text", "image"],
cost: { input: 30, output: 180, cacheRead: 0, cacheWrite: 0 },
contextWindow: 1_050_000,
maxTokens: 128_000,
}
: lower === "gpt-5.4-mini"
? {
provider: "openai",
api: "openai-responses",
baseUrl: OPENAI_BASE_URL,
reasoning: true,
input: ["text", "image"],
cost: { input: 0.75, output: 4.5, cacheRead: 0.075, cacheWrite: 0 },
contextWindow: 400_000,
maxTokens: 128_000,
}
: {
provider: "openai",
api: "openai-responses",
baseUrl: OPENAI_BASE_URL,
reasoning: true,
input: ["text", "image"],
cost: { input: 0.2, output: 1.25, cacheRead: 0.02, cacheWrite: 0 },
contextWindow: 400_000,
maxTokens: 128_000,
};
return cloneTemplate(template, modelId, patch, {
provider: "openai",
api: "openai-responses",

View File

@ -67,8 +67,10 @@ export function buildOpenAICodexForwardCompatExpectation(
input: isSpark ? ["text"] : ["text", "image"],
cost: isSpark
? { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 }
: OPENAI_CODEX_TEMPLATE_MODEL.cost,
contextWindow: isGpt54 ? 1_050_000 : isSpark ? 128_000 : 272000,
: isGpt54
? { input: 2.5, output: 15, cacheRead: 0.25, cacheWrite: 0 }
: OPENAI_CODEX_TEMPLATE_MODEL.cost,
contextWindow: isGpt54 ? 272_000 : isSpark ? 128_000 : 272000,
maxTokens: 128000,
};
}

View File

@ -102,26 +102,23 @@ function normalizeRegistryModel<T>(value: T, agentDir: string): T {
return normalizeModelCompat(transportNormalized as Model<Api>) as T;
}
class OpenClawModelRegistry extends PiModelRegistryClass {
constructor(
authStorage: PiAuthStorage,
modelsJsonPath: string,
private readonly agentDir: string,
) {
super(authStorage, modelsJsonPath);
}
function createOpenClawModelRegistry(
authStorage: PiAuthStorage,
modelsJsonPath: string,
agentDir: string,
): PiModelRegistry {
const registry = PiModelRegistryClass.create(authStorage, modelsJsonPath);
const getAll = registry.getAll.bind(registry);
const getAvailable = registry.getAvailable.bind(registry);
const find = registry.find.bind(registry);
override getAll(): Array<Model<Api>> {
return super.getAll().map((entry) => normalizeRegistryModel(entry, this.agentDir));
}
registry.getAll = () => getAll().map((entry) => normalizeRegistryModel(entry, agentDir));
registry.getAvailable = () =>
getAvailable().map((entry) => normalizeRegistryModel(entry, agentDir));
registry.find = (provider: string, modelId: string) =>
normalizeRegistryModel(find(provider, modelId), agentDir);
override getAvailable(): Array<Model<Api>> {
return super.getAvailable().map((entry) => normalizeRegistryModel(entry, this.agentDir));
}
override find(provider: string, modelId: string): Model<Api> | undefined {
return normalizeRegistryModel(super.find(provider, modelId), this.agentDir);
}
return registry;
}
function scrubLegacyStaticAuthJsonEntries(pathname: string): void {
@ -243,5 +240,5 @@ export function discoverAuthStorage(agentDir: string): PiAuthStorage {
}
export function discoverModels(authStorage: PiAuthStorage, agentDir: string): PiModelRegistry {
return new OpenClawModelRegistry(authStorage, path.join(agentDir, "models.json"), agentDir);
return createOpenClawModelRegistry(authStorage, path.join(agentDir, "models.json"), agentDir);
}

View File

@ -366,14 +366,67 @@ vi.mock("../agents/pi-model-discovery.js", async () => {
"../agents/pi-model-discovery.js",
);
class MockModelRegistry extends actual.ModelRegistry {
override getAll(): ReturnType<typeof actual.ModelRegistry.prototype.getAll> {
const createActualRegistry = (...args: Parameters<typeof actual.discoverModels>) => {
const modelsFile = path.join(args[1], "models.json");
const Registry = actual.ModelRegistry as unknown as {
create?: (
authStorage: unknown,
modelsFile: string,
) => {
getAll: () => Array<{ provider?: string; id?: string }>;
getAvailable: () => Array<{ provider?: string; id?: string }>;
find: (provider: string, modelId: string) => unknown;
};
new (
authStorage: unknown,
modelsFile: string,
): {
getAll: () => Array<{ provider?: string; id?: string }>;
getAvailable: () => Array<{ provider?: string; id?: string }>;
find: (provider: string, modelId: string) => unknown;
};
};
if (typeof Registry.create === "function") {
return Registry.create(args[0], modelsFile);
}
return new Registry(args[0], modelsFile);
};
class MockModelRegistry {
private readonly actualRegistry?: ReturnType<typeof createActualRegistry>;
constructor(authStorage: unknown, modelsFile: string) {
if (!piSdkMock.enabled) {
return super.getAll();
this.actualRegistry = createActualRegistry(authStorage as never, path.dirname(modelsFile));
}
}
static create(authStorage: unknown, modelsFile: string) {
return new MockModelRegistry(authStorage, modelsFile);
}
getAll() {
if (!piSdkMock.enabled) {
return this.actualRegistry?.getAll() ?? [];
}
piSdkMock.discoverCalls += 1;
// Cast to expected type for testing purposes
return piSdkMock.models as ReturnType<typeof actual.ModelRegistry.prototype.getAll>;
return piSdkMock.models as Array<{ provider?: string; id?: string }>;
}
getAvailable() {
if (!piSdkMock.enabled) {
return this.actualRegistry?.getAvailable() ?? [];
}
return piSdkMock.models as Array<{ provider?: string; id?: string }>;
}
find(provider: string, modelId: string) {
if (!piSdkMock.enabled) {
return this.actualRegistry?.find(provider, modelId);
}
return (piSdkMock.models as Array<{ provider?: string; id?: string }>).find(
(model) => model.provider === provider && model.id === modelId,
);
}
}

View File

@ -79,13 +79,15 @@ export async function expectAugmentedCodexCatalog(
};
}) => Promise<unknown>,
) {
await expect(
augmentModelCatalogWithProviderPlugins({
const result = (await augmentModelCatalogWithProviderPlugins({
env: process.env,
context: {
env: process.env,
context: {
env: process.env,
entries: openaiCodexCatalogEntries,
},
}),
).resolves.toEqual(expectedAugmentedOpenaiCodexCatalogEntries);
entries: openaiCodexCatalogEntries,
},
})) as Array<Record<string, unknown>>;
expect(result).toHaveLength(expectedAugmentedOpenaiCodexCatalogEntries.length);
for (const entry of expectedAugmentedOpenaiCodexCatalogEntries) {
expect(result).toContainEqual(expect.objectContaining(entry));
}
}

View File

@ -599,7 +599,7 @@ export function describeOpenAIProviderRuntimeContract() {
id: "gpt-5.4",
provider: "openai-codex",
api: "openai-codex-responses",
contextWindow: 1_050_000,
contextWindow: 272_000,
maxTokens: 128_000,
});
});