mirror of https://github.com/openclaw/openclaw.git
chore: apply local workspace updates (#9911)
* chore: apply local workspace updates * fix: resolve prep findings after rebase (#9898) (thanks @gumadeiras) * refactor: centralize model allowlist normalization (#9898) (thanks @gumadeiras) * fix: guard model allowlist initialization (#9911) * docs: update changelog scope for #9911 * docs: remove model names from changelog entry (#9911) * fix: satisfy type-aware lint in model allowlist (#9911)
This commit is contained in:
parent
93b450349f
commit
4629054403
|
|
@ -7,6 +7,7 @@ Docs: https://docs.openclaw.ai
|
||||||
### Changes
|
### Changes
|
||||||
|
|
||||||
- Models: default Anthropic model to `anthropic/claude-opus-4-6`. (#9853) Thanks @TinyTb.
|
- Models: default Anthropic model to `anthropic/claude-opus-4-6`. (#9853) Thanks @TinyTb.
|
||||||
|
- Models/Onboarding: refresh provider defaults, update OpenAI/OpenAI Codex wizard defaults, and harden model allowlist initialization for first-time configs with matching docs/tests. (#9911) Thanks @gumadeiras.
|
||||||
- Telegram: auto-inject forum topic `threadId` in message tool and subagent announce so media, buttons, and subagent results land in the correct topic instead of General. (#7235) Thanks @Lukavyi.
|
- Telegram: auto-inject forum topic `threadId` in message tool and subagent announce so media, buttons, and subagent results land in the correct topic instead of General. (#7235) Thanks @Lukavyi.
|
||||||
- CLI: sort `openclaw --help` commands (and options) alphabetically. (#8068) Thanks @deepsoumya617.
|
- CLI: sort `openclaw --help` commands (and options) alphabetically. (#8068) Thanks @deepsoumya617.
|
||||||
- Telegram: remove last `@ts-nocheck` from `bot-handlers.ts`, use Grammy types directly, deduplicate `StickerMetadata`. Zero `@ts-nocheck` remaining in `src/telegram/`. (#9206)
|
- Telegram: remove last `@ts-nocheck` from `bot-handlers.ts`, use Grammy types directly, deduplicate `StickerMetadata`. Zero `@ts-nocheck` remaining in `src/telegram/`. (#9206)
|
||||||
|
|
|
||||||
|
|
@ -34,7 +34,7 @@ New install? Start here: [Getting started](https://docs.openclaw.ai/start/gettin
|
||||||
- **[Anthropic](https://www.anthropic.com/)** (Claude Pro/Max)
|
- **[Anthropic](https://www.anthropic.com/)** (Claude Pro/Max)
|
||||||
- **[OpenAI](https://openai.com/)** (ChatGPT/Codex)
|
- **[OpenAI](https://openai.com/)** (ChatGPT/Codex)
|
||||||
|
|
||||||
Model note: while any model is supported, I strongly recommend **Anthropic Pro/Max (100/200) + Opus 4.5** for long‑context strength and better prompt‑injection resistance. See [Onboarding](https://docs.openclaw.ai/start/onboarding).
|
Model note: while any model is supported, I strongly recommend **Anthropic Pro/Max (100/200) + Opus 4.6** for long‑context strength and better prompt‑injection resistance. See [Onboarding](https://docs.openclaw.ai/start/onboarding).
|
||||||
|
|
||||||
## Models (selection + auth)
|
## Models (selection + auth)
|
||||||
|
|
||||||
|
|
@ -316,7 +316,7 @@ Minimal `~/.openclaw/openclaw.json` (model + defaults):
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
agent: {
|
agent: {
|
||||||
model: "anthropic/claude-opus-4-5",
|
model: "anthropic/claude-opus-4-6",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
|
||||||
|
|
@ -335,7 +335,7 @@ extension OnboardingView {
|
||||||
.multilineTextAlignment(.center)
|
.multilineTextAlignment(.center)
|
||||||
.frame(maxWidth: 540)
|
.frame(maxWidth: 540)
|
||||||
.fixedSize(horizontal: false, vertical: true)
|
.fixedSize(horizontal: false, vertical: true)
|
||||||
Text("OpenClaw supports any model — we strongly recommend Opus 4.5 for the best experience.")
|
Text("OpenClaw supports any model — we strongly recommend Opus 4.6 for the best experience.")
|
||||||
.font(.callout)
|
.font(.callout)
|
||||||
.foregroundStyle(.secondary)
|
.foregroundStyle(.secondary)
|
||||||
.multilineTextAlignment(.center)
|
.multilineTextAlignment(.center)
|
||||||
|
|
|
||||||
|
|
@ -169,7 +169,7 @@ extension SessionRow {
|
||||||
systemSent: true,
|
systemSent: true,
|
||||||
abortedLastRun: true,
|
abortedLastRun: true,
|
||||||
tokens: SessionTokenStats(input: 5000, output: 1200, total: 6200, contextTokens: 200_000),
|
tokens: SessionTokenStats(input: 5000, output: 1200, total: 6200, contextTokens: 200_000),
|
||||||
model: "claude-opus-4-5"),
|
model: "claude-opus-4-6"),
|
||||||
SessionRow(
|
SessionRow(
|
||||||
id: "global",
|
id: "global",
|
||||||
key: "global",
|
key: "global",
|
||||||
|
|
@ -242,7 +242,7 @@ struct SessionStoreSnapshot {
|
||||||
|
|
||||||
@MainActor
|
@MainActor
|
||||||
enum SessionLoader {
|
enum SessionLoader {
|
||||||
static let fallbackModel = "claude-opus-4-5"
|
static let fallbackModel = "claude-opus-4-6"
|
||||||
static let fallbackContextTokens = 200_000
|
static let fallbackContextTokens = 200_000
|
||||||
|
|
||||||
static let defaultStorePath = standardize(
|
static let defaultStorePath = standardize(
|
||||||
|
|
|
||||||
|
|
@ -23,7 +23,7 @@ struct MenuSessionsInjectorTests {
|
||||||
let injector = MenuSessionsInjector()
|
let injector = MenuSessionsInjector()
|
||||||
injector.setTestingControlChannelConnected(true)
|
injector.setTestingControlChannelConnected(true)
|
||||||
|
|
||||||
let defaults = SessionDefaults(model: "anthropic/claude-opus-4-5", contextTokens: 200_000)
|
let defaults = SessionDefaults(model: "anthropic/claude-opus-4-6", contextTokens: 200_000)
|
||||||
let rows = [
|
let rows = [
|
||||||
SessionRow(
|
SessionRow(
|
||||||
id: "main",
|
id: "main",
|
||||||
|
|
@ -41,7 +41,7 @@ struct MenuSessionsInjectorTests {
|
||||||
systemSent: false,
|
systemSent: false,
|
||||||
abortedLastRun: false,
|
abortedLastRun: false,
|
||||||
tokens: SessionTokenStats(input: 10, output: 20, total: 30, contextTokens: 200_000),
|
tokens: SessionTokenStats(input: 10, output: 20, total: 30, contextTokens: 200_000),
|
||||||
model: "claude-opus-4-5"),
|
model: "claude-opus-4-6"),
|
||||||
SessionRow(
|
SessionRow(
|
||||||
id: "discord:group:alpha",
|
id: "discord:group:alpha",
|
||||||
key: "discord:group:alpha",
|
key: "discord:group:alpha",
|
||||||
|
|
@ -58,7 +58,7 @@ struct MenuSessionsInjectorTests {
|
||||||
systemSent: true,
|
systemSent: true,
|
||||||
abortedLastRun: true,
|
abortedLastRun: true,
|
||||||
tokens: SessionTokenStats(input: 50, output: 50, total: 100, contextTokens: 200_000),
|
tokens: SessionTokenStats(input: 50, output: 50, total: 100, contextTokens: 200_000),
|
||||||
model: "claude-opus-4-5"),
|
model: "claude-opus-4-6"),
|
||||||
]
|
]
|
||||||
let snapshot = SessionStoreSnapshot(
|
let snapshot = SessionStoreSnapshot(
|
||||||
storePath: "/tmp/sessions.json",
|
storePath: "/tmp/sessions.json",
|
||||||
|
|
|
||||||
|
|
@ -78,8 +78,8 @@ export AWS_BEARER_TOKEN_BEDROCK="..."
|
||||||
auth: "aws-sdk",
|
auth: "aws-sdk",
|
||||||
models: [
|
models: [
|
||||||
{
|
{
|
||||||
id: "anthropic.claude-opus-4-5-20251101-v1:0",
|
id: "us.anthropic.claude-opus-4-6-v1:0",
|
||||||
name: "Claude Opus 4.5 (Bedrock)",
|
name: "Claude Opus 4.6 (Bedrock)",
|
||||||
reasoning: true,
|
reasoning: true,
|
||||||
input: ["text", "image"],
|
input: ["text", "image"],
|
||||||
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
|
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
|
||||||
|
|
@ -92,7 +92,7 @@ export AWS_BEARER_TOKEN_BEDROCK="..."
|
||||||
},
|
},
|
||||||
agents: {
|
agents: {
|
||||||
defaults: {
|
defaults: {
|
||||||
model: { primary: "amazon-bedrock/anthropic.claude-opus-4-5-20251101-v1:0" },
|
model: { primary: "amazon-bedrock/us.anthropic.claude-opus-4-6-v1:0" },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -13,7 +13,7 @@ For model selection rules, see [/concepts/models](/concepts/models).
|
||||||
|
|
||||||
## Quick rules
|
## Quick rules
|
||||||
|
|
||||||
- Model refs use `provider/model` (example: `opencode/claude-opus-4-5`).
|
- Model refs use `provider/model` (example: `opencode/claude-opus-4-6`).
|
||||||
- If you set `agents.defaults.models`, it becomes the allowlist.
|
- If you set `agents.defaults.models`, it becomes the allowlist.
|
||||||
- CLI helpers: `openclaw onboard`, `openclaw models list`, `openclaw models set <provider/model>`.
|
- CLI helpers: `openclaw onboard`, `openclaw models list`, `openclaw models set <provider/model>`.
|
||||||
|
|
||||||
|
|
@ -26,12 +26,12 @@ OpenClaw ships with the pi‑ai catalog. These providers require **no**
|
||||||
|
|
||||||
- Provider: `openai`
|
- Provider: `openai`
|
||||||
- Auth: `OPENAI_API_KEY`
|
- Auth: `OPENAI_API_KEY`
|
||||||
- Example model: `openai/gpt-5.2`
|
- Example model: `openai/gpt-5.1-codex`
|
||||||
- CLI: `openclaw onboard --auth-choice openai-api-key`
|
- CLI: `openclaw onboard --auth-choice openai-api-key`
|
||||||
|
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
agents: { defaults: { model: { primary: "openai/gpt-5.2" } } },
|
agents: { defaults: { model: { primary: "openai/gpt-5.1-codex" } } },
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -39,12 +39,12 @@ OpenClaw ships with the pi‑ai catalog. These providers require **no**
|
||||||
|
|
||||||
- Provider: `anthropic`
|
- Provider: `anthropic`
|
||||||
- Auth: `ANTHROPIC_API_KEY` or `claude setup-token`
|
- Auth: `ANTHROPIC_API_KEY` or `claude setup-token`
|
||||||
- Example model: `anthropic/claude-opus-4-5`
|
- Example model: `anthropic/claude-opus-4-6`
|
||||||
- CLI: `openclaw onboard --auth-choice token` (paste setup-token) or `openclaw models auth paste-token --provider anthropic`
|
- CLI: `openclaw onboard --auth-choice token` (paste setup-token) or `openclaw models auth paste-token --provider anthropic`
|
||||||
|
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
agents: { defaults: { model: { primary: "anthropic/claude-opus-4-5" } } },
|
agents: { defaults: { model: { primary: "anthropic/claude-opus-4-6" } } },
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -52,12 +52,12 @@ OpenClaw ships with the pi‑ai catalog. These providers require **no**
|
||||||
|
|
||||||
- Provider: `openai-codex`
|
- Provider: `openai-codex`
|
||||||
- Auth: OAuth (ChatGPT)
|
- Auth: OAuth (ChatGPT)
|
||||||
- Example model: `openai-codex/gpt-5.2`
|
- Example model: `openai-codex/gpt-5.3-codex`
|
||||||
- CLI: `openclaw onboard --auth-choice openai-codex` or `openclaw models auth login --provider openai-codex`
|
- CLI: `openclaw onboard --auth-choice openai-codex` or `openclaw models auth login --provider openai-codex`
|
||||||
|
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
agents: { defaults: { model: { primary: "openai-codex/gpt-5.2" } } },
|
agents: { defaults: { model: { primary: "openai-codex/gpt-5.3-codex" } } },
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -65,12 +65,12 @@ OpenClaw ships with the pi‑ai catalog. These providers require **no**
|
||||||
|
|
||||||
- Provider: `opencode`
|
- Provider: `opencode`
|
||||||
- Auth: `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`)
|
- Auth: `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`)
|
||||||
- Example model: `opencode/claude-opus-4-5`
|
- Example model: `opencode/claude-opus-4-6`
|
||||||
- CLI: `openclaw onboard --auth-choice opencode-zen`
|
- CLI: `openclaw onboard --auth-choice opencode-zen`
|
||||||
|
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
agents: { defaults: { model: { primary: "opencode/claude-opus-4-5" } } },
|
agents: { defaults: { model: { primary: "opencode/claude-opus-4-6" } } },
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -106,7 +106,7 @@ OpenClaw ships with the pi‑ai catalog. These providers require **no**
|
||||||
|
|
||||||
- Provider: `vercel-ai-gateway`
|
- Provider: `vercel-ai-gateway`
|
||||||
- Auth: `AI_GATEWAY_API_KEY`
|
- Auth: `AI_GATEWAY_API_KEY`
|
||||||
- Example model: `vercel-ai-gateway/anthropic/claude-opus-4.5`
|
- Example model: `vercel-ai-gateway/anthropic/claude-opus-4.6`
|
||||||
- CLI: `openclaw onboard --auth-choice ai-gateway-api-key`
|
- CLI: `openclaw onboard --auth-choice ai-gateway-api-key`
|
||||||
|
|
||||||
### Other built-in providers
|
### Other built-in providers
|
||||||
|
|
@ -309,7 +309,7 @@ Notes:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
openclaw onboard --auth-choice opencode-zen
|
openclaw onboard --auth-choice opencode-zen
|
||||||
openclaw models set opencode/claude-opus-4-5
|
openclaw models set opencode/claude-opus-4-6
|
||||||
openclaw models list
|
openclaw models list
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -83,7 +83,7 @@ Example allowlist config:
|
||||||
model: { primary: "anthropic/claude-sonnet-4-5" },
|
model: { primary: "anthropic/claude-sonnet-4-5" },
|
||||||
models: {
|
models: {
|
||||||
"anthropic/claude-sonnet-4-5": { alias: "Sonnet" },
|
"anthropic/claude-sonnet-4-5": { alias: "Sonnet" },
|
||||||
"anthropic/claude-opus-4-5": { alias: "Opus" },
|
"anthropic/claude-opus-4-6": { alias: "Opus" },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -221,7 +221,7 @@ Split by channel: route WhatsApp to a fast everyday agent and Telegram to an Opu
|
||||||
id: "opus",
|
id: "opus",
|
||||||
name: "Deep Work",
|
name: "Deep Work",
|
||||||
workspace: "~/.openclaw/workspace-opus",
|
workspace: "~/.openclaw/workspace-opus",
|
||||||
model: "anthropic/claude-opus-4-5",
|
model: "anthropic/claude-opus-4-6",
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
|
|
@ -255,7 +255,7 @@ Keep WhatsApp on the fast agent, but route one DM to Opus:
|
||||||
id: "opus",
|
id: "opus",
|
||||||
name: "Deep Work",
|
name: "Deep Work",
|
||||||
workspace: "~/.openclaw/workspace-opus",
|
workspace: "~/.openclaw/workspace-opus",
|
||||||
model: "anthropic/claude-opus-4-5",
|
model: "anthropic/claude-opus-4-6",
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -25,13 +25,13 @@ want “always works” text responses without relying on external APIs.
|
||||||
You can use Claude Code CLI **without any config** (OpenClaw ships a built-in default):
|
You can use Claude Code CLI **without any config** (OpenClaw ships a built-in default):
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
openclaw agent --message "hi" --model claude-cli/opus-4.5
|
openclaw agent --message "hi" --model claude-cli/opus-4.6
|
||||||
```
|
```
|
||||||
|
|
||||||
Codex CLI also works out of the box:
|
Codex CLI also works out of the box:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
openclaw agent --message "hi" --model codex-cli/gpt-5.2-codex
|
openclaw agent --message "hi" --model codex-cli/gpt-5.3-codex
|
||||||
```
|
```
|
||||||
|
|
||||||
If your gateway runs under launchd/systemd and PATH is minimal, add just the
|
If your gateway runs under launchd/systemd and PATH is minimal, add just the
|
||||||
|
|
@ -62,11 +62,12 @@ Add a CLI backend to your fallback list so it only runs when primary models fail
|
||||||
agents: {
|
agents: {
|
||||||
defaults: {
|
defaults: {
|
||||||
model: {
|
model: {
|
||||||
primary: "anthropic/claude-opus-4-5",
|
primary: "anthropic/claude-opus-4-6",
|
||||||
fallbacks: ["claude-cli/opus-4.5"],
|
fallbacks: ["claude-cli/opus-4.6", "claude-cli/opus-4.5"],
|
||||||
},
|
},
|
||||||
models: {
|
models: {
|
||||||
"anthropic/claude-opus-4-5": { alias: "Opus" },
|
"anthropic/claude-opus-4-6": { alias: "Opus" },
|
||||||
|
"claude-cli/opus-4.6": {},
|
||||||
"claude-cli/opus-4.5": {},
|
"claude-cli/opus-4.5": {},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -112,6 +113,7 @@ The provider id becomes the left side of your model ref:
|
||||||
input: "arg",
|
input: "arg",
|
||||||
modelArg: "--model",
|
modelArg: "--model",
|
||||||
modelAliases: {
|
modelAliases: {
|
||||||
|
"claude-opus-4-6": "opus",
|
||||||
"claude-opus-4-5": "opus",
|
"claude-opus-4-5": "opus",
|
||||||
"claude-sonnet-4-5": "sonnet",
|
"claude-sonnet-4-5": "sonnet",
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -226,13 +226,13 @@ Save to `~/.openclaw/openclaw.json` and you can DM the bot from that number.
|
||||||
userTimezone: "America/Chicago",
|
userTimezone: "America/Chicago",
|
||||||
model: {
|
model: {
|
||||||
primary: "anthropic/claude-sonnet-4-5",
|
primary: "anthropic/claude-sonnet-4-5",
|
||||||
fallbacks: ["anthropic/claude-opus-4-5", "openai/gpt-5.2"],
|
fallbacks: ["anthropic/claude-opus-4-6", "openai/gpt-5.2"],
|
||||||
},
|
},
|
||||||
imageModel: {
|
imageModel: {
|
||||||
primary: "openrouter/anthropic/claude-sonnet-4-5",
|
primary: "openrouter/anthropic/claude-sonnet-4-5",
|
||||||
},
|
},
|
||||||
models: {
|
models: {
|
||||||
"anthropic/claude-opus-4-5": { alias: "opus" },
|
"anthropic/claude-opus-4-6": { alias: "opus" },
|
||||||
"anthropic/claude-sonnet-4-5": { alias: "sonnet" },
|
"anthropic/claude-sonnet-4-5": { alias: "sonnet" },
|
||||||
"openai/gpt-5.2": { alias: "gpt" },
|
"openai/gpt-5.2": { alias: "gpt" },
|
||||||
},
|
},
|
||||||
|
|
@ -496,7 +496,7 @@ If more than one person can DM your bot (multiple entries in `allowFrom`, pairin
|
||||||
workspace: "~/.openclaw/workspace",
|
workspace: "~/.openclaw/workspace",
|
||||||
model: {
|
model: {
|
||||||
primary: "anthropic/claude-sonnet-4-5",
|
primary: "anthropic/claude-sonnet-4-5",
|
||||||
fallbacks: ["anthropic/claude-opus-4-5"],
|
fallbacks: ["anthropic/claude-opus-4-6"],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
@ -534,7 +534,7 @@ If more than one person can DM your bot (multiple entries in `allowFrom`, pairin
|
||||||
agent: {
|
agent: {
|
||||||
workspace: "~/.openclaw/workspace",
|
workspace: "~/.openclaw/workspace",
|
||||||
model: {
|
model: {
|
||||||
primary: "anthropic/claude-opus-4-5",
|
primary: "anthropic/claude-opus-4-6",
|
||||||
fallbacks: ["minimax/MiniMax-M2.1"],
|
fallbacks: ["minimax/MiniMax-M2.1"],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -1547,8 +1547,8 @@ The `responsePrefix` string can include template variables that resolve dynamica
|
||||||
|
|
||||||
| Variable | Description | Example |
|
| Variable | Description | Example |
|
||||||
| ----------------- | ---------------------- | --------------------------- |
|
| ----------------- | ---------------------- | --------------------------- |
|
||||||
| `{model}` | Short model name | `claude-opus-4-5`, `gpt-4o` |
|
| `{model}` | Short model name | `claude-opus-4-6`, `gpt-4o` |
|
||||||
| `{modelFull}` | Full model identifier | `anthropic/claude-opus-4-5` |
|
| `{modelFull}` | Full model identifier | `anthropic/claude-opus-4-6` |
|
||||||
| `{provider}` | Provider name | `anthropic`, `openai` |
|
| `{provider}` | Provider name | `anthropic`, `openai` |
|
||||||
| `{thinkingLevel}` | Current thinking level | `high`, `low`, `off` |
|
| `{thinkingLevel}` | Current thinking level | `high`, `low`, `off` |
|
||||||
| `{identity.name}` | Agent identity name | (same as `"auto"` mode) |
|
| `{identity.name}` | Agent identity name | (same as `"auto"` mode) |
|
||||||
|
|
@ -1564,7 +1564,7 @@ Unresolved variables remain as literal text.
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
Example output: `[claude-opus-4-5 | think:high] Here's my response...`
|
Example output: `[claude-opus-4-6 | think:high] Here's my response...`
|
||||||
|
|
||||||
WhatsApp inbound prefix is configured via `channels.whatsapp.messagePrefix` (deprecated:
|
WhatsApp inbound prefix is configured via `channels.whatsapp.messagePrefix` (deprecated:
|
||||||
`messages.messagePrefix`). Default stays **unchanged**: `"[openclaw]"` when
|
`messages.messagePrefix`). Default stays **unchanged**: `"[openclaw]"` when
|
||||||
|
|
@ -1710,7 +1710,7 @@ Z.AI GLM-4.x models automatically enable thinking mode unless you:
|
||||||
OpenClaw also ships a few built-in alias shorthands. Defaults only apply when the model
|
OpenClaw also ships a few built-in alias shorthands. Defaults only apply when the model
|
||||||
is already present in `agents.defaults.models`:
|
is already present in `agents.defaults.models`:
|
||||||
|
|
||||||
- `opus` -> `anthropic/claude-opus-4-5`
|
- `opus` -> `anthropic/claude-opus-4-6`
|
||||||
- `sonnet` -> `anthropic/claude-sonnet-4-5`
|
- `sonnet` -> `anthropic/claude-sonnet-4-5`
|
||||||
- `gpt` -> `openai/gpt-5.2`
|
- `gpt` -> `openai/gpt-5.2`
|
||||||
- `gpt-mini` -> `openai/gpt-5-mini`
|
- `gpt-mini` -> `openai/gpt-5-mini`
|
||||||
|
|
@ -1719,18 +1719,18 @@ is already present in `agents.defaults.models`:
|
||||||
|
|
||||||
If you configure the same alias name (case-insensitive) yourself, your value wins (defaults never override).
|
If you configure the same alias name (case-insensitive) yourself, your value wins (defaults never override).
|
||||||
|
|
||||||
Example: Opus 4.5 primary with MiniMax M2.1 fallback (hosted MiniMax):
|
Example: Opus 4.6 primary with MiniMax M2.1 fallback (hosted MiniMax):
|
||||||
|
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
agents: {
|
agents: {
|
||||||
defaults: {
|
defaults: {
|
||||||
models: {
|
models: {
|
||||||
"anthropic/claude-opus-4-5": { alias: "opus" },
|
"anthropic/claude-opus-4-6": { alias: "opus" },
|
||||||
"minimax/MiniMax-M2.1": { alias: "minimax" },
|
"minimax/MiniMax-M2.1": { alias: "minimax" },
|
||||||
},
|
},
|
||||||
model: {
|
model: {
|
||||||
primary: "anthropic/claude-opus-4-5",
|
primary: "anthropic/claude-opus-4-6",
|
||||||
fallbacks: ["minimax/MiniMax-M2.1"],
|
fallbacks: ["minimax/MiniMax-M2.1"],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -1786,7 +1786,7 @@ Example:
|
||||||
agents: {
|
agents: {
|
||||||
defaults: {
|
defaults: {
|
||||||
models: {
|
models: {
|
||||||
"anthropic/claude-opus-4-5": { alias: "Opus" },
|
"anthropic/claude-opus-4-6": { alias: "Opus" },
|
||||||
"anthropic/claude-sonnet-4-1": { alias: "Sonnet" },
|
"anthropic/claude-sonnet-4-1": { alias: "Sonnet" },
|
||||||
"openrouter/deepseek/deepseek-r1:free": {},
|
"openrouter/deepseek/deepseek-r1:free": {},
|
||||||
"zai/glm-4.7": {
|
"zai/glm-4.7": {
|
||||||
|
|
@ -1800,7 +1800,7 @@ Example:
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
model: {
|
model: {
|
||||||
primary: "anthropic/claude-opus-4-5",
|
primary: "anthropic/claude-opus-4-6",
|
||||||
fallbacks: [
|
fallbacks: [
|
||||||
"openrouter/deepseek/deepseek-r1:free",
|
"openrouter/deepseek/deepseek-r1:free",
|
||||||
"openrouter/meta-llama/llama-3.3-70b-instruct:free",
|
"openrouter/meta-llama/llama-3.3-70b-instruct:free",
|
||||||
|
|
@ -2011,7 +2011,7 @@ Typing indicators:
|
||||||
- `session.typingIntervalSeconds`: per-session override for the refresh interval.
|
- `session.typingIntervalSeconds`: per-session override for the refresh interval.
|
||||||
See [/concepts/typing-indicators](/concepts/typing-indicators) for behavior details.
|
See [/concepts/typing-indicators](/concepts/typing-indicators) for behavior details.
|
||||||
|
|
||||||
`agents.defaults.model.primary` should be set as `provider/model` (e.g. `anthropic/claude-opus-4-5`).
|
`agents.defaults.model.primary` should be set as `provider/model` (e.g. `anthropic/claude-opus-4-6`).
|
||||||
Aliases come from `agents.defaults.models.*.alias` (e.g. `Opus`).
|
Aliases come from `agents.defaults.models.*.alias` (e.g. `Opus`).
|
||||||
If you omit the provider, OpenClaw currently assumes `anthropic` as a temporary
|
If you omit the provider, OpenClaw currently assumes `anthropic` as a temporary
|
||||||
deprecation fallback.
|
deprecation fallback.
|
||||||
|
|
@ -2485,7 +2485,7 @@ the built-in `opencode` provider from pi-ai; set `OPENCODE_API_KEY` (or
|
||||||
|
|
||||||
Notes:
|
Notes:
|
||||||
|
|
||||||
- Model refs use `opencode/<modelId>` (example: `opencode/claude-opus-4-5`).
|
- Model refs use `opencode/<modelId>` (example: `opencode/claude-opus-4-6`).
|
||||||
- If you enable an allowlist via `agents.defaults.models`, add each model you plan to use.
|
- If you enable an allowlist via `agents.defaults.models`, add each model you plan to use.
|
||||||
- Shortcut: `openclaw onboard --auth-choice opencode-zen`.
|
- Shortcut: `openclaw onboard --auth-choice opencode-zen`.
|
||||||
|
|
||||||
|
|
@ -2493,8 +2493,8 @@ Notes:
|
||||||
{
|
{
|
||||||
agents: {
|
agents: {
|
||||||
defaults: {
|
defaults: {
|
||||||
model: { primary: "opencode/claude-opus-4-5" },
|
model: { primary: "opencode/claude-opus-4-6" },
|
||||||
models: { "opencode/claude-opus-4-5": { alias: "Opus" } },
|
models: { "opencode/claude-opus-4-6": { alias: "Opus" } },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
@ -2652,7 +2652,7 @@ Use MiniMax M2.1 directly without LM Studio:
|
||||||
agent: {
|
agent: {
|
||||||
model: { primary: "minimax/MiniMax-M2.1" },
|
model: { primary: "minimax/MiniMax-M2.1" },
|
||||||
models: {
|
models: {
|
||||||
"anthropic/claude-opus-4-5": { alias: "Opus" },
|
"anthropic/claude-opus-4-6": { alias: "Opus" },
|
||||||
"minimax/MiniMax-M2.1": { alias: "Minimax" },
|
"minimax/MiniMax-M2.1": { alias: "Minimax" },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -83,7 +83,7 @@ and logged; a message that is only `HEARTBEAT_OK` is dropped.
|
||||||
defaults: {
|
defaults: {
|
||||||
heartbeat: {
|
heartbeat: {
|
||||||
every: "30m", // default: 30m (0m disables)
|
every: "30m", // default: 30m (0m disables)
|
||||||
model: "anthropic/claude-opus-4-5",
|
model: "anthropic/claude-opus-4-6",
|
||||||
includeReasoning: false, // default: false (deliver separate Reasoning: message when available)
|
includeReasoning: false, // default: false (deliver separate Reasoning: message when available)
|
||||||
target: "last", // last | none | <channel id> (core or plugin, e.g. "bluebubbles")
|
target: "last", // last | none | <channel id> (core or plugin, e.g. "bluebubbles")
|
||||||
to: "+15551234567", // optional channel-specific override
|
to: "+15551234567", // optional channel-specific override
|
||||||
|
|
|
||||||
|
|
@ -21,7 +21,7 @@ Best current local stack. Load MiniMax M2.1 in LM Studio, enable the local serve
|
||||||
defaults: {
|
defaults: {
|
||||||
model: { primary: "lmstudio/minimax-m2.1-gs32" },
|
model: { primary: "lmstudio/minimax-m2.1-gs32" },
|
||||||
models: {
|
models: {
|
||||||
"anthropic/claude-opus-4-5": { alias: "Opus" },
|
"anthropic/claude-opus-4-6": { alias: "Opus" },
|
||||||
"lmstudio/minimax-m2.1-gs32": { alias: "Minimax" },
|
"lmstudio/minimax-m2.1-gs32": { alias: "Minimax" },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -68,12 +68,12 @@ Keep hosted models configured even when running local; use `models.mode: "merge"
|
||||||
defaults: {
|
defaults: {
|
||||||
model: {
|
model: {
|
||||||
primary: "anthropic/claude-sonnet-4-5",
|
primary: "anthropic/claude-sonnet-4-5",
|
||||||
fallbacks: ["lmstudio/minimax-m2.1-gs32", "anthropic/claude-opus-4-5"],
|
fallbacks: ["lmstudio/minimax-m2.1-gs32", "anthropic/claude-opus-4-6"],
|
||||||
},
|
},
|
||||||
models: {
|
models: {
|
||||||
"anthropic/claude-sonnet-4-5": { alias: "Sonnet" },
|
"anthropic/claude-sonnet-4-5": { alias: "Sonnet" },
|
||||||
"lmstudio/minimax-m2.1-gs32": { alias: "MiniMax Local" },
|
"lmstudio/minimax-m2.1-gs32": { alias: "MiniMax Local" },
|
||||||
"anthropic/claude-opus-4-5": { alias: "Opus" },
|
"anthropic/claude-opus-4-6": { alias: "Opus" },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -243,7 +243,7 @@ Even with strong system prompts, **prompt injection is not solved**. System prom
|
||||||
- Run sensitive tool execution in a sandbox; keep secrets out of the agent’s reachable filesystem.
|
- Run sensitive tool execution in a sandbox; keep secrets out of the agent’s reachable filesystem.
|
||||||
- Note: sandboxing is opt-in. If sandbox mode is off, exec runs on the gateway host even though tools.exec.host defaults to sandbox, and host exec does not require approvals unless you set host=gateway and configure exec approvals.
|
- Note: sandboxing is opt-in. If sandbox mode is off, exec runs on the gateway host even though tools.exec.host defaults to sandbox, and host exec does not require approvals unless you set host=gateway and configure exec approvals.
|
||||||
- Limit high-risk tools (`exec`, `browser`, `web_fetch`, `web_search`) to trusted agents or explicit allowlists.
|
- Limit high-risk tools (`exec`, `browser`, `web_fetch`, `web_search`) to trusted agents or explicit allowlists.
|
||||||
- **Model choice matters:** older/legacy models can be less robust against prompt injection and tool misuse. Prefer modern, instruction-hardened models for any bot with tools. We recommend Anthropic Opus 4.5 because it’s quite good at recognizing prompt injections (see [“A step forward on safety”](https://www.anthropic.com/news/claude-opus-4-5)).
|
- **Model choice matters:** older/legacy models can be less robust against prompt injection and tool misuse. Prefer modern, instruction-hardened models for any bot with tools. We recommend Anthropic Opus 4.6 (or the latest Opus) because it’s strong at recognizing prompt injections (see [“A step forward on safety”](https://www.anthropic.com/news/claude-opus-4-5)).
|
||||||
|
|
||||||
Red flags to treat as untrusted:
|
Red flags to treat as untrusted:
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -707,7 +707,7 @@ Yes - via pi-ai's **Amazon Bedrock (Converse)** provider with **manual config**.
|
||||||
|
|
||||||
### How does Codex auth work
|
### How does Codex auth work
|
||||||
|
|
||||||
OpenClaw supports **OpenAI Code (Codex)** via OAuth (ChatGPT sign-in). The wizard can run the OAuth flow and will set the default model to `openai-codex/gpt-5.2` when appropriate. See [Model providers](/concepts/model-providers) and [Wizard](/start/wizard).
|
OpenClaw supports **OpenAI Code (Codex)** via OAuth (ChatGPT sign-in). The wizard can run the OAuth flow and will set the default model to `openai-codex/gpt-5.3-codex` when appropriate. See [Model providers](/concepts/model-providers) and [Wizard](/start/wizard).
|
||||||
|
|
||||||
### Do you support OpenAI subscription auth Codex OAuth
|
### Do you support OpenAI subscription auth Codex OAuth
|
||||||
|
|
||||||
|
|
@ -1936,11 +1936,11 @@ OpenClaw's default model is whatever you set as:
|
||||||
agents.defaults.model.primary
|
agents.defaults.model.primary
|
||||||
```
|
```
|
||||||
|
|
||||||
Models are referenced as `provider/model` (example: `anthropic/claude-opus-4-5`). If you omit the provider, OpenClaw currently assumes `anthropic` as a temporary deprecation fallback - but you should still **explicitly** set `provider/model`.
|
Models are referenced as `provider/model` (example: `anthropic/claude-opus-4-6`). If you omit the provider, OpenClaw currently assumes `anthropic` as a temporary deprecation fallback - but you should still **explicitly** set `provider/model`.
|
||||||
|
|
||||||
### What model do you recommend
|
### What model do you recommend
|
||||||
|
|
||||||
**Recommended default:** `anthropic/claude-opus-4-5`.
|
**Recommended default:** `anthropic/claude-opus-4-6`.
|
||||||
**Good alternative:** `anthropic/claude-sonnet-4-5`.
|
**Good alternative:** `anthropic/claude-sonnet-4-5`.
|
||||||
**Reliable (less character):** `openai/gpt-5.2` - nearly as good as Opus, just less personality.
|
**Reliable (less character):** `openai/gpt-5.2` - nearly as good as Opus, just less personality.
|
||||||
**Budget:** `zai/glm-4.7`.
|
**Budget:** `zai/glm-4.7`.
|
||||||
|
|
@ -1989,7 +1989,7 @@ Docs: [Models](/concepts/models), [Configure](/cli/configure), [Config](/cli/con
|
||||||
|
|
||||||
### What do OpenClaw, Flawd, and Krill use for models
|
### What do OpenClaw, Flawd, and Krill use for models
|
||||||
|
|
||||||
- **OpenClaw + Flawd:** Anthropic Opus (`anthropic/claude-opus-4-5`) - see [Anthropic](/providers/anthropic).
|
- **OpenClaw + Flawd:** Anthropic Opus (`anthropic/claude-opus-4-6`) - see [Anthropic](/providers/anthropic).
|
||||||
- **Krill:** MiniMax M2.1 (`minimax/MiniMax-M2.1`) - see [MiniMax](/providers/minimax).
|
- **Krill:** MiniMax M2.1 (`minimax/MiniMax-M2.1`) - see [MiniMax](/providers/minimax).
|
||||||
|
|
||||||
### How do I switch models on the fly without restarting
|
### How do I switch models on the fly without restarting
|
||||||
|
|
@ -2029,7 +2029,7 @@ It also shows the configured provider endpoint (`baseUrl`) and API mode (`api`)
|
||||||
Re-run `/model` **without** the `@profile` suffix:
|
Re-run `/model` **without** the `@profile` suffix:
|
||||||
|
|
||||||
```
|
```
|
||||||
/model anthropic/claude-opus-4-5
|
/model anthropic/claude-opus-4-6
|
||||||
```
|
```
|
||||||
|
|
||||||
If you want to return to the default, pick it from `/model` (or send `/model <default provider/model>`).
|
If you want to return to the default, pick it from `/model` (or send `/model <default provider/model>`).
|
||||||
|
|
@ -2039,8 +2039,8 @@ Use `/model status` to confirm which auth profile is active.
|
||||||
|
|
||||||
Yes. Set one as default and switch as needed:
|
Yes. Set one as default and switch as needed:
|
||||||
|
|
||||||
- **Quick switch (per session):** `/model gpt-5.2` for daily tasks, `/model gpt-5.2-codex` for coding.
|
- **Quick switch (per session):** `/model gpt-5.2` for daily tasks, `/model gpt-5.3-codex` for coding.
|
||||||
- **Default + switch:** set `agents.defaults.model.primary` to `openai-codex/gpt-5.2`, then switch to `openai-codex/gpt-5.2-codex` when coding (or the other way around).
|
- **Default + switch:** set `agents.defaults.model.primary` to `openai-codex/gpt-5.2`, then switch to `openai-codex/gpt-5.3-codex` when coding (or the other way around).
|
||||||
- **Sub-agents:** route coding tasks to sub-agents with a different default model.
|
- **Sub-agents:** route coding tasks to sub-agents with a different default model.
|
||||||
|
|
||||||
See [Models](/concepts/models) and [Slash commands](/tools/slash-commands).
|
See [Models](/concepts/models) and [Slash commands](/tools/slash-commands).
|
||||||
|
|
@ -2118,7 +2118,7 @@ Docs: [Models](/concepts/models), [Multi-Agent Routing](/concepts/multi-agent),
|
||||||
|
|
||||||
Yes. OpenClaw ships a few default shorthands (only applied when the model exists in `agents.defaults.models`):
|
Yes. OpenClaw ships a few default shorthands (only applied when the model exists in `agents.defaults.models`):
|
||||||
|
|
||||||
- `opus` → `anthropic/claude-opus-4-5`
|
- `opus` → `anthropic/claude-opus-4-6`
|
||||||
- `sonnet` → `anthropic/claude-sonnet-4-5`
|
- `sonnet` → `anthropic/claude-sonnet-4-5`
|
||||||
- `gpt` → `openai/gpt-5.2`
|
- `gpt` → `openai/gpt-5.2`
|
||||||
- `gpt-mini` → `openai/gpt-5-mini`
|
- `gpt-mini` → `openai/gpt-5-mini`
|
||||||
|
|
@ -2135,9 +2135,9 @@ Aliases come from `agents.defaults.models.<modelId>.alias`. Example:
|
||||||
{
|
{
|
||||||
agents: {
|
agents: {
|
||||||
defaults: {
|
defaults: {
|
||||||
model: { primary: "anthropic/claude-opus-4-5" },
|
model: { primary: "anthropic/claude-opus-4-6" },
|
||||||
models: {
|
models: {
|
||||||
"anthropic/claude-opus-4-5": { alias: "opus" },
|
"anthropic/claude-opus-4-6": { alias: "opus" },
|
||||||
"anthropic/claude-sonnet-4-5": { alias: "sonnet" },
|
"anthropic/claude-sonnet-4-5": { alias: "sonnet" },
|
||||||
"anthropic/claude-haiku-4-5": { alias: "haiku" },
|
"anthropic/claude-haiku-4-5": { alias: "haiku" },
|
||||||
},
|
},
|
||||||
|
|
@ -2823,7 +2823,7 @@ You can add options like `debounce:2s cap:25 drop:summarize` for followup modes.
|
||||||
|
|
||||||
**Q: "What's the default model for Anthropic with an API key?"**
|
**Q: "What's the default model for Anthropic with an API key?"**
|
||||||
|
|
||||||
**A:** In OpenClaw, credentials and model selection are separate. Setting `ANTHROPIC_API_KEY` (or storing an Anthropic API key in auth profiles) enables authentication, but the actual default model is whatever you configure in `agents.defaults.model.primary` (for example, `anthropic/claude-sonnet-4-5` or `anthropic/claude-opus-4-5`). If you see `No credentials found for profile "anthropic:default"`, it means the Gateway couldn't find Anthropic credentials in the expected `auth-profiles.json` for the agent that's running.
|
**A:** In OpenClaw, credentials and model selection are separate. Setting `ANTHROPIC_API_KEY` (or storing an Anthropic API key in auth profiles) enables authentication, but the actual default model is whatever you configure in `agents.defaults.model.primary` (for example, `anthropic/claude-sonnet-4-5` or `anthropic/claude-opus-4-6`). If you see `No credentials found for profile "anthropic:default"`, it means the Gateway couldn't find Anthropic credentials in the expected `auth-profiles.json` for the agent that's running.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -186,7 +186,7 @@ If you omit `capabilities`, the entry is eligible for the list it appears in.
|
||||||
**Image**
|
**Image**
|
||||||
|
|
||||||
- Prefer your active model if it supports images.
|
- Prefer your active model if it supports images.
|
||||||
- Good defaults: `openai/gpt-5.2`, `anthropic/claude-opus-4-5`, `google/gemini-3-pro-preview`.
|
- Good defaults: `openai/gpt-5.2`, `anthropic/claude-opus-4-6`, `google/gemini-3-pro-preview`.
|
||||||
|
|
||||||
**Audio**
|
**Audio**
|
||||||
|
|
||||||
|
|
@ -300,7 +300,7 @@ When `mode: "all"`, outputs are labeled `[Image 1/2]`, `[Audio 2/2]`, etc.
|
||||||
maxChars: 500,
|
maxChars: 500,
|
||||||
models: [
|
models: [
|
||||||
{ provider: "openai", model: "gpt-5.2" },
|
{ provider: "openai", model: "gpt-5.2" },
|
||||||
{ provider: "anthropic", model: "claude-opus-4-5" },
|
{ provider: "anthropic", model: "claude-opus-4-6" },
|
||||||
{
|
{
|
||||||
type: "cli",
|
type: "cli",
|
||||||
command: "gemini",
|
command: "gemini",
|
||||||
|
|
|
||||||
|
|
@ -148,7 +148,7 @@ cat > /data/openclaw.json << 'EOF'
|
||||||
"agents": {
|
"agents": {
|
||||||
"defaults": {
|
"defaults": {
|
||||||
"model": {
|
"model": {
|
||||||
"primary": "anthropic/claude-opus-4-5",
|
"primary": "anthropic/claude-opus-4-6",
|
||||||
"fallbacks": ["anthropic/claude-sonnet-4-5", "openai/gpt-4o"]
|
"fallbacks": ["anthropic/claude-sonnet-4-5", "openai/gpt-4o"]
|
||||||
},
|
},
|
||||||
"maxConcurrent": 4
|
"maxConcurrent": 4
|
||||||
|
|
|
||||||
|
|
@ -31,7 +31,7 @@ openclaw onboard --anthropic-api-key "$ANTHROPIC_API_KEY"
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
env: { ANTHROPIC_API_KEY: "sk-ant-..." },
|
env: { ANTHROPIC_API_KEY: "sk-ant-..." },
|
||||||
agents: { defaults: { model: { primary: "anthropic/claude-opus-4-5" } } },
|
agents: { defaults: { model: { primary: "anthropic/claude-opus-4-6" } } },
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -54,7 +54,7 @@ Use the `cacheRetention` parameter in your model config:
|
||||||
agents: {
|
agents: {
|
||||||
defaults: {
|
defaults: {
|
||||||
models: {
|
models: {
|
||||||
"anthropic/claude-opus-4-5": {
|
"anthropic/claude-opus-4-6": {
|
||||||
params: { cacheRetention: "long" },
|
params: { cacheRetention: "long" },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -114,7 +114,7 @@ openclaw onboard --auth-choice setup-token
|
||||||
|
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
agents: { defaults: { model: { primary: "anthropic/claude-opus-4-5" } } },
|
agents: { defaults: { model: { primary: "anthropic/claude-opus-4-6" } } },
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -29,7 +29,7 @@ See [Venice AI](/providers/venice).
|
||||||
|
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
agents: { defaults: { model: { primary: "anthropic/claude-opus-4-5" } } },
|
agents: { defaults: { model: { primary: "anthropic/claude-opus-4-6" } } },
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -96,7 +96,7 @@ Configure via CLI:
|
||||||
|
|
||||||
### MiniMax M2.1 as fallback (Opus primary)
|
### MiniMax M2.1 as fallback (Opus primary)
|
||||||
|
|
||||||
**Best for:** keep Opus 4.5 as primary, fail over to MiniMax M2.1.
|
**Best for:** keep Opus 4.6 as primary, fail over to MiniMax M2.1.
|
||||||
|
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
|
|
@ -104,11 +104,11 @@ Configure via CLI:
|
||||||
agents: {
|
agents: {
|
||||||
defaults: {
|
defaults: {
|
||||||
models: {
|
models: {
|
||||||
"anthropic/claude-opus-4-5": { alias: "opus" },
|
"anthropic/claude-opus-4-6": { alias: "opus" },
|
||||||
"minimax/MiniMax-M2.1": { alias: "minimax" },
|
"minimax/MiniMax-M2.1": { alias: "minimax" },
|
||||||
},
|
},
|
||||||
model: {
|
model: {
|
||||||
primary: "anthropic/claude-opus-4-5",
|
primary: "anthropic/claude-opus-4-6",
|
||||||
fallbacks: ["minimax/MiniMax-M2.1"],
|
fallbacks: ["minimax/MiniMax-M2.1"],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -27,7 +27,7 @@ See [Venice AI](/providers/venice).
|
||||||
|
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
agents: { defaults: { model: { primary: "anthropic/claude-opus-4-5" } } },
|
agents: { defaults: { model: { primary: "anthropic/claude-opus-4-6" } } },
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -29,7 +29,7 @@ openclaw onboard --openai-api-key "$OPENAI_API_KEY"
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
env: { OPENAI_API_KEY: "sk-..." },
|
env: { OPENAI_API_KEY: "sk-..." },
|
||||||
agents: { defaults: { model: { primary: "openai/gpt-5.2" } } },
|
agents: { defaults: { model: { primary: "openai/gpt-5.1-codex" } } },
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -52,7 +52,7 @@ openclaw models auth login --provider openai-codex
|
||||||
|
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
agents: { defaults: { model: { primary: "openai-codex/gpt-5.2" } } },
|
agents: { defaults: { model: { primary: "openai-codex/gpt-5.3-codex" } } },
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -25,7 +25,7 @@ openclaw onboard --opencode-zen-api-key "$OPENCODE_API_KEY"
|
||||||
```json5
|
```json5
|
||||||
{
|
{
|
||||||
env: { OPENCODE_API_KEY: "sk-..." },
|
env: { OPENCODE_API_KEY: "sk-..." },
|
||||||
agents: { defaults: { model: { primary: "opencode/claude-opus-4-5" } } },
|
agents: { defaults: { model: { primary: "opencode/claude-opus-4-6" } } },
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -28,7 +28,7 @@ openclaw onboard --auth-choice ai-gateway-api-key
|
||||||
{
|
{
|
||||||
agents: {
|
agents: {
|
||||||
defaults: {
|
defaults: {
|
||||||
model: { primary: "vercel-ai-gateway/anthropic/claude-opus-4.5" },
|
model: { primary: "vercel-ai-gateway/anthropic/claude-opus-4.6" },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -142,7 +142,7 @@ Example:
|
||||||
{
|
{
|
||||||
logging: { level: "info" },
|
logging: { level: "info" },
|
||||||
agent: {
|
agent: {
|
||||||
model: "anthropic/claude-opus-4-5",
|
model: "anthropic/claude-opus-4-6",
|
||||||
workspace: "~/.openclaw/workspace",
|
workspace: "~/.openclaw/workspace",
|
||||||
thinkingDefault: "high",
|
thinkingDefault: "high",
|
||||||
timeoutSeconds: 1800,
|
timeoutSeconds: 1800,
|
||||||
|
|
|
||||||
|
|
@ -135,12 +135,15 @@ What you set:
|
||||||
<Accordion title="OpenAI Code subscription (OAuth)">
|
<Accordion title="OpenAI Code subscription (OAuth)">
|
||||||
Browser flow; paste `code#state`.
|
Browser flow; paste `code#state`.
|
||||||
|
|
||||||
Sets `agents.defaults.model` to `openai-codex/gpt-5.2` when model is unset or `openai/*`.
|
Sets `agents.defaults.model` to `openai-codex/gpt-5.3-codex` when model is unset or `openai/*`.
|
||||||
|
|
||||||
</Accordion>
|
</Accordion>
|
||||||
<Accordion title="OpenAI API key">
|
<Accordion title="OpenAI API key">
|
||||||
Uses `OPENAI_API_KEY` if present or prompts for a key, then saves it to
|
Uses `OPENAI_API_KEY` if present or prompts for a key, then saves it to
|
||||||
`~/.openclaw/.env` so launchd can read it.
|
`~/.openclaw/.env` so launchd can read it.
|
||||||
|
|
||||||
|
Sets `agents.defaults.model` to `openai/gpt-5.1-codex` when model is unset, `openai/*`, or `openai-codex/*`.
|
||||||
|
|
||||||
</Accordion>
|
</Accordion>
|
||||||
<Accordion title="OpenCode Zen">
|
<Accordion title="OpenCode Zen">
|
||||||
Prompts for `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`).
|
Prompts for `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`).
|
||||||
|
|
|
||||||
|
|
@ -110,7 +110,7 @@ Live tests are split into two layers so we can isolate failures:
|
||||||
- How to select models:
|
- How to select models:
|
||||||
- `OPENCLAW_LIVE_MODELS=modern` to run the modern allowlist (Opus/Sonnet/Haiku 4.5, GPT-5.x + Codex, Gemini 3, GLM 4.7, MiniMax M2.1, Grok 4)
|
- `OPENCLAW_LIVE_MODELS=modern` to run the modern allowlist (Opus/Sonnet/Haiku 4.5, GPT-5.x + Codex, Gemini 3, GLM 4.7, MiniMax M2.1, Grok 4)
|
||||||
- `OPENCLAW_LIVE_MODELS=all` is an alias for the modern allowlist
|
- `OPENCLAW_LIVE_MODELS=all` is an alias for the modern allowlist
|
||||||
- or `OPENCLAW_LIVE_MODELS="openai/gpt-5.2,anthropic/claude-opus-4-5,..."` (comma allowlist)
|
- or `OPENCLAW_LIVE_MODELS="openai/gpt-5.2,anthropic/claude-opus-4-6,..."` (comma allowlist)
|
||||||
- How to select providers:
|
- How to select providers:
|
||||||
- `OPENCLAW_LIVE_PROVIDERS="google,google-antigravity,google-gemini-cli"` (comma allowlist)
|
- `OPENCLAW_LIVE_PROVIDERS="google,google-antigravity,google-gemini-cli"` (comma allowlist)
|
||||||
- Where keys come from:
|
- Where keys come from:
|
||||||
|
|
@ -172,7 +172,7 @@ openclaw models list --json
|
||||||
- Profile: `OPENCLAW_LIVE_SETUP_TOKEN_PROFILE=anthropic:setup-token-test`
|
- Profile: `OPENCLAW_LIVE_SETUP_TOKEN_PROFILE=anthropic:setup-token-test`
|
||||||
- Raw token: `OPENCLAW_LIVE_SETUP_TOKEN_VALUE=sk-ant-oat01-...`
|
- Raw token: `OPENCLAW_LIVE_SETUP_TOKEN_VALUE=sk-ant-oat01-...`
|
||||||
- Model override (optional):
|
- Model override (optional):
|
||||||
- `OPENCLAW_LIVE_SETUP_TOKEN_MODEL=anthropic/claude-opus-4-5`
|
- `OPENCLAW_LIVE_SETUP_TOKEN_MODEL=anthropic/claude-opus-4-6`
|
||||||
|
|
||||||
Setup example:
|
Setup example:
|
||||||
|
|
||||||
|
|
@ -193,8 +193,8 @@ OPENCLAW_LIVE_SETUP_TOKEN=1 OPENCLAW_LIVE_SETUP_TOKEN_PROFILE=anthropic:setup-to
|
||||||
- Command: `claude`
|
- Command: `claude`
|
||||||
- Args: `["-p","--output-format","json","--dangerously-skip-permissions"]`
|
- Args: `["-p","--output-format","json","--dangerously-skip-permissions"]`
|
||||||
- Overrides (optional):
|
- Overrides (optional):
|
||||||
- `OPENCLAW_LIVE_CLI_BACKEND_MODEL="claude-cli/claude-opus-4-5"`
|
- `OPENCLAW_LIVE_CLI_BACKEND_MODEL="claude-cli/claude-opus-4-6"`
|
||||||
- `OPENCLAW_LIVE_CLI_BACKEND_MODEL="codex-cli/gpt-5.2-codex"`
|
- `OPENCLAW_LIVE_CLI_BACKEND_MODEL="codex-cli/gpt-5.3-codex"`
|
||||||
- `OPENCLAW_LIVE_CLI_BACKEND_COMMAND="/full/path/to/claude"`
|
- `OPENCLAW_LIVE_CLI_BACKEND_COMMAND="/full/path/to/claude"`
|
||||||
- `OPENCLAW_LIVE_CLI_BACKEND_ARGS='["-p","--output-format","json","--permission-mode","bypassPermissions"]'`
|
- `OPENCLAW_LIVE_CLI_BACKEND_ARGS='["-p","--output-format","json","--permission-mode","bypassPermissions"]'`
|
||||||
- `OPENCLAW_LIVE_CLI_BACKEND_CLEAR_ENV='["ANTHROPIC_API_KEY","ANTHROPIC_API_KEY_OLD"]'`
|
- `OPENCLAW_LIVE_CLI_BACKEND_CLEAR_ENV='["ANTHROPIC_API_KEY","ANTHROPIC_API_KEY_OLD"]'`
|
||||||
|
|
@ -223,7 +223,7 @@ Narrow, explicit allowlists are fastest and least flaky:
|
||||||
- `OPENCLAW_LIVE_GATEWAY_MODELS="openai/gpt-5.2" pnpm test:live src/gateway/gateway-models.profiles.live.test.ts`
|
- `OPENCLAW_LIVE_GATEWAY_MODELS="openai/gpt-5.2" pnpm test:live src/gateway/gateway-models.profiles.live.test.ts`
|
||||||
|
|
||||||
- Tool calling across several providers:
|
- Tool calling across several providers:
|
||||||
- `OPENCLAW_LIVE_GATEWAY_MODELS="openai/gpt-5.2,anthropic/claude-opus-4-5,google/gemini-3-flash-preview,zai/glm-4.7,minimax/minimax-m2.1" pnpm test:live src/gateway/gateway-models.profiles.live.test.ts`
|
- `OPENCLAW_LIVE_GATEWAY_MODELS="openai/gpt-5.2,anthropic/claude-opus-4-6,google/gemini-3-flash-preview,zai/glm-4.7,minimax/minimax-m2.1" pnpm test:live src/gateway/gateway-models.profiles.live.test.ts`
|
||||||
|
|
||||||
- Google focus (Gemini API key + Antigravity):
|
- Google focus (Gemini API key + Antigravity):
|
||||||
- Gemini (API key): `OPENCLAW_LIVE_GATEWAY_MODELS="google/gemini-3-flash-preview" pnpm test:live src/gateway/gateway-models.profiles.live.test.ts`
|
- Gemini (API key): `OPENCLAW_LIVE_GATEWAY_MODELS="google/gemini-3-flash-preview" pnpm test:live src/gateway/gateway-models.profiles.live.test.ts`
|
||||||
|
|
@ -247,22 +247,22 @@ There is no fixed “CI model list” (live is opt-in), but these are the **reco
|
||||||
This is the “common models” run we expect to keep working:
|
This is the “common models” run we expect to keep working:
|
||||||
|
|
||||||
- OpenAI (non-Codex): `openai/gpt-5.2` (optional: `openai/gpt-5.1`)
|
- OpenAI (non-Codex): `openai/gpt-5.2` (optional: `openai/gpt-5.1`)
|
||||||
- OpenAI Codex: `openai-codex/gpt-5.2` (optional: `openai-codex/gpt-5.2-codex`)
|
- OpenAI Codex: `openai-codex/gpt-5.3-codex` (optional: `openai-codex/gpt-5.2`)
|
||||||
- Anthropic: `anthropic/claude-opus-4-5` (or `anthropic/claude-sonnet-4-5`)
|
- Anthropic: `anthropic/claude-opus-4-6` (or `anthropic/claude-sonnet-4-5`)
|
||||||
- Google (Gemini API): `google/gemini-3-pro-preview` and `google/gemini-3-flash-preview` (avoid older Gemini 2.x models)
|
- Google (Gemini API): `google/gemini-3-pro-preview` and `google/gemini-3-flash-preview` (avoid older Gemini 2.x models)
|
||||||
- Google (Antigravity): `google-antigravity/claude-opus-4-5-thinking` and `google-antigravity/gemini-3-flash`
|
- Google (Antigravity): `google-antigravity/claude-opus-4-5-thinking` and `google-antigravity/gemini-3-flash`
|
||||||
- Z.AI (GLM): `zai/glm-4.7`
|
- Z.AI (GLM): `zai/glm-4.7`
|
||||||
- MiniMax: `minimax/minimax-m2.1`
|
- MiniMax: `minimax/minimax-m2.1`
|
||||||
|
|
||||||
Run gateway smoke with tools + image:
|
Run gateway smoke with tools + image:
|
||||||
`OPENCLAW_LIVE_GATEWAY_MODELS="openai/gpt-5.2,openai-codex/gpt-5.2,anthropic/claude-opus-4-5,google/gemini-3-pro-preview,google/gemini-3-flash-preview,google-antigravity/claude-opus-4-5-thinking,google-antigravity/gemini-3-flash,zai/glm-4.7,minimax/minimax-m2.1" pnpm test:live src/gateway/gateway-models.profiles.live.test.ts`
|
`OPENCLAW_LIVE_GATEWAY_MODELS="openai/gpt-5.2,openai-codex/gpt-5.3-codex,anthropic/claude-opus-4-6,google/gemini-3-pro-preview,google/gemini-3-flash-preview,google-antigravity/claude-opus-4-5-thinking,google-antigravity/gemini-3-flash,zai/glm-4.7,minimax/minimax-m2.1" pnpm test:live src/gateway/gateway-models.profiles.live.test.ts`
|
||||||
|
|
||||||
### Baseline: tool calling (Read + optional Exec)
|
### Baseline: tool calling (Read + optional Exec)
|
||||||
|
|
||||||
Pick at least one per provider family:
|
Pick at least one per provider family:
|
||||||
|
|
||||||
- OpenAI: `openai/gpt-5.2` (or `openai/gpt-5-mini`)
|
- OpenAI: `openai/gpt-5.2` (or `openai/gpt-5-mini`)
|
||||||
- Anthropic: `anthropic/claude-opus-4-5` (or `anthropic/claude-sonnet-4-5`)
|
- Anthropic: `anthropic/claude-opus-4-6` (or `anthropic/claude-sonnet-4-5`)
|
||||||
- Google: `google/gemini-3-flash-preview` (or `google/gemini-3-pro-preview`)
|
- Google: `google/gemini-3-flash-preview` (or `google/gemini-3-pro-preview`)
|
||||||
- Z.AI (GLM): `zai/glm-4.7`
|
- Z.AI (GLM): `zai/glm-4.7`
|
||||||
- MiniMax: `minimax/minimax-m2.1`
|
- MiniMax: `minimax/minimax-m2.1`
|
||||||
|
|
|
||||||
|
|
@ -93,9 +93,9 @@ https://docs.anthropic.com/docs/build-with-claude/prompt-caching
|
||||||
agents:
|
agents:
|
||||||
defaults:
|
defaults:
|
||||||
model:
|
model:
|
||||||
primary: "anthropic/claude-opus-4-5"
|
primary: "anthropic/claude-opus-4-6"
|
||||||
models:
|
models:
|
||||||
"anthropic/claude-opus-4-5":
|
"anthropic/claude-opus-4-6":
|
||||||
params:
|
params:
|
||||||
cacheRetention: "long"
|
cacheRetention: "long"
|
||||||
heartbeat:
|
heartbeat:
|
||||||
|
|
|
||||||
|
|
@ -55,7 +55,7 @@ without writing custom OpenClaw code for each workflow.
|
||||||
"defaultProvider": "openai-codex",
|
"defaultProvider": "openai-codex",
|
||||||
"defaultModel": "gpt-5.2",
|
"defaultModel": "gpt-5.2",
|
||||||
"defaultAuthProfileId": "main",
|
"defaultAuthProfileId": "main",
|
||||||
"allowedModels": ["openai-codex/gpt-5.2"],
|
"allowedModels": ["openai-codex/gpt-5.3-codex"],
|
||||||
"maxTokens": 800,
|
"maxTokens": 800,
|
||||||
"timeoutMs": 30000
|
"timeoutMs": 30000
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -11,6 +11,7 @@ const DEFAULT_MODEL_IDS = [
|
||||||
"gpt-5.1-codex",
|
"gpt-5.1-codex",
|
||||||
"gpt-5.1-codex-max",
|
"gpt-5.1-codex-max",
|
||||||
"gpt-5-mini",
|
"gpt-5-mini",
|
||||||
|
"claude-opus-4.6",
|
||||||
"claude-opus-4.5",
|
"claude-opus-4.5",
|
||||||
"claude-sonnet-4.5",
|
"claude-sonnet-4.5",
|
||||||
"claude-haiku-4.5",
|
"claude-haiku-4.5",
|
||||||
|
|
|
||||||
|
|
@ -6,6 +6,7 @@ export function formatModelName(modelString?: string | null): string {
|
||||||
}
|
}
|
||||||
const modelName = modelString.includes("/") ? modelString.split("/")[1] : modelString;
|
const modelName = modelString.includes("/") ? modelString.split("/")[1] : modelString;
|
||||||
const modelMappings: Record<string, string> = {
|
const modelMappings: Record<string, string> = {
|
||||||
|
"claude-opus-4-6": "Claude Opus 4.6",
|
||||||
"claude-opus-4-5": "Claude Opus 4.5",
|
"claude-opus-4-5": "Claude Opus 4.5",
|
||||||
"claude-sonnet-4-5": "Claude Sonnet 4.5",
|
"claude-sonnet-4-5": "Claude Sonnet 4.5",
|
||||||
"claude-sonnet-3-5": "Claude Sonnet 3.5",
|
"claude-sonnet-3-5": "Claude Sonnet 3.5",
|
||||||
|
|
|
||||||
|
|
@ -108,10 +108,10 @@
|
||||||
"@larksuiteoapi/node-sdk": "^1.58.0",
|
"@larksuiteoapi/node-sdk": "^1.58.0",
|
||||||
"@line/bot-sdk": "^10.6.0",
|
"@line/bot-sdk": "^10.6.0",
|
||||||
"@lydell/node-pty": "1.2.0-beta.3",
|
"@lydell/node-pty": "1.2.0-beta.3",
|
||||||
"@mariozechner/pi-agent-core": "0.52.0",
|
"@mariozechner/pi-agent-core": "0.52.2",
|
||||||
"@mariozechner/pi-ai": "0.52.0",
|
"@mariozechner/pi-ai": "0.52.2",
|
||||||
"@mariozechner/pi-coding-agent": "0.52.0",
|
"@mariozechner/pi-coding-agent": "0.52.2",
|
||||||
"@mariozechner/pi-tui": "0.52.0",
|
"@mariozechner/pi-tui": "0.52.2",
|
||||||
"@mozilla/readability": "^0.6.0",
|
"@mozilla/readability": "^0.6.0",
|
||||||
"@sinclair/typebox": "0.34.48",
|
"@sinclair/typebox": "0.34.48",
|
||||||
"@slack/bolt": "^4.6.0",
|
"@slack/bolt": "^4.6.0",
|
||||||
|
|
|
||||||
200
pnpm-lock.yaml
200
pnpm-lock.yaml
|
|
@ -49,17 +49,17 @@ importers:
|
||||||
specifier: 1.2.0-beta.3
|
specifier: 1.2.0-beta.3
|
||||||
version: 1.2.0-beta.3
|
version: 1.2.0-beta.3
|
||||||
'@mariozechner/pi-agent-core':
|
'@mariozechner/pi-agent-core':
|
||||||
specifier: 0.52.0
|
specifier: 0.52.2
|
||||||
version: 0.52.0(ws@8.19.0)(zod@4.3.6)
|
version: 0.52.2(ws@8.19.0)(zod@4.3.6)
|
||||||
'@mariozechner/pi-ai':
|
'@mariozechner/pi-ai':
|
||||||
specifier: 0.52.0
|
specifier: 0.52.2
|
||||||
version: 0.52.0(ws@8.19.0)(zod@4.3.6)
|
version: 0.52.2(ws@8.19.0)(zod@4.3.6)
|
||||||
'@mariozechner/pi-coding-agent':
|
'@mariozechner/pi-coding-agent':
|
||||||
specifier: 0.52.0
|
specifier: 0.52.2
|
||||||
version: 0.52.0(ws@8.19.0)(zod@4.3.6)
|
version: 0.52.2(ws@8.19.0)(zod@4.3.6)
|
||||||
'@mariozechner/pi-tui':
|
'@mariozechner/pi-tui':
|
||||||
specifier: 0.52.0
|
specifier: 0.52.2
|
||||||
version: 0.52.0
|
version: 0.52.2
|
||||||
'@mozilla/readability':
|
'@mozilla/readability':
|
||||||
specifier: ^0.6.0
|
specifier: ^0.6.0
|
||||||
version: 0.6.0
|
version: 0.6.0
|
||||||
|
|
@ -593,8 +593,8 @@ packages:
|
||||||
peerDependencies:
|
peerDependencies:
|
||||||
zod: ^3.25.0 || ^4.0.0
|
zod: ^3.25.0 || ^4.0.0
|
||||||
|
|
||||||
'@anthropic-ai/sdk@0.71.2':
|
'@anthropic-ai/sdk@0.73.0':
|
||||||
resolution: {integrity: sha512-TGNDEUuEstk/DKu0/TflXAEt+p+p/WhTlFzEnoosvbaDU2LTjm42igSdlL0VijrKpWejtOKxX0b8A7uc+XiSAQ==}
|
resolution: {integrity: sha512-URURVzhxXGJDGUGFunIOtBlSl7KWvZiAAKY/ttTkZAkXT9bTPqdk2eK0b8qqSxXpikh3QKPnPYpiyX98zf5ebw==}
|
||||||
hasBin: true
|
hasBin: true
|
||||||
peerDependencies:
|
peerDependencies:
|
||||||
zod: ^3.25.0 || ^4.0.0
|
zod: ^3.25.0 || ^4.0.0
|
||||||
|
|
@ -619,8 +619,8 @@ packages:
|
||||||
'@aws-crypto/util@5.2.0':
|
'@aws-crypto/util@5.2.0':
|
||||||
resolution: {integrity: sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==}
|
resolution: {integrity: sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==}
|
||||||
|
|
||||||
'@aws-sdk/client-bedrock-runtime@3.983.0':
|
'@aws-sdk/client-bedrock-runtime@3.984.0':
|
||||||
resolution: {integrity: sha512-uur/DX7OKtWe05gSZ2PGCHIhV0etoi12h8EGDht5blmtI4njLzD/gL6vX2L8CUgsy+4/KGIpH7KV7naWKAKANQ==}
|
resolution: {integrity: sha512-iFrdkDXdo+ELZ5qD8ZYw9MHoOhcXyVutO8z7csnYpJO0rbET/X6B8cQlOCMsqJHxkyMwW21J4vt9S5k2/FgPCg==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
'@aws-sdk/client-bedrock@3.983.0':
|
'@aws-sdk/client-bedrock@3.983.0':
|
||||||
|
|
@ -667,8 +667,8 @@ packages:
|
||||||
resolution: {integrity: sha512-hIzw2XzrG8jzsUSEatehmpkd5rWzASg5IHUfA+m01k/RtvfAML7ZJVVohuKdhAYx+wV2AThLiQJVzqn7F0khrw==}
|
resolution: {integrity: sha512-hIzw2XzrG8jzsUSEatehmpkd5rWzASg5IHUfA+m01k/RtvfAML7ZJVVohuKdhAYx+wV2AThLiQJVzqn7F0khrw==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
'@aws-sdk/eventstream-handler-node@3.972.4':
|
'@aws-sdk/eventstream-handler-node@3.972.5':
|
||||||
resolution: {integrity: sha512-LPIN505kUqL3xwtoGYgYkctkUUuVUD4pzZfSo+CahavNft+zty5xWYWhKfnZOKBkYCMUl2Hl/9mkoPeYwxfQvQ==}
|
resolution: {integrity: sha512-xEmd3dnyn83K6t4AJxBJA63wpEoCD45ERFG0XMTViD2E/Ohls9TLxjOWPb1PAxR9/46cKy/TImez1GoqP6xVNQ==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
'@aws-sdk/middleware-eventstream@3.972.3':
|
'@aws-sdk/middleware-eventstream@3.972.3':
|
||||||
|
|
@ -691,8 +691,8 @@ packages:
|
||||||
resolution: {integrity: sha512-TehLN8W/kivl0U9HcS+keryElEWORROpghDXZBLfnb40DXM7hx/i+7OOjkogXQOF3QtUraJVRkHQ07bPhrWKlw==}
|
resolution: {integrity: sha512-TehLN8W/kivl0U9HcS+keryElEWORROpghDXZBLfnb40DXM7hx/i+7OOjkogXQOF3QtUraJVRkHQ07bPhrWKlw==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
'@aws-sdk/middleware-websocket@3.972.4':
|
'@aws-sdk/middleware-websocket@3.972.5':
|
||||||
resolution: {integrity: sha512-0lHsBuO5eVkWiirSHWVDHLHSghyajcVxSGvmv/6tYFdzaXx2PDvqNdfXhKdDZpOOHGCxuY5d3u11SKbVAtB0+Q==}
|
resolution: {integrity: sha512-BN4A9K71WRIlpQ3+IYGdBC2wVyobZ95g6ZomodmJ8Te772GWo0iDk2Mv6JIHdr842tOTgi1b3npLIFDUS4hl4g==}
|
||||||
engines: {node: '>= 14.0.0'}
|
engines: {node: '>= 14.0.0'}
|
||||||
|
|
||||||
'@aws-sdk/nested-clients@3.982.0':
|
'@aws-sdk/nested-clients@3.982.0':
|
||||||
|
|
@ -703,6 +703,10 @@ packages:
|
||||||
resolution: {integrity: sha512-4bUzDkJlSPwfegO23ZSBrheuTI8UyAgNzptm1K6fZAIOIc1vnFl12TonecbssAfmM0/UdyTn5QDomwEfIdmJkQ==}
|
resolution: {integrity: sha512-4bUzDkJlSPwfegO23ZSBrheuTI8UyAgNzptm1K6fZAIOIc1vnFl12TonecbssAfmM0/UdyTn5QDomwEfIdmJkQ==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
|
'@aws-sdk/nested-clients@3.984.0':
|
||||||
|
resolution: {integrity: sha512-E9Os+U9NWFoEJXbTVT8sCi+HMnzmsMA8cuCkvlUUfin/oWewUTnCkB/OwFwiUQ2N7v1oBk+i4ZSsI1PiuOy8/w==}
|
||||||
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
'@aws-sdk/region-config-resolver@3.972.3':
|
'@aws-sdk/region-config-resolver@3.972.3':
|
||||||
resolution: {integrity: sha512-v4J8qYAWfOMcZ4MJUyatntOicTzEMaU7j3OpkRCGGFSL2NgXQ5VbxauIyORA+pxdKZ0qQG2tCQjQjZDlXEC3Ow==}
|
resolution: {integrity: sha512-v4J8qYAWfOMcZ4MJUyatntOicTzEMaU7j3OpkRCGGFSL2NgXQ5VbxauIyORA+pxdKZ0qQG2tCQjQjZDlXEC3Ow==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
@ -715,6 +719,10 @@ packages:
|
||||||
resolution: {integrity: sha512-HR9MBAAEeQRpZAQ96XUalr8PhJG1Kr6JRs7Lk3u9MMN6tXFICxbn9s2rThGIJEPnU0t/edc+5F5tgTtQxsqBuQ==}
|
resolution: {integrity: sha512-HR9MBAAEeQRpZAQ96XUalr8PhJG1Kr6JRs7Lk3u9MMN6tXFICxbn9s2rThGIJEPnU0t/edc+5F5tgTtQxsqBuQ==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
|
'@aws-sdk/token-providers@3.984.0':
|
||||||
|
resolution: {integrity: sha512-UJ/+OzZv+4nAQ1bSspCSb4JlYbMB2Adn8CK7hySpKX5sjhRu1bm6w1PqQq59U67LZEKsPdhl1rzcZ7ybK8YQxw==}
|
||||||
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
'@aws-sdk/types@3.973.1':
|
'@aws-sdk/types@3.973.1':
|
||||||
resolution: {integrity: sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg==}
|
resolution: {integrity: sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
@ -727,6 +735,10 @@ packages:
|
||||||
resolution: {integrity: sha512-t/VbL2X3gvDEjC4gdySOeFFOZGQEBKwa23pRHeB7hBLBZ119BB/2OEFtTFWKyp3bnMQgxpeVeGS7/hxk6wpKJw==}
|
resolution: {integrity: sha512-t/VbL2X3gvDEjC4gdySOeFFOZGQEBKwa23pRHeB7hBLBZ119BB/2OEFtTFWKyp3bnMQgxpeVeGS7/hxk6wpKJw==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
|
'@aws-sdk/util-endpoints@3.984.0':
|
||||||
|
resolution: {integrity: sha512-9ebjLA0hMKHeVvXEtTDCCOBtwjb0bOXiuUV06HNeVdgAjH6gj4x4Zwt4IBti83TiyTGOCl5YfZqGx4ehVsasbQ==}
|
||||||
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
'@aws-sdk/util-format-url@3.972.3':
|
'@aws-sdk/util-format-url@3.972.3':
|
||||||
resolution: {integrity: sha512-n7F2ycckcKFXa01vAsT/SJdjFHfKH9s96QHcs5gn8AaaigASICeME8WdUL9uBp8XV/OVwEt8+6gzn6KFUgQa8g==}
|
resolution: {integrity: sha512-n7F2ycckcKFXa01vAsT/SJdjFHfKH9s96QHcs5gn8AaaigASICeME8WdUL9uBp8XV/OVwEt8+6gzn6KFUgQa8g==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
@ -1051,11 +1063,11 @@ packages:
|
||||||
'@eshaz/web-worker@1.2.2':
|
'@eshaz/web-worker@1.2.2':
|
||||||
resolution: {integrity: sha512-WxXiHFmD9u/owrzempiDlBB1ZYqiLnm9s6aPc8AlFQalq2tKmqdmMr9GXOupDgzXtqnBipj8Un0gkIm7Sjf8mw==}
|
resolution: {integrity: sha512-WxXiHFmD9u/owrzempiDlBB1ZYqiLnm9s6aPc8AlFQalq2tKmqdmMr9GXOupDgzXtqnBipj8Un0gkIm7Sjf8mw==}
|
||||||
|
|
||||||
'@google/genai@1.34.0':
|
'@google/genai@1.40.0':
|
||||||
resolution: {integrity: sha512-vu53UMPvjmb7PGzlYu6Tzxso8Dfhn+a7eQFaS2uNemVtDZKwzSpJ5+ikqBbXplF7RGB1STcVDqCkPvquiwb2sw==}
|
resolution: {integrity: sha512-fhIww8smT0QYRX78qWOiz/nIQhHMF5wXOrlXvj33HBrz3vKDBb+wibLcEmTA+L9dmPD4KmfNr7UF3LDQVTXNjA==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
peerDependencies:
|
peerDependencies:
|
||||||
'@modelcontextprotocol/sdk': ^1.24.0
|
'@modelcontextprotocol/sdk': ^1.25.2
|
||||||
peerDependenciesMeta:
|
peerDependenciesMeta:
|
||||||
'@modelcontextprotocol/sdk':
|
'@modelcontextprotocol/sdk':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
@ -1457,22 +1469,22 @@ packages:
|
||||||
resolution: {integrity: sha512-faGUlTcXka5l7rv0lP3K3vGW/ejRuOS24RR2aSFWREUQqzjgdsuWNo/IiPqL3kWRGt6Ahl2+qcDAwtdeWeuGUw==}
|
resolution: {integrity: sha512-faGUlTcXka5l7rv0lP3K3vGW/ejRuOS24RR2aSFWREUQqzjgdsuWNo/IiPqL3kWRGt6Ahl2+qcDAwtdeWeuGUw==}
|
||||||
hasBin: true
|
hasBin: true
|
||||||
|
|
||||||
'@mariozechner/pi-agent-core@0.52.0':
|
'@mariozechner/pi-agent-core@0.52.2':
|
||||||
resolution: {integrity: sha512-4jmPixmg+nnU3yvUuz9pLeMYtwktTC9SOcfkCGqGWfAyvYOa6fc1KXfL/IGPk1cDG4INautQ0nHxGoIDwAKFww==}
|
resolution: {integrity: sha512-RavOGZUl1hm+0/3ZG5tJqlUjPavidA0ebQoloW1T8DbXPEP7WlWYKGs5qMH5SnSdCF/Hc0tDn6lSqMdGo60Lpg==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
'@mariozechner/pi-ai@0.52.0':
|
'@mariozechner/pi-ai@0.52.2':
|
||||||
resolution: {integrity: sha512-fNyW5k3Ap3mSg2lmeZBYzMRfyDD+/7gSTSDax3OlME9hsXw72rhIrVpvQoivFNroupU/13BOy73y8rvyTEWQqQ==}
|
resolution: {integrity: sha512-/iyI2CbFiuPB6A5MyakQKy/ez6iTW04CQYXseyaDv4XZszGQa/TYXc4QAW/HxEc8SpuEZhCo8T6ikZBdvTaWwA==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
hasBin: true
|
hasBin: true
|
||||||
|
|
||||||
'@mariozechner/pi-coding-agent@0.52.0':
|
'@mariozechner/pi-coding-agent@0.52.2':
|
||||||
resolution: {integrity: sha512-skUR/LYK0kupD8sTn0PCr/YnvGaBEpqSZgZxQ/gEjSzzRXa7Ywoxrr6y3Jvzk68Nv1JenKAyeR1GAI/3QPDKlA==}
|
resolution: {integrity: sha512-/qJxSmfi488jJLKQkGS9qO2VC21LC7mpms6F3JNMkHS0wdUoq1JFLGTA9OlZT/9WJHz1aLzXeCLAcZvFFcJGfA==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
hasBin: true
|
hasBin: true
|
||||||
|
|
||||||
'@mariozechner/pi-tui@0.52.0':
|
'@mariozechner/pi-tui@0.52.2':
|
||||||
resolution: {integrity: sha512-SOWBWI+7SX/CgfmuyO1o+S1nhS5I1QmWrCXxd+2lvhqAvqBiVTmSt3W8RagdAH4G6D4WOcR0FFjqLFezlKV79w==}
|
resolution: {integrity: sha512-ASNy0dU1cDWXNx4lHvyjOXdoUzrEbuSdTQwkvchiNMbau2nGogdzRXdnYuiJjJKMDqCFtkOPhEUXStpUoOzJZg==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
'@matrix-org/matrix-sdk-crypto-nodejs@0.4.0':
|
'@matrix-org/matrix-sdk-crypto-nodejs@0.4.0':
|
||||||
|
|
@ -2717,8 +2729,8 @@ packages:
|
||||||
'@types/node@10.17.60':
|
'@types/node@10.17.60':
|
||||||
resolution: {integrity: sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==}
|
resolution: {integrity: sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==}
|
||||||
|
|
||||||
'@types/node@20.19.31':
|
'@types/node@20.19.32':
|
||||||
resolution: {integrity: sha512-5jsi0wpncvTD33Sh1UCgacK37FFwDn+EG7wCmEvs62fCvBL+n8/76cAYDok21NF6+jaVWIqKwCZyX7Vbu8eB3A==}
|
resolution: {integrity: sha512-Ez8QE4DMfhjjTsES9K2dwfV258qBui7qxUsoaixZDiTzbde4U12e1pXGNu/ECsUIOi5/zoCxAQxIhQnaUQ2VvA==}
|
||||||
|
|
||||||
'@types/node@24.10.10':
|
'@types/node@24.10.10':
|
||||||
resolution: {integrity: sha512-+0/4J266CBGPUq/ELg7QUHhN25WYjE0wYTPSQJn1xeu8DOlIOPxXxrNGiLmfAWl7HMMgWFWXpt9IDjMWrF5Iow==}
|
resolution: {integrity: sha512-+0/4J266CBGPUq/ELg7QUHhN25WYjE0wYTPSQJn1xeu8DOlIOPxXxrNGiLmfAWl7HMMgWFWXpt9IDjMWrF5Iow==}
|
||||||
|
|
@ -5535,7 +5547,7 @@ snapshots:
|
||||||
dependencies:
|
dependencies:
|
||||||
zod: 4.3.6
|
zod: 4.3.6
|
||||||
|
|
||||||
'@anthropic-ai/sdk@0.71.2(zod@4.3.6)':
|
'@anthropic-ai/sdk@0.73.0(zod@4.3.6)':
|
||||||
dependencies:
|
dependencies:
|
||||||
json-schema-to-ts: 3.1.1
|
json-schema-to-ts: 3.1.1
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
|
|
@ -5573,23 +5585,23 @@ snapshots:
|
||||||
'@smithy/util-utf8': 2.3.0
|
'@smithy/util-utf8': 2.3.0
|
||||||
tslib: 2.8.1
|
tslib: 2.8.1
|
||||||
|
|
||||||
'@aws-sdk/client-bedrock-runtime@3.983.0':
|
'@aws-sdk/client-bedrock-runtime@3.984.0':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@aws-crypto/sha256-browser': 5.2.0
|
'@aws-crypto/sha256-browser': 5.2.0
|
||||||
'@aws-crypto/sha256-js': 5.2.0
|
'@aws-crypto/sha256-js': 5.2.0
|
||||||
'@aws-sdk/core': 3.973.6
|
'@aws-sdk/core': 3.973.6
|
||||||
'@aws-sdk/credential-provider-node': 3.972.5
|
'@aws-sdk/credential-provider-node': 3.972.5
|
||||||
'@aws-sdk/eventstream-handler-node': 3.972.4
|
'@aws-sdk/eventstream-handler-node': 3.972.5
|
||||||
'@aws-sdk/middleware-eventstream': 3.972.3
|
'@aws-sdk/middleware-eventstream': 3.972.3
|
||||||
'@aws-sdk/middleware-host-header': 3.972.3
|
'@aws-sdk/middleware-host-header': 3.972.3
|
||||||
'@aws-sdk/middleware-logger': 3.972.3
|
'@aws-sdk/middleware-logger': 3.972.3
|
||||||
'@aws-sdk/middleware-recursion-detection': 3.972.3
|
'@aws-sdk/middleware-recursion-detection': 3.972.3
|
||||||
'@aws-sdk/middleware-user-agent': 3.972.6
|
'@aws-sdk/middleware-user-agent': 3.972.6
|
||||||
'@aws-sdk/middleware-websocket': 3.972.4
|
'@aws-sdk/middleware-websocket': 3.972.5
|
||||||
'@aws-sdk/region-config-resolver': 3.972.3
|
'@aws-sdk/region-config-resolver': 3.972.3
|
||||||
'@aws-sdk/token-providers': 3.983.0
|
'@aws-sdk/token-providers': 3.984.0
|
||||||
'@aws-sdk/types': 3.973.1
|
'@aws-sdk/types': 3.973.1
|
||||||
'@aws-sdk/util-endpoints': 3.983.0
|
'@aws-sdk/util-endpoints': 3.984.0
|
||||||
'@aws-sdk/util-user-agent-browser': 3.972.3
|
'@aws-sdk/util-user-agent-browser': 3.972.3
|
||||||
'@aws-sdk/util-user-agent-node': 3.972.4
|
'@aws-sdk/util-user-agent-node': 3.972.4
|
||||||
'@smithy/config-resolver': 4.4.6
|
'@smithy/config-resolver': 4.4.6
|
||||||
|
|
@ -5833,7 +5845,7 @@ snapshots:
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- aws-crt
|
- aws-crt
|
||||||
|
|
||||||
'@aws-sdk/eventstream-handler-node@3.972.4':
|
'@aws-sdk/eventstream-handler-node@3.972.5':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@aws-sdk/types': 3.973.1
|
'@aws-sdk/types': 3.973.1
|
||||||
'@smithy/eventstream-codec': 4.2.8
|
'@smithy/eventstream-codec': 4.2.8
|
||||||
|
|
@ -5878,7 +5890,7 @@ snapshots:
|
||||||
'@smithy/types': 4.12.0
|
'@smithy/types': 4.12.0
|
||||||
tslib: 2.8.1
|
tslib: 2.8.1
|
||||||
|
|
||||||
'@aws-sdk/middleware-websocket@3.972.4':
|
'@aws-sdk/middleware-websocket@3.972.5':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@aws-sdk/types': 3.973.1
|
'@aws-sdk/types': 3.973.1
|
||||||
'@aws-sdk/util-format-url': 3.972.3
|
'@aws-sdk/util-format-url': 3.972.3
|
||||||
|
|
@ -5888,7 +5900,9 @@ snapshots:
|
||||||
'@smithy/protocol-http': 5.3.8
|
'@smithy/protocol-http': 5.3.8
|
||||||
'@smithy/signature-v4': 5.3.8
|
'@smithy/signature-v4': 5.3.8
|
||||||
'@smithy/types': 4.12.0
|
'@smithy/types': 4.12.0
|
||||||
|
'@smithy/util-base64': 4.3.0
|
||||||
'@smithy/util-hex-encoding': 4.2.0
|
'@smithy/util-hex-encoding': 4.2.0
|
||||||
|
'@smithy/util-utf8': 4.2.0
|
||||||
tslib: 2.8.1
|
tslib: 2.8.1
|
||||||
|
|
||||||
'@aws-sdk/nested-clients@3.982.0':
|
'@aws-sdk/nested-clients@3.982.0':
|
||||||
|
|
@ -5977,6 +5991,49 @@ snapshots:
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- aws-crt
|
- aws-crt
|
||||||
|
|
||||||
|
'@aws-sdk/nested-clients@3.984.0':
|
||||||
|
dependencies:
|
||||||
|
'@aws-crypto/sha256-browser': 5.2.0
|
||||||
|
'@aws-crypto/sha256-js': 5.2.0
|
||||||
|
'@aws-sdk/core': 3.973.6
|
||||||
|
'@aws-sdk/middleware-host-header': 3.972.3
|
||||||
|
'@aws-sdk/middleware-logger': 3.972.3
|
||||||
|
'@aws-sdk/middleware-recursion-detection': 3.972.3
|
||||||
|
'@aws-sdk/middleware-user-agent': 3.972.6
|
||||||
|
'@aws-sdk/region-config-resolver': 3.972.3
|
||||||
|
'@aws-sdk/types': 3.973.1
|
||||||
|
'@aws-sdk/util-endpoints': 3.984.0
|
||||||
|
'@aws-sdk/util-user-agent-browser': 3.972.3
|
||||||
|
'@aws-sdk/util-user-agent-node': 3.972.4
|
||||||
|
'@smithy/config-resolver': 4.4.6
|
||||||
|
'@smithy/core': 3.22.1
|
||||||
|
'@smithy/fetch-http-handler': 5.3.9
|
||||||
|
'@smithy/hash-node': 4.2.8
|
||||||
|
'@smithy/invalid-dependency': 4.2.8
|
||||||
|
'@smithy/middleware-content-length': 4.2.8
|
||||||
|
'@smithy/middleware-endpoint': 4.4.13
|
||||||
|
'@smithy/middleware-retry': 4.4.30
|
||||||
|
'@smithy/middleware-serde': 4.2.9
|
||||||
|
'@smithy/middleware-stack': 4.2.8
|
||||||
|
'@smithy/node-config-provider': 4.3.8
|
||||||
|
'@smithy/node-http-handler': 4.4.9
|
||||||
|
'@smithy/protocol-http': 5.3.8
|
||||||
|
'@smithy/smithy-client': 4.11.2
|
||||||
|
'@smithy/types': 4.12.0
|
||||||
|
'@smithy/url-parser': 4.2.8
|
||||||
|
'@smithy/util-base64': 4.3.0
|
||||||
|
'@smithy/util-body-length-browser': 4.2.0
|
||||||
|
'@smithy/util-body-length-node': 4.2.1
|
||||||
|
'@smithy/util-defaults-mode-browser': 4.3.29
|
||||||
|
'@smithy/util-defaults-mode-node': 4.2.32
|
||||||
|
'@smithy/util-endpoints': 3.2.8
|
||||||
|
'@smithy/util-middleware': 4.2.8
|
||||||
|
'@smithy/util-retry': 4.2.8
|
||||||
|
'@smithy/util-utf8': 4.2.0
|
||||||
|
tslib: 2.8.1
|
||||||
|
transitivePeerDependencies:
|
||||||
|
- aws-crt
|
||||||
|
|
||||||
'@aws-sdk/region-config-resolver@3.972.3':
|
'@aws-sdk/region-config-resolver@3.972.3':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@aws-sdk/types': 3.973.1
|
'@aws-sdk/types': 3.973.1
|
||||||
|
|
@ -6009,6 +6066,18 @@ snapshots:
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- aws-crt
|
- aws-crt
|
||||||
|
|
||||||
|
'@aws-sdk/token-providers@3.984.0':
|
||||||
|
dependencies:
|
||||||
|
'@aws-sdk/core': 3.973.6
|
||||||
|
'@aws-sdk/nested-clients': 3.984.0
|
||||||
|
'@aws-sdk/types': 3.973.1
|
||||||
|
'@smithy/property-provider': 4.2.8
|
||||||
|
'@smithy/shared-ini-file-loader': 4.4.3
|
||||||
|
'@smithy/types': 4.12.0
|
||||||
|
tslib: 2.8.1
|
||||||
|
transitivePeerDependencies:
|
||||||
|
- aws-crt
|
||||||
|
|
||||||
'@aws-sdk/types@3.973.1':
|
'@aws-sdk/types@3.973.1':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@smithy/types': 4.12.0
|
'@smithy/types': 4.12.0
|
||||||
|
|
@ -6030,6 +6099,14 @@ snapshots:
|
||||||
'@smithy/util-endpoints': 3.2.8
|
'@smithy/util-endpoints': 3.2.8
|
||||||
tslib: 2.8.1
|
tslib: 2.8.1
|
||||||
|
|
||||||
|
'@aws-sdk/util-endpoints@3.984.0':
|
||||||
|
dependencies:
|
||||||
|
'@aws-sdk/types': 3.973.1
|
||||||
|
'@smithy/types': 4.12.0
|
||||||
|
'@smithy/url-parser': 4.2.8
|
||||||
|
'@smithy/util-endpoints': 3.2.8
|
||||||
|
tslib: 2.8.1
|
||||||
|
|
||||||
'@aws-sdk/util-format-url@3.972.3':
|
'@aws-sdk/util-format-url@3.972.3':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@aws-sdk/types': 3.973.1
|
'@aws-sdk/types': 3.973.1
|
||||||
|
|
@ -6346,9 +6423,10 @@ snapshots:
|
||||||
'@eshaz/web-worker@1.2.2':
|
'@eshaz/web-worker@1.2.2':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
'@google/genai@1.34.0':
|
'@google/genai@1.40.0':
|
||||||
dependencies:
|
dependencies:
|
||||||
google-auth-library: 10.5.0
|
google-auth-library: 10.5.0
|
||||||
|
protobufjs: 7.5.4
|
||||||
ws: 8.19.0
|
ws: 8.19.0
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- bufferutil
|
- bufferutil
|
||||||
|
|
@ -6584,7 +6662,7 @@ snapshots:
|
||||||
|
|
||||||
'@larksuiteoapi/node-sdk@1.58.0':
|
'@larksuiteoapi/node-sdk@1.58.0':
|
||||||
dependencies:
|
dependencies:
|
||||||
axios: 1.13.4(debug@4.4.3)
|
axios: 1.13.4
|
||||||
lodash.identity: 3.0.0
|
lodash.identity: 3.0.0
|
||||||
lodash.merge: 4.6.2
|
lodash.merge: 4.6.2
|
||||||
lodash.pickby: 4.6.0
|
lodash.pickby: 4.6.0
|
||||||
|
|
@ -6600,7 +6678,7 @@ snapshots:
|
||||||
dependencies:
|
dependencies:
|
||||||
'@types/node': 24.10.10
|
'@types/node': 24.10.10
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
axios: 1.13.4(debug@4.4.3)
|
axios: 1.13.4
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- debug
|
- debug
|
||||||
|
|
||||||
|
|
@ -6695,9 +6773,9 @@ snapshots:
|
||||||
std-env: 3.10.0
|
std-env: 3.10.0
|
||||||
yoctocolors: 2.1.2
|
yoctocolors: 2.1.2
|
||||||
|
|
||||||
'@mariozechner/pi-agent-core@0.52.0(ws@8.19.0)(zod@4.3.6)':
|
'@mariozechner/pi-agent-core@0.52.2(ws@8.19.0)(zod@4.3.6)':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@mariozechner/pi-ai': 0.52.0(ws@8.19.0)(zod@4.3.6)
|
'@mariozechner/pi-ai': 0.52.2(ws@8.19.0)(zod@4.3.6)
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- '@modelcontextprotocol/sdk'
|
- '@modelcontextprotocol/sdk'
|
||||||
- aws-crt
|
- aws-crt
|
||||||
|
|
@ -6707,11 +6785,11 @@ snapshots:
|
||||||
- ws
|
- ws
|
||||||
- zod
|
- zod
|
||||||
|
|
||||||
'@mariozechner/pi-ai@0.52.0(ws@8.19.0)(zod@4.3.6)':
|
'@mariozechner/pi-ai@0.52.2(ws@8.19.0)(zod@4.3.6)':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@anthropic-ai/sdk': 0.71.2(zod@4.3.6)
|
'@anthropic-ai/sdk': 0.73.0(zod@4.3.6)
|
||||||
'@aws-sdk/client-bedrock-runtime': 3.983.0
|
'@aws-sdk/client-bedrock-runtime': 3.984.0
|
||||||
'@google/genai': 1.34.0
|
'@google/genai': 1.40.0
|
||||||
'@mistralai/mistralai': 1.10.0
|
'@mistralai/mistralai': 1.10.0
|
||||||
'@sinclair/typebox': 0.34.47
|
'@sinclair/typebox': 0.34.47
|
||||||
ajv: 8.17.1
|
ajv: 8.17.1
|
||||||
|
|
@ -6731,12 +6809,12 @@ snapshots:
|
||||||
- ws
|
- ws
|
||||||
- zod
|
- zod
|
||||||
|
|
||||||
'@mariozechner/pi-coding-agent@0.52.0(ws@8.19.0)(zod@4.3.6)':
|
'@mariozechner/pi-coding-agent@0.52.2(ws@8.19.0)(zod@4.3.6)':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@mariozechner/jiti': 2.6.5
|
'@mariozechner/jiti': 2.6.5
|
||||||
'@mariozechner/pi-agent-core': 0.52.0(ws@8.19.0)(zod@4.3.6)
|
'@mariozechner/pi-agent-core': 0.52.2(ws@8.19.0)(zod@4.3.6)
|
||||||
'@mariozechner/pi-ai': 0.52.0(ws@8.19.0)(zod@4.3.6)
|
'@mariozechner/pi-ai': 0.52.2(ws@8.19.0)(zod@4.3.6)
|
||||||
'@mariozechner/pi-tui': 0.52.0
|
'@mariozechner/pi-tui': 0.52.2
|
||||||
'@silvia-odwyer/photon-node': 0.3.4
|
'@silvia-odwyer/photon-node': 0.3.4
|
||||||
chalk: 5.6.2
|
chalk: 5.6.2
|
||||||
cli-highlight: 2.1.11
|
cli-highlight: 2.1.11
|
||||||
|
|
@ -6760,7 +6838,7 @@ snapshots:
|
||||||
- ws
|
- ws
|
||||||
- zod
|
- zod
|
||||||
|
|
||||||
'@mariozechner/pi-tui@0.52.0':
|
'@mariozechner/pi-tui@0.52.2':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@types/mime-types': 2.1.4
|
'@types/mime-types': 2.1.4
|
||||||
chalk: 5.6.2
|
chalk: 5.6.2
|
||||||
|
|
@ -6803,7 +6881,7 @@ snapshots:
|
||||||
'@azure/core-auth': 1.10.1
|
'@azure/core-auth': 1.10.1
|
||||||
'@azure/msal-node': 3.8.6
|
'@azure/msal-node': 3.8.6
|
||||||
'@microsoft/agents-activity': 1.2.3
|
'@microsoft/agents-activity': 1.2.3
|
||||||
axios: 1.13.4(debug@4.4.3)
|
axios: 1.13.4
|
||||||
jsonwebtoken: 9.0.3
|
jsonwebtoken: 9.0.3
|
||||||
jwks-rsa: 3.2.2
|
jwks-rsa: 3.2.2
|
||||||
object-path: 0.11.8
|
object-path: 0.11.8
|
||||||
|
|
@ -7590,7 +7668,7 @@ snapshots:
|
||||||
'@slack/types': 2.19.0
|
'@slack/types': 2.19.0
|
||||||
'@slack/web-api': 7.13.0
|
'@slack/web-api': 7.13.0
|
||||||
'@types/express': 5.0.6
|
'@types/express': 5.0.6
|
||||||
axios: 1.13.4(debug@4.4.3)
|
axios: 1.13.4
|
||||||
express: 5.2.1
|
express: 5.2.1
|
||||||
path-to-regexp: 8.3.0
|
path-to-regexp: 8.3.0
|
||||||
raw-body: 3.0.2
|
raw-body: 3.0.2
|
||||||
|
|
@ -7636,7 +7714,7 @@ snapshots:
|
||||||
'@slack/types': 2.19.0
|
'@slack/types': 2.19.0
|
||||||
'@types/node': 25.2.0
|
'@types/node': 25.2.0
|
||||||
'@types/retry': 0.12.0
|
'@types/retry': 0.12.0
|
||||||
axios: 1.13.4(debug@4.4.3)
|
axios: 1.13.4
|
||||||
eventemitter3: 5.0.4
|
eventemitter3: 5.0.4
|
||||||
form-data: 2.5.4
|
form-data: 2.5.4
|
||||||
is-electron: 2.2.2
|
is-electron: 2.2.2
|
||||||
|
|
@ -8120,7 +8198,7 @@ snapshots:
|
||||||
|
|
||||||
'@types/node@10.17.60': {}
|
'@types/node@10.17.60': {}
|
||||||
|
|
||||||
'@types/node@20.19.31':
|
'@types/node@20.19.32':
|
||||||
dependencies:
|
dependencies:
|
||||||
undici-types: 6.21.0
|
undici-types: 6.21.0
|
||||||
|
|
||||||
|
|
@ -8448,7 +8526,7 @@ snapshots:
|
||||||
'@swc/helpers': 0.5.18
|
'@swc/helpers': 0.5.18
|
||||||
'@types/command-line-args': 5.2.3
|
'@types/command-line-args': 5.2.3
|
||||||
'@types/command-line-usage': 5.0.4
|
'@types/command-line-usage': 5.0.4
|
||||||
'@types/node': 20.19.31
|
'@types/node': 20.19.32
|
||||||
command-line-args: 5.2.1
|
command-line-args: 5.2.1
|
||||||
command-line-usage: 7.0.3
|
command-line-usage: 7.0.3
|
||||||
flatbuffers: 24.12.23
|
flatbuffers: 24.12.23
|
||||||
|
|
@ -8530,6 +8608,14 @@ snapshots:
|
||||||
|
|
||||||
aws4@1.13.2: {}
|
aws4@1.13.2: {}
|
||||||
|
|
||||||
|
axios@1.13.4:
|
||||||
|
dependencies:
|
||||||
|
follow-redirects: 1.15.11
|
||||||
|
form-data: 2.5.4
|
||||||
|
proxy-from-env: 1.1.0
|
||||||
|
transitivePeerDependencies:
|
||||||
|
- debug
|
||||||
|
|
||||||
axios@1.13.4(debug@4.4.3):
|
axios@1.13.4(debug@4.4.3):
|
||||||
dependencies:
|
dependencies:
|
||||||
follow-redirects: 1.15.11(debug@4.4.3)
|
follow-redirects: 1.15.11(debug@4.4.3)
|
||||||
|
|
@ -9105,6 +9191,8 @@ snapshots:
|
||||||
|
|
||||||
flatbuffers@24.12.23: {}
|
flatbuffers@24.12.23: {}
|
||||||
|
|
||||||
|
follow-redirects@1.15.11: {}
|
||||||
|
|
||||||
follow-redirects@1.15.11(debug@4.4.3):
|
follow-redirects@1.15.11(debug@4.4.3):
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
debug: 4.4.3
|
debug: 4.4.3
|
||||||
|
|
|
||||||
|
|
@ -106,7 +106,7 @@ async function main(): Promise<void> {
|
||||||
contextWindow: 200000,
|
contextWindow: 200000,
|
||||||
maxTokens: 8192,
|
maxTokens: 8192,
|
||||||
};
|
};
|
||||||
const opusModel = getModel("anthropic", "claude-opus-4-5");
|
const opusModel = getModel("anthropic", "claude-opus-4-6");
|
||||||
|
|
||||||
console.log(`Prompt: ${prompt}`);
|
console.log(`Prompt: ${prompt}`);
|
||||||
console.log(`Runs: ${runs}`);
|
console.log(`Runs: ${runs}`);
|
||||||
|
|
|
||||||
|
|
@ -400,9 +400,13 @@ run_profile() {
|
||||||
"openai/gpt-4.1-mini")"
|
"openai/gpt-4.1-mini")"
|
||||||
else
|
else
|
||||||
agent_model="$(set_agent_model "$profile" \
|
agent_model="$(set_agent_model "$profile" \
|
||||||
|
"anthropic/claude-opus-4-6" \
|
||||||
|
"claude-opus-4-6" \
|
||||||
"anthropic/claude-opus-4-5" \
|
"anthropic/claude-opus-4-5" \
|
||||||
"claude-opus-4-5")"
|
"claude-opus-4-5")"
|
||||||
image_model="$(set_image_model "$profile" \
|
image_model="$(set_image_model "$profile" \
|
||||||
|
"anthropic/claude-opus-4-6" \
|
||||||
|
"claude-opus-4-6" \
|
||||||
"anthropic/claude-opus-4-5" \
|
"anthropic/claude-opus-4-5" \
|
||||||
"claude-opus-4-5")"
|
"claude-opus-4-5")"
|
||||||
fi
|
fi
|
||||||
|
|
|
||||||
|
|
@ -12,7 +12,7 @@ import (
|
||||||
const (
|
const (
|
||||||
workflowVersion = 15
|
workflowVersion = 15
|
||||||
providerName = "pi"
|
providerName = "pi"
|
||||||
modelVersion = "claude-opus-4-5"
|
modelVersion = "claude-opus-4-6"
|
||||||
)
|
)
|
||||||
|
|
||||||
func cacheNamespace() string {
|
func cacheNamespace() string {
|
||||||
|
|
|
||||||
|
|
@ -85,10 +85,11 @@ async function main() {
|
||||||
agents: {
|
agents: {
|
||||||
defaults: {
|
defaults: {
|
||||||
model: {
|
model: {
|
||||||
primary: "anthropic/claude-opus-4-5",
|
primary: "anthropic/claude-opus-4-6",
|
||||||
fallbacks: ["zai/glm-4.7"],
|
fallbacks: ["zai/glm-4.7"],
|
||||||
},
|
},
|
||||||
models: {
|
models: {
|
||||||
|
"anthropic/claude-opus-4-6": {},
|
||||||
"anthropic/claude-opus-4-5": {},
|
"anthropic/claude-opus-4-5": {},
|
||||||
"zai/glm-4.7": {},
|
"zai/glm-4.7": {},
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -2,5 +2,5 @@
|
||||||
// Model id uses pi-ai's built-in Anthropic catalog.
|
// Model id uses pi-ai's built-in Anthropic catalog.
|
||||||
export const DEFAULT_PROVIDER = "anthropic";
|
export const DEFAULT_PROVIDER = "anthropic";
|
||||||
export const DEFAULT_MODEL = "claude-opus-4-6";
|
export const DEFAULT_MODEL = "claude-opus-4-6";
|
||||||
// Context window: Opus supports ~200k tokens (per pi-ai models.generated.ts for Opus 4.5).
|
// Conservative fallback used when model metadata is unavailable.
|
||||||
export const DEFAULT_CONTEXT_TOKENS = 200_000;
|
export const DEFAULT_CONTEXT_TOKENS = 200_000;
|
||||||
|
|
|
||||||
|
|
@ -140,7 +140,7 @@ describe("getApiKeyForModel", () => {
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
error = err;
|
error = err;
|
||||||
}
|
}
|
||||||
expect(String(error)).toContain("openai-codex/gpt-5.2");
|
expect(String(error)).toContain("openai-codex/gpt-5.3-codex");
|
||||||
} finally {
|
} finally {
|
||||||
if (previousOpenAiKey === undefined) {
|
if (previousOpenAiKey === undefined) {
|
||||||
delete process.env.OPENAI_API_KEY;
|
delete process.env.OPENAI_API_KEY;
|
||||||
|
|
|
||||||
|
|
@ -213,7 +213,7 @@ export async function resolveApiKeyForProvider(params: {
|
||||||
const hasCodex = listProfilesForProvider(store, "openai-codex").length > 0;
|
const hasCodex = listProfilesForProvider(store, "openai-codex").length > 0;
|
||||||
if (hasCodex) {
|
if (hasCodex) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
'No API key found for provider "openai". You are authenticated with OpenAI Codex OAuth. Use openai-codex/gpt-5.2 (ChatGPT OAuth) or set OPENAI_API_KEY for openai/gpt-5.2.',
|
'No API key found for provider "openai". You are authenticated with OpenAI Codex OAuth. Use openai-codex/gpt-5.3-codex (OAuth) or set OPENAI_API_KEY to use openai/gpt-5.1-codex.',
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -13,9 +13,9 @@ import {
|
||||||
isTimeoutError,
|
isTimeoutError,
|
||||||
} from "./failover-error.js";
|
} from "./failover-error.js";
|
||||||
import {
|
import {
|
||||||
|
buildConfiguredAllowlistKeys,
|
||||||
buildModelAliasIndex,
|
buildModelAliasIndex,
|
||||||
modelKey,
|
modelKey,
|
||||||
parseModelRef,
|
|
||||||
resolveConfiguredModelRef,
|
resolveConfiguredModelRef,
|
||||||
resolveModelRefFromString,
|
resolveModelRefFromString,
|
||||||
} from "./model-selection.js";
|
} from "./model-selection.js";
|
||||||
|
|
@ -51,28 +51,6 @@ function shouldRethrowAbort(err: unknown): boolean {
|
||||||
return isAbortError(err) && !isTimeoutError(err);
|
return isAbortError(err) && !isTimeoutError(err);
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildAllowedModelKeys(
|
|
||||||
cfg: OpenClawConfig | undefined,
|
|
||||||
defaultProvider: string,
|
|
||||||
): Set<string> | null {
|
|
||||||
const rawAllowlist = (() => {
|
|
||||||
const modelMap = cfg?.agents?.defaults?.models ?? {};
|
|
||||||
return Object.keys(modelMap);
|
|
||||||
})();
|
|
||||||
if (rawAllowlist.length === 0) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
const keys = new Set<string>();
|
|
||||||
for (const raw of rawAllowlist) {
|
|
||||||
const parsed = parseModelRef(String(raw ?? ""), defaultProvider);
|
|
||||||
if (!parsed) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
keys.add(modelKey(parsed.provider, parsed.model));
|
|
||||||
}
|
|
||||||
return keys.size > 0 ? keys : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
function resolveImageFallbackCandidates(params: {
|
function resolveImageFallbackCandidates(params: {
|
||||||
cfg: OpenClawConfig | undefined;
|
cfg: OpenClawConfig | undefined;
|
||||||
defaultProvider: string;
|
defaultProvider: string;
|
||||||
|
|
@ -82,7 +60,10 @@ function resolveImageFallbackCandidates(params: {
|
||||||
cfg: params.cfg ?? {},
|
cfg: params.cfg ?? {},
|
||||||
defaultProvider: params.defaultProvider,
|
defaultProvider: params.defaultProvider,
|
||||||
});
|
});
|
||||||
const allowlist = buildAllowedModelKeys(params.cfg, params.defaultProvider);
|
const allowlist = buildConfiguredAllowlistKeys({
|
||||||
|
cfg: params.cfg,
|
||||||
|
defaultProvider: params.defaultProvider,
|
||||||
|
});
|
||||||
const seen = new Set<string>();
|
const seen = new Set<string>();
|
||||||
const candidates: ModelCandidate[] = [];
|
const candidates: ModelCandidate[] = [];
|
||||||
|
|
||||||
|
|
@ -166,7 +147,10 @@ function resolveFallbackCandidates(params: {
|
||||||
cfg: params.cfg ?? {},
|
cfg: params.cfg ?? {},
|
||||||
defaultProvider,
|
defaultProvider,
|
||||||
});
|
});
|
||||||
const allowlist = buildAllowedModelKeys(params.cfg, defaultProvider);
|
const allowlist = buildConfiguredAllowlistKeys({
|
||||||
|
cfg: params.cfg,
|
||||||
|
defaultProvider,
|
||||||
|
});
|
||||||
const seen = new Set<string>();
|
const seen = new Set<string>();
|
||||||
const candidates: ModelCandidate[] = [];
|
const candidates: ModelCandidate[] = [];
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -29,6 +29,17 @@ describe("model-selection", () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("normalizes anthropic alias refs to canonical model ids", () => {
|
||||||
|
expect(parseModelRef("anthropic/opus-4.6", "openai")).toEqual({
|
||||||
|
provider: "anthropic",
|
||||||
|
model: "claude-opus-4-6",
|
||||||
|
});
|
||||||
|
expect(parseModelRef("opus-4.6", "anthropic")).toEqual({
|
||||||
|
provider: "anthropic",
|
||||||
|
model: "claude-opus-4-6",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
it("should use default provider if none specified", () => {
|
it("should use default provider if none specified", () => {
|
||||||
expect(parseModelRef("claude-3-5-sonnet", "anthropic")).toEqual({
|
expect(parseModelRef("claude-3-5-sonnet", "anthropic")).toEqual({
|
||||||
provider: "anthropic",
|
provider: "anthropic",
|
||||||
|
|
|
||||||
|
|
@ -16,6 +16,12 @@ export type ModelAliasIndex = {
|
||||||
byKey: Map<string, string[]>;
|
byKey: Map<string, string[]>;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const ANTHROPIC_MODEL_ALIASES: Record<string, string> = {
|
||||||
|
"opus-4.6": "claude-opus-4-6",
|
||||||
|
"opus-4.5": "claude-opus-4-5",
|
||||||
|
"sonnet-4.5": "claude-sonnet-4-5",
|
||||||
|
};
|
||||||
|
|
||||||
function normalizeAliasKey(value: string): string {
|
function normalizeAliasKey(value: string): string {
|
||||||
return value.trim().toLowerCase();
|
return value.trim().toLowerCase();
|
||||||
}
|
}
|
||||||
|
|
@ -59,19 +65,7 @@ function normalizeAnthropicModelId(model: string): string {
|
||||||
return trimmed;
|
return trimmed;
|
||||||
}
|
}
|
||||||
const lower = trimmed.toLowerCase();
|
const lower = trimmed.toLowerCase();
|
||||||
if (lower === "opus-4.6") {
|
return ANTHROPIC_MODEL_ALIASES[lower] ?? trimmed;
|
||||||
return "claude-opus-4-6";
|
|
||||||
}
|
|
||||||
if (lower === "opus-4.5") {
|
|
||||||
return "claude-opus-4-5";
|
|
||||||
}
|
|
||||||
if (lower === "opus-4.6") {
|
|
||||||
return "claude-opus-4-6";
|
|
||||||
}
|
|
||||||
if (lower === "sonnet-4.5") {
|
|
||||||
return "claude-sonnet-4-5";
|
|
||||||
}
|
|
||||||
return trimmed;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function normalizeProviderModelId(provider: string, model: string): string {
|
function normalizeProviderModelId(provider: string, model: string): string {
|
||||||
|
|
@ -105,6 +99,33 @@ export function parseModelRef(raw: string, defaultProvider: string): ModelRef |
|
||||||
return { provider, model: normalizedModel };
|
return { provider, model: normalizedModel };
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function resolveAllowlistModelKey(raw: string, defaultProvider: string): string | null {
|
||||||
|
const parsed = parseModelRef(raw, defaultProvider);
|
||||||
|
if (!parsed) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return modelKey(parsed.provider, parsed.model);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function buildConfiguredAllowlistKeys(params: {
|
||||||
|
cfg: OpenClawConfig | undefined;
|
||||||
|
defaultProvider: string;
|
||||||
|
}): Set<string> | null {
|
||||||
|
const rawAllowlist = Object.keys(params.cfg?.agents?.defaults?.models ?? {});
|
||||||
|
if (rawAllowlist.length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const keys = new Set<string>();
|
||||||
|
for (const raw of rawAllowlist) {
|
||||||
|
const key = resolveAllowlistModelKey(String(raw ?? ""), params.defaultProvider);
|
||||||
|
if (key) {
|
||||||
|
keys.add(key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return keys.size > 0 ? keys : null;
|
||||||
|
}
|
||||||
|
|
||||||
export function buildModelAliasIndex(params: {
|
export function buildModelAliasIndex(params: {
|
||||||
cfg: OpenClawConfig;
|
cfg: OpenClawConfig;
|
||||||
defaultProvider: string;
|
defaultProvider: string;
|
||||||
|
|
|
||||||
|
|
@ -8,12 +8,12 @@ import {
|
||||||
|
|
||||||
describe("resolveOpencodeZenAlias", () => {
|
describe("resolveOpencodeZenAlias", () => {
|
||||||
it("resolves opus alias", () => {
|
it("resolves opus alias", () => {
|
||||||
expect(resolveOpencodeZenAlias("opus")).toBe("claude-opus-4-5");
|
expect(resolveOpencodeZenAlias("opus")).toBe("claude-opus-4-6");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("keeps legacy aliases working", () => {
|
it("keeps legacy aliases working", () => {
|
||||||
expect(resolveOpencodeZenAlias("sonnet")).toBe("claude-opus-4-5");
|
expect(resolveOpencodeZenAlias("sonnet")).toBe("claude-opus-4-6");
|
||||||
expect(resolveOpencodeZenAlias("haiku")).toBe("claude-opus-4-5");
|
expect(resolveOpencodeZenAlias("haiku")).toBe("claude-opus-4-6");
|
||||||
expect(resolveOpencodeZenAlias("gpt4")).toBe("gpt-5.1");
|
expect(resolveOpencodeZenAlias("gpt4")).toBe("gpt-5.1");
|
||||||
expect(resolveOpencodeZenAlias("o1")).toBe("gpt-5.2");
|
expect(resolveOpencodeZenAlias("o1")).toBe("gpt-5.2");
|
||||||
expect(resolveOpencodeZenAlias("gemini-2.5")).toBe("gemini-3-pro");
|
expect(resolveOpencodeZenAlias("gemini-2.5")).toBe("gemini-3-pro");
|
||||||
|
|
@ -32,14 +32,14 @@ describe("resolveOpencodeZenAlias", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
it("is case-insensitive", () => {
|
it("is case-insensitive", () => {
|
||||||
expect(resolveOpencodeZenAlias("OPUS")).toBe("claude-opus-4-5");
|
expect(resolveOpencodeZenAlias("OPUS")).toBe("claude-opus-4-6");
|
||||||
expect(resolveOpencodeZenAlias("Gpt5")).toBe("gpt-5.2");
|
expect(resolveOpencodeZenAlias("Gpt5")).toBe("gpt-5.2");
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("resolveOpencodeZenModelApi", () => {
|
describe("resolveOpencodeZenModelApi", () => {
|
||||||
it("maps APIs by model family", () => {
|
it("maps APIs by model family", () => {
|
||||||
expect(resolveOpencodeZenModelApi("claude-opus-4-5")).toBe("anthropic-messages");
|
expect(resolveOpencodeZenModelApi("claude-opus-4-6")).toBe("anthropic-messages");
|
||||||
expect(resolveOpencodeZenModelApi("gemini-3-pro")).toBe("google-generative-ai");
|
expect(resolveOpencodeZenModelApi("gemini-3-pro")).toBe("google-generative-ai");
|
||||||
expect(resolveOpencodeZenModelApi("gpt-5.2")).toBe("openai-responses");
|
expect(resolveOpencodeZenModelApi("gpt-5.2")).toBe("openai-responses");
|
||||||
expect(resolveOpencodeZenModelApi("alpha-gd4")).toBe("openai-completions");
|
expect(resolveOpencodeZenModelApi("alpha-gd4")).toBe("openai-completions");
|
||||||
|
|
@ -53,13 +53,14 @@ describe("getOpencodeZenStaticFallbackModels", () => {
|
||||||
it("returns an array of models", () => {
|
it("returns an array of models", () => {
|
||||||
const models = getOpencodeZenStaticFallbackModels();
|
const models = getOpencodeZenStaticFallbackModels();
|
||||||
expect(Array.isArray(models)).toBe(true);
|
expect(Array.isArray(models)).toBe(true);
|
||||||
expect(models.length).toBe(9);
|
expect(models.length).toBe(10);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("includes Claude, GPT, Gemini, and GLM models", () => {
|
it("includes Claude, GPT, Gemini, and GLM models", () => {
|
||||||
const models = getOpencodeZenStaticFallbackModels();
|
const models = getOpencodeZenStaticFallbackModels();
|
||||||
const ids = models.map((m) => m.id);
|
const ids = models.map((m) => m.id);
|
||||||
|
|
||||||
|
expect(ids).toContain("claude-opus-4-6");
|
||||||
expect(ids).toContain("claude-opus-4-5");
|
expect(ids).toContain("claude-opus-4-5");
|
||||||
expect(ids).toContain("gpt-5.2");
|
expect(ids).toContain("gpt-5.2");
|
||||||
expect(ids).toContain("gpt-5.1-codex");
|
expect(ids).toContain("gpt-5.1-codex");
|
||||||
|
|
@ -83,15 +84,16 @@ describe("getOpencodeZenStaticFallbackModels", () => {
|
||||||
|
|
||||||
describe("OPENCODE_ZEN_MODEL_ALIASES", () => {
|
describe("OPENCODE_ZEN_MODEL_ALIASES", () => {
|
||||||
it("has expected aliases", () => {
|
it("has expected aliases", () => {
|
||||||
expect(OPENCODE_ZEN_MODEL_ALIASES.opus).toBe("claude-opus-4-5");
|
expect(OPENCODE_ZEN_MODEL_ALIASES.opus).toBe("claude-opus-4-6");
|
||||||
expect(OPENCODE_ZEN_MODEL_ALIASES.codex).toBe("gpt-5.1-codex");
|
expect(OPENCODE_ZEN_MODEL_ALIASES.codex).toBe("gpt-5.1-codex");
|
||||||
expect(OPENCODE_ZEN_MODEL_ALIASES.gpt5).toBe("gpt-5.2");
|
expect(OPENCODE_ZEN_MODEL_ALIASES.gpt5).toBe("gpt-5.2");
|
||||||
expect(OPENCODE_ZEN_MODEL_ALIASES.gemini).toBe("gemini-3-pro");
|
expect(OPENCODE_ZEN_MODEL_ALIASES.gemini).toBe("gemini-3-pro");
|
||||||
expect(OPENCODE_ZEN_MODEL_ALIASES.glm).toBe("glm-4.7");
|
expect(OPENCODE_ZEN_MODEL_ALIASES.glm).toBe("glm-4.7");
|
||||||
|
expect(OPENCODE_ZEN_MODEL_ALIASES["opus-4.5"]).toBe("claude-opus-4-5");
|
||||||
|
|
||||||
// Legacy aliases (kept for backward compatibility).
|
// Legacy aliases (kept for backward compatibility).
|
||||||
expect(OPENCODE_ZEN_MODEL_ALIASES.sonnet).toBe("claude-opus-4-5");
|
expect(OPENCODE_ZEN_MODEL_ALIASES.sonnet).toBe("claude-opus-4-6");
|
||||||
expect(OPENCODE_ZEN_MODEL_ALIASES.haiku).toBe("claude-opus-4-5");
|
expect(OPENCODE_ZEN_MODEL_ALIASES.haiku).toBe("claude-opus-4-6");
|
||||||
expect(OPENCODE_ZEN_MODEL_ALIASES.gpt4).toBe("gpt-5.1");
|
expect(OPENCODE_ZEN_MODEL_ALIASES.gpt4).toBe("gpt-5.1");
|
||||||
expect(OPENCODE_ZEN_MODEL_ALIASES.o1).toBe("gpt-5.2");
|
expect(OPENCODE_ZEN_MODEL_ALIASES.o1).toBe("gpt-5.2");
|
||||||
expect(OPENCODE_ZEN_MODEL_ALIASES["gemini-2.5"]).toBe("gemini-3-pro");
|
expect(OPENCODE_ZEN_MODEL_ALIASES["gemini-2.5"]).toBe("gemini-3-pro");
|
||||||
|
|
|
||||||
|
|
@ -11,7 +11,7 @@
|
||||||
import type { ModelApi, ModelDefinitionConfig } from "../config/types.js";
|
import type { ModelApi, ModelDefinitionConfig } from "../config/types.js";
|
||||||
|
|
||||||
export const OPENCODE_ZEN_API_BASE_URL = "https://opencode.ai/zen/v1";
|
export const OPENCODE_ZEN_API_BASE_URL = "https://opencode.ai/zen/v1";
|
||||||
export const OPENCODE_ZEN_DEFAULT_MODEL = "claude-opus-4-5";
|
export const OPENCODE_ZEN_DEFAULT_MODEL = "claude-opus-4-6";
|
||||||
export const OPENCODE_ZEN_DEFAULT_MODEL_REF = `opencode/${OPENCODE_ZEN_DEFAULT_MODEL}`;
|
export const OPENCODE_ZEN_DEFAULT_MODEL_REF = `opencode/${OPENCODE_ZEN_DEFAULT_MODEL}`;
|
||||||
|
|
||||||
// Cache for fetched models (1 hour TTL)
|
// Cache for fetched models (1 hour TTL)
|
||||||
|
|
@ -21,19 +21,20 @@ const CACHE_TTL_MS = 60 * 60 * 1000; // 1 hour
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Model aliases for convenient shortcuts.
|
* Model aliases for convenient shortcuts.
|
||||||
* Users can use "opus" instead of "claude-opus-4-5", etc.
|
* Users can use "opus" instead of "claude-opus-4-6", etc.
|
||||||
*/
|
*/
|
||||||
export const OPENCODE_ZEN_MODEL_ALIASES: Record<string, string> = {
|
export const OPENCODE_ZEN_MODEL_ALIASES: Record<string, string> = {
|
||||||
// Claude
|
// Claude
|
||||||
opus: "claude-opus-4-5",
|
opus: "claude-opus-4-6",
|
||||||
|
"opus-4.6": "claude-opus-4-6",
|
||||||
"opus-4.5": "claude-opus-4-5",
|
"opus-4.5": "claude-opus-4-5",
|
||||||
"opus-4": "claude-opus-4-5",
|
"opus-4": "claude-opus-4-6",
|
||||||
|
|
||||||
// Legacy Claude aliases (OpenCode Zen rotates model catalogs; keep old keys working).
|
// Legacy Claude aliases (OpenCode Zen rotates model catalogs; keep old keys working).
|
||||||
sonnet: "claude-opus-4-5",
|
sonnet: "claude-opus-4-6",
|
||||||
"sonnet-4": "claude-opus-4-5",
|
"sonnet-4": "claude-opus-4-6",
|
||||||
haiku: "claude-opus-4-5",
|
haiku: "claude-opus-4-6",
|
||||||
"haiku-3.5": "claude-opus-4-5",
|
"haiku-3.5": "claude-opus-4-6",
|
||||||
|
|
||||||
// GPT-5.x family
|
// GPT-5.x family
|
||||||
gpt5: "gpt-5.2",
|
gpt5: "gpt-5.2",
|
||||||
|
|
@ -119,6 +120,7 @@ const MODEL_COSTS: Record<
|
||||||
cacheRead: 0.107,
|
cacheRead: 0.107,
|
||||||
cacheWrite: 0,
|
cacheWrite: 0,
|
||||||
},
|
},
|
||||||
|
"claude-opus-4-6": { input: 5, output: 25, cacheRead: 0.5, cacheWrite: 6.25 },
|
||||||
"claude-opus-4-5": { input: 5, output: 25, cacheRead: 0.5, cacheWrite: 6.25 },
|
"claude-opus-4-5": { input: 5, output: 25, cacheRead: 0.5, cacheWrite: 6.25 },
|
||||||
"gemini-3-pro": { input: 2, output: 12, cacheRead: 0.2, cacheWrite: 0 },
|
"gemini-3-pro": { input: 2, output: 12, cacheRead: 0.2, cacheWrite: 0 },
|
||||||
"gpt-5.1-codex-mini": {
|
"gpt-5.1-codex-mini": {
|
||||||
|
|
@ -143,6 +145,7 @@ const DEFAULT_COST = { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 };
|
||||||
|
|
||||||
const MODEL_CONTEXT_WINDOWS: Record<string, number> = {
|
const MODEL_CONTEXT_WINDOWS: Record<string, number> = {
|
||||||
"gpt-5.1-codex": 400000,
|
"gpt-5.1-codex": 400000,
|
||||||
|
"claude-opus-4-6": 1000000,
|
||||||
"claude-opus-4-5": 200000,
|
"claude-opus-4-5": 200000,
|
||||||
"gemini-3-pro": 1048576,
|
"gemini-3-pro": 1048576,
|
||||||
"gpt-5.1-codex-mini": 400000,
|
"gpt-5.1-codex-mini": 400000,
|
||||||
|
|
@ -159,6 +162,7 @@ function getDefaultContextWindow(modelId: string): number {
|
||||||
|
|
||||||
const MODEL_MAX_TOKENS: Record<string, number> = {
|
const MODEL_MAX_TOKENS: Record<string, number> = {
|
||||||
"gpt-5.1-codex": 128000,
|
"gpt-5.1-codex": 128000,
|
||||||
|
"claude-opus-4-6": 128000,
|
||||||
"claude-opus-4-5": 64000,
|
"claude-opus-4-5": 64000,
|
||||||
"gemini-3-pro": 65536,
|
"gemini-3-pro": 65536,
|
||||||
"gpt-5.1-codex-mini": 128000,
|
"gpt-5.1-codex-mini": 128000,
|
||||||
|
|
@ -195,6 +199,7 @@ function buildModelDefinition(modelId: string): ModelDefinitionConfig {
|
||||||
*/
|
*/
|
||||||
const MODEL_NAMES: Record<string, string> = {
|
const MODEL_NAMES: Record<string, string> = {
|
||||||
"gpt-5.1-codex": "GPT-5.1 Codex",
|
"gpt-5.1-codex": "GPT-5.1 Codex",
|
||||||
|
"claude-opus-4-6": "Claude Opus 4.6",
|
||||||
"claude-opus-4-5": "Claude Opus 4.5",
|
"claude-opus-4-5": "Claude Opus 4.5",
|
||||||
"gemini-3-pro": "Gemini 3 Pro",
|
"gemini-3-pro": "Gemini 3 Pro",
|
||||||
"gpt-5.1-codex-mini": "GPT-5.1 Codex Mini",
|
"gpt-5.1-codex-mini": "GPT-5.1 Codex Mini",
|
||||||
|
|
@ -222,6 +227,7 @@ function formatModelName(modelId: string): string {
|
||||||
export function getOpencodeZenStaticFallbackModels(): ModelDefinitionConfig[] {
|
export function getOpencodeZenStaticFallbackModels(): ModelDefinitionConfig[] {
|
||||||
const modelIds = [
|
const modelIds = [
|
||||||
"gpt-5.1-codex",
|
"gpt-5.1-codex",
|
||||||
|
"claude-opus-4-6",
|
||||||
"claude-opus-4-5",
|
"claude-opus-4-5",
|
||||||
"gemini-3-pro",
|
"gemini-3-pro",
|
||||||
"gpt-5.1-codex-mini",
|
"gpt-5.1-codex-mini",
|
||||||
|
|
|
||||||
|
|
@ -53,7 +53,7 @@ describe("image tool implicit imageModel config", () => {
|
||||||
};
|
};
|
||||||
expect(resolveImageModelConfigForTool({ cfg, agentDir })).toEqual({
|
expect(resolveImageModelConfigForTool({ cfg, agentDir })).toEqual({
|
||||||
primary: "minimax/MiniMax-VL-01",
|
primary: "minimax/MiniMax-VL-01",
|
||||||
fallbacks: ["openai/gpt-5-mini", "anthropic/claude-opus-4-6"],
|
fallbacks: ["openai/gpt-5-mini", "anthropic/claude-opus-4-5"],
|
||||||
});
|
});
|
||||||
expect(createImageTool({ config: cfg, agentDir })).not.toBeNull();
|
expect(createImageTool({ config: cfg, agentDir })).not.toBeNull();
|
||||||
});
|
});
|
||||||
|
|
|
||||||
|
|
@ -24,6 +24,8 @@ import {
|
||||||
} from "./image-tool.helpers.js";
|
} from "./image-tool.helpers.js";
|
||||||
|
|
||||||
const DEFAULT_PROMPT = "Describe the image.";
|
const DEFAULT_PROMPT = "Describe the image.";
|
||||||
|
const ANTHROPIC_IMAGE_PRIMARY = "anthropic/claude-opus-4-6";
|
||||||
|
const ANTHROPIC_IMAGE_FALLBACK = "anthropic/claude-opus-4-5";
|
||||||
|
|
||||||
export const __testing = {
|
export const __testing = {
|
||||||
decodeDataUrl,
|
decodeDataUrl,
|
||||||
|
|
@ -117,7 +119,7 @@ export function resolveImageModelConfigForTool(params: {
|
||||||
} else if (primary.provider === "openai" && openaiOk) {
|
} else if (primary.provider === "openai" && openaiOk) {
|
||||||
preferred = "openai/gpt-5-mini";
|
preferred = "openai/gpt-5-mini";
|
||||||
} else if (primary.provider === "anthropic" && anthropicOk) {
|
} else if (primary.provider === "anthropic" && anthropicOk) {
|
||||||
preferred = "anthropic/claude-opus-4-6";
|
preferred = ANTHROPIC_IMAGE_PRIMARY;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (preferred?.trim()) {
|
if (preferred?.trim()) {
|
||||||
|
|
@ -125,7 +127,7 @@ export function resolveImageModelConfigForTool(params: {
|
||||||
addFallback("openai/gpt-5-mini");
|
addFallback("openai/gpt-5-mini");
|
||||||
}
|
}
|
||||||
if (anthropicOk) {
|
if (anthropicOk) {
|
||||||
addFallback("anthropic/claude-opus-4-6");
|
addFallback(ANTHROPIC_IMAGE_FALLBACK);
|
||||||
}
|
}
|
||||||
// Don't duplicate primary in fallbacks.
|
// Don't duplicate primary in fallbacks.
|
||||||
const pruned = fallbacks.filter((ref) => ref !== preferred);
|
const pruned = fallbacks.filter((ref) => ref !== preferred);
|
||||||
|
|
@ -138,7 +140,7 @@ export function resolveImageModelConfigForTool(params: {
|
||||||
// Cross-provider fallback when we can't pair with the primary provider.
|
// Cross-provider fallback when we can't pair with the primary provider.
|
||||||
if (openaiOk) {
|
if (openaiOk) {
|
||||||
if (anthropicOk) {
|
if (anthropicOk) {
|
||||||
addFallback("anthropic/claude-opus-4-6");
|
addFallback(ANTHROPIC_IMAGE_FALLBACK);
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
primary: "openai/gpt-5-mini",
|
primary: "openai/gpt-5-mini",
|
||||||
|
|
@ -146,7 +148,10 @@ export function resolveImageModelConfigForTool(params: {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
if (anthropicOk) {
|
if (anthropicOk) {
|
||||||
return { primary: "anthropic/claude-opus-4-6" };
|
return {
|
||||||
|
primary: ANTHROPIC_IMAGE_PRIMARY,
|
||||||
|
fallbacks: [ANTHROPIC_IMAGE_FALLBACK],
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
return null;
|
return null;
|
||||||
|
|
|
||||||
|
|
@ -154,7 +154,7 @@ describe("directive behavior", () => {
|
||||||
|
|
||||||
const texts = (Array.isArray(res) ? res : [res]).map((entry) => entry?.text).filter(Boolean);
|
const texts = (Array.isArray(res) ? res : [res]).map((entry) => entry?.text).filter(Boolean);
|
||||||
expect(texts).toContain(
|
expect(texts).toContain(
|
||||||
'Thinking level "xhigh" is only supported for openai/gpt-5.2, openai-codex/gpt-5.2-codex or openai-codex/gpt-5.1-codex.',
|
'Thinking level "xhigh" is only supported for openai/gpt-5.2, openai-codex/gpt-5.3-codex, openai-codex/gpt-5.2-codex or openai-codex/gpt-5.1-codex.',
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
|
||||||
|
|
@ -6,7 +6,7 @@
|
||||||
*/
|
*/
|
||||||
|
|
||||||
export type ResponsePrefixContext = {
|
export type ResponsePrefixContext = {
|
||||||
/** Short model name (e.g., "gpt-5.2", "claude-opus-4-5") */
|
/** Short model name (e.g., "gpt-5.2", "claude-opus-4-6") */
|
||||||
model?: string;
|
model?: string;
|
||||||
/** Full model ID including provider (e.g., "openai-codex/gpt-5.2") */
|
/** Full model ID including provider (e.g., "openai-codex/gpt-5.2") */
|
||||||
modelFull?: string;
|
modelFull?: string;
|
||||||
|
|
@ -71,12 +71,12 @@ export function resolveResponsePrefixTemplate(
|
||||||
*
|
*
|
||||||
* Strips:
|
* Strips:
|
||||||
* - Provider prefix (e.g., "openai/" from "openai/gpt-5.2")
|
* - Provider prefix (e.g., "openai/" from "openai/gpt-5.2")
|
||||||
* - Date suffixes (e.g., "-20251101" from "claude-opus-4-5-20251101")
|
* - Date suffixes (e.g., "-20260205" from "claude-opus-4-6-20260205")
|
||||||
* - Common version suffixes (e.g., "-latest")
|
* - Common version suffixes (e.g., "-latest")
|
||||||
*
|
*
|
||||||
* @example
|
* @example
|
||||||
* extractShortModelName("openai-codex/gpt-5.2") // "gpt-5.2"
|
* extractShortModelName("openai-codex/gpt-5.2") // "gpt-5.2"
|
||||||
* extractShortModelName("claude-opus-4-5-20251101") // "claude-opus-4-5"
|
* extractShortModelName("claude-opus-4-6-20260205") // "claude-opus-4-6"
|
||||||
* extractShortModelName("gpt-5.2-latest") // "gpt-5.2"
|
* extractShortModelName("gpt-5.2-latest") // "gpt-5.2"
|
||||||
*/
|
*/
|
||||||
export function extractShortModelName(fullModel: string): string {
|
export function extractShortModelName(fullModel: string): string {
|
||||||
|
|
|
||||||
|
|
@ -23,6 +23,7 @@ describe("normalizeThinkLevel", () => {
|
||||||
describe("listThinkingLevels", () => {
|
describe("listThinkingLevels", () => {
|
||||||
it("includes xhigh for codex models", () => {
|
it("includes xhigh for codex models", () => {
|
||||||
expect(listThinkingLevels(undefined, "gpt-5.2-codex")).toContain("xhigh");
|
expect(listThinkingLevels(undefined, "gpt-5.2-codex")).toContain("xhigh");
|
||||||
|
expect(listThinkingLevels(undefined, "gpt-5.3-codex")).toContain("xhigh");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("includes xhigh for openai gpt-5.2", () => {
|
it("includes xhigh for openai gpt-5.2", () => {
|
||||||
|
|
|
||||||
|
|
@ -23,6 +23,7 @@ export function isBinaryThinkingProvider(provider?: string | null): boolean {
|
||||||
|
|
||||||
export const XHIGH_MODEL_REFS = [
|
export const XHIGH_MODEL_REFS = [
|
||||||
"openai/gpt-5.2",
|
"openai/gpt-5.2",
|
||||||
|
"openai-codex/gpt-5.3-codex",
|
||||||
"openai-codex/gpt-5.2-codex",
|
"openai-codex/gpt-5.2-codex",
|
||||||
"openai-codex/gpt-5.1-codex",
|
"openai-codex/gpt-5.1-codex",
|
||||||
] as const;
|
] as const;
|
||||||
|
|
|
||||||
|
|
@ -7,6 +7,7 @@ import {
|
||||||
normalizeApiKeyInput,
|
normalizeApiKeyInput,
|
||||||
validateApiKeyInput,
|
validateApiKeyInput,
|
||||||
} from "./auth-choice.api-key.js";
|
} from "./auth-choice.api-key.js";
|
||||||
|
import { applyDefaultModelChoice } from "./auth-choice.default-model.js";
|
||||||
import { isRemoteEnvironment } from "./oauth-env.js";
|
import { isRemoteEnvironment } from "./oauth-env.js";
|
||||||
import { createVpsAwareOAuthHandlers } from "./oauth-flow.js";
|
import { createVpsAwareOAuthHandlers } from "./oauth-flow.js";
|
||||||
import { applyAuthProfileConfig, writeOAuthCredentials } from "./onboard-auth.js";
|
import { applyAuthProfileConfig, writeOAuthCredentials } from "./onboard-auth.js";
|
||||||
|
|
@ -15,6 +16,11 @@ import {
|
||||||
applyOpenAICodexModelDefault,
|
applyOpenAICodexModelDefault,
|
||||||
OPENAI_CODEX_DEFAULT_MODEL,
|
OPENAI_CODEX_DEFAULT_MODEL,
|
||||||
} from "./openai-codex-model-default.js";
|
} from "./openai-codex-model-default.js";
|
||||||
|
import {
|
||||||
|
applyOpenAIConfig,
|
||||||
|
applyOpenAIProviderConfig,
|
||||||
|
OPENAI_DEFAULT_MODEL,
|
||||||
|
} from "./openai-model-default.js";
|
||||||
|
|
||||||
export async function applyAuthChoiceOpenAI(
|
export async function applyAuthChoiceOpenAI(
|
||||||
params: ApplyAuthChoiceParams,
|
params: ApplyAuthChoiceParams,
|
||||||
|
|
@ -25,6 +31,18 @@ export async function applyAuthChoiceOpenAI(
|
||||||
}
|
}
|
||||||
|
|
||||||
if (authChoice === "openai-api-key") {
|
if (authChoice === "openai-api-key") {
|
||||||
|
let nextConfig = params.config;
|
||||||
|
let agentModelOverride: string | undefined;
|
||||||
|
const noteAgentModel = async (model: string) => {
|
||||||
|
if (!params.agentId) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
await params.prompter.note(
|
||||||
|
`Default model set to ${model} for agent "${params.agentId}".`,
|
||||||
|
"Model configured",
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
const envKey = resolveEnvApiKey("openai");
|
const envKey = resolveEnvApiKey("openai");
|
||||||
if (envKey) {
|
if (envKey) {
|
||||||
const useExisting = await params.prompter.confirm({
|
const useExisting = await params.prompter.confirm({
|
||||||
|
|
@ -43,7 +61,19 @@ export async function applyAuthChoiceOpenAI(
|
||||||
`Copied OPENAI_API_KEY to ${result.path} for launchd compatibility.`,
|
`Copied OPENAI_API_KEY to ${result.path} for launchd compatibility.`,
|
||||||
"OpenAI API key",
|
"OpenAI API key",
|
||||||
);
|
);
|
||||||
return { config: params.config };
|
const applied = await applyDefaultModelChoice({
|
||||||
|
config: nextConfig,
|
||||||
|
setDefaultModel: params.setDefaultModel,
|
||||||
|
defaultModel: OPENAI_DEFAULT_MODEL,
|
||||||
|
applyDefaultConfig: applyOpenAIConfig,
|
||||||
|
applyProviderConfig: applyOpenAIProviderConfig,
|
||||||
|
noteDefault: OPENAI_DEFAULT_MODEL,
|
||||||
|
noteAgentModel,
|
||||||
|
prompter: params.prompter,
|
||||||
|
});
|
||||||
|
nextConfig = applied.config;
|
||||||
|
agentModelOverride = applied.agentModelOverride ?? agentModelOverride;
|
||||||
|
return { config: nextConfig, agentModelOverride };
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -67,7 +97,19 @@ export async function applyAuthChoiceOpenAI(
|
||||||
`Saved OPENAI_API_KEY to ${result.path} for launchd compatibility.`,
|
`Saved OPENAI_API_KEY to ${result.path} for launchd compatibility.`,
|
||||||
"OpenAI API key",
|
"OpenAI API key",
|
||||||
);
|
);
|
||||||
return { config: params.config };
|
const applied = await applyDefaultModelChoice({
|
||||||
|
config: nextConfig,
|
||||||
|
setDefaultModel: params.setDefaultModel,
|
||||||
|
defaultModel: OPENAI_DEFAULT_MODEL,
|
||||||
|
applyDefaultConfig: applyOpenAIConfig,
|
||||||
|
applyProviderConfig: applyOpenAIProviderConfig,
|
||||||
|
noteDefault: OPENAI_DEFAULT_MODEL,
|
||||||
|
noteAgentModel,
|
||||||
|
prompter: params.prompter,
|
||||||
|
});
|
||||||
|
nextConfig = applied.config;
|
||||||
|
agentModelOverride = applied.agentModelOverride ?? agentModelOverride;
|
||||||
|
return { config: nextConfig, agentModelOverride };
|
||||||
}
|
}
|
||||||
|
|
||||||
if (params.authChoice === "openai-codex") {
|
if (params.authChoice === "openai-codex") {
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,77 @@
|
||||||
|
import { describe, expect, it, vi } from "vitest";
|
||||||
|
import type { OpenClawConfig } from "../config/config.js";
|
||||||
|
import type { WizardPrompter } from "../wizard/prompts.js";
|
||||||
|
import { applyDefaultModelChoice } from "./auth-choice.default-model.js";
|
||||||
|
|
||||||
|
function makePrompter(): WizardPrompter {
|
||||||
|
return {
|
||||||
|
intro: async () => {},
|
||||||
|
outro: async () => {},
|
||||||
|
note: async () => {},
|
||||||
|
select: async () => "",
|
||||||
|
multiselect: async () => [],
|
||||||
|
text: async () => "",
|
||||||
|
confirm: async () => false,
|
||||||
|
progress: () => ({ update: () => {}, stop: () => {} }),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("applyDefaultModelChoice", () => {
|
||||||
|
it("ensures allowlist entry exists when returning an agent override", async () => {
|
||||||
|
const defaultModel = "vercel-ai-gateway/anthropic/claude-opus-4.6";
|
||||||
|
const noteAgentModel = vi.fn(async () => {});
|
||||||
|
const applied = await applyDefaultModelChoice({
|
||||||
|
config: {},
|
||||||
|
setDefaultModel: false,
|
||||||
|
defaultModel,
|
||||||
|
// Simulate a provider function that does not explicitly add the entry.
|
||||||
|
applyProviderConfig: (config: OpenClawConfig) => config,
|
||||||
|
applyDefaultConfig: (config: OpenClawConfig) => config,
|
||||||
|
noteAgentModel,
|
||||||
|
prompter: makePrompter(),
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(noteAgentModel).toHaveBeenCalledWith(defaultModel);
|
||||||
|
expect(applied.agentModelOverride).toBe(defaultModel);
|
||||||
|
expect(applied.config.agents?.defaults?.models?.[defaultModel]).toEqual({});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("adds canonical allowlist key for anthropic aliases", async () => {
|
||||||
|
const defaultModel = "anthropic/opus-4.6";
|
||||||
|
const applied = await applyDefaultModelChoice({
|
||||||
|
config: {},
|
||||||
|
setDefaultModel: false,
|
||||||
|
defaultModel,
|
||||||
|
applyProviderConfig: (config: OpenClawConfig) => config,
|
||||||
|
applyDefaultConfig: (config: OpenClawConfig) => config,
|
||||||
|
noteAgentModel: async () => {},
|
||||||
|
prompter: makePrompter(),
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(applied.config.agents?.defaults?.models?.[defaultModel]).toEqual({});
|
||||||
|
expect(applied.config.agents?.defaults?.models?.["anthropic/claude-opus-4-6"]).toEqual({});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("uses applyDefaultConfig path when setDefaultModel is true", async () => {
|
||||||
|
const defaultModel = "openai/gpt-5.1-codex";
|
||||||
|
const applied = await applyDefaultModelChoice({
|
||||||
|
config: {},
|
||||||
|
setDefaultModel: true,
|
||||||
|
defaultModel,
|
||||||
|
applyProviderConfig: (config: OpenClawConfig) => config,
|
||||||
|
applyDefaultConfig: () => ({
|
||||||
|
agents: {
|
||||||
|
defaults: {
|
||||||
|
model: { primary: defaultModel },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
noteDefault: defaultModel,
|
||||||
|
noteAgentModel: async () => {},
|
||||||
|
prompter: makePrompter(),
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(applied.agentModelOverride).toBeUndefined();
|
||||||
|
expect(applied.config.agents?.defaults?.model).toEqual({ primary: defaultModel });
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
import type { OpenClawConfig } from "../config/config.js";
|
import type { OpenClawConfig } from "../config/config.js";
|
||||||
import type { WizardPrompter } from "../wizard/prompts.js";
|
import type { WizardPrompter } from "../wizard/prompts.js";
|
||||||
|
import { ensureModelAllowlistEntry } from "./model-allowlist.js";
|
||||||
|
|
||||||
export async function applyDefaultModelChoice(params: {
|
export async function applyDefaultModelChoice(params: {
|
||||||
config: OpenClawConfig;
|
config: OpenClawConfig;
|
||||||
|
|
@ -20,6 +21,10 @@ export async function applyDefaultModelChoice(params: {
|
||||||
}
|
}
|
||||||
|
|
||||||
const next = params.applyProviderConfig(params.config);
|
const next = params.applyProviderConfig(params.config);
|
||||||
|
const nextWithModel = ensureModelAllowlistEntry({
|
||||||
|
cfg: next,
|
||||||
|
modelRef: params.defaultModel,
|
||||||
|
});
|
||||||
await params.noteAgentModel(params.defaultModel);
|
await params.noteAgentModel(params.defaultModel);
|
||||||
return { config: next, agentModelOverride: params.defaultModel };
|
return { config: nextWithModel, agentModelOverride: params.defaultModel };
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -284,7 +284,7 @@ describe("applyAuthChoice", () => {
|
||||||
);
|
);
|
||||||
expect(result.config.agents?.defaults?.model?.primary).toBe("anthropic/claude-opus-4-5");
|
expect(result.config.agents?.defaults?.model?.primary).toBe("anthropic/claude-opus-4-5");
|
||||||
expect(result.config.models?.providers?.["opencode-zen"]).toBeUndefined();
|
expect(result.config.models?.providers?.["opencode-zen"]).toBeUndefined();
|
||||||
expect(result.agentModelOverride).toBe("opencode/claude-opus-4-5");
|
expect(result.agentModelOverride).toBe("opencode/claude-opus-4-6");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("uses existing OPENROUTER_API_KEY when selecting openrouter-api-key", async () => {
|
it("uses existing OPENROUTER_API_KEY when selecting openrouter-api-key", async () => {
|
||||||
|
|
@ -398,7 +398,7 @@ describe("applyAuthChoice", () => {
|
||||||
mode: "api_key",
|
mode: "api_key",
|
||||||
});
|
});
|
||||||
expect(result.config.agents?.defaults?.model?.primary).toBe(
|
expect(result.config.agents?.defaults?.model?.primary).toBe(
|
||||||
"vercel-ai-gateway/anthropic/claude-opus-4.5",
|
"vercel-ai-gateway/anthropic/claude-opus-4.6",
|
||||||
);
|
);
|
||||||
|
|
||||||
const authProfilePath = authProfilePathFor(requireAgentDir());
|
const authProfilePath = authProfilePathFor(requireAgentDir());
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,41 @@
|
||||||
|
import type { OpenClawConfig } from "../config/config.js";
|
||||||
|
import { DEFAULT_PROVIDER } from "../agents/defaults.js";
|
||||||
|
import { resolveAllowlistModelKey } from "../agents/model-selection.js";
|
||||||
|
|
||||||
|
export function ensureModelAllowlistEntry(params: {
|
||||||
|
cfg: OpenClawConfig;
|
||||||
|
modelRef: string;
|
||||||
|
defaultProvider?: string;
|
||||||
|
}): OpenClawConfig {
|
||||||
|
const rawModelRef = params.modelRef.trim();
|
||||||
|
if (!rawModelRef) {
|
||||||
|
return params.cfg;
|
||||||
|
}
|
||||||
|
|
||||||
|
const models = { ...params.cfg.agents?.defaults?.models };
|
||||||
|
const keySet = new Set<string>([rawModelRef]);
|
||||||
|
const canonicalKey = resolveAllowlistModelKey(
|
||||||
|
rawModelRef,
|
||||||
|
params.defaultProvider ?? DEFAULT_PROVIDER,
|
||||||
|
);
|
||||||
|
if (canonicalKey) {
|
||||||
|
keySet.add(canonicalKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const key of keySet) {
|
||||||
|
models[key] = {
|
||||||
|
...models[key],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
...params.cfg,
|
||||||
|
agents: {
|
||||||
|
...params.cfg.agents,
|
||||||
|
defaults: {
|
||||||
|
...params.cfg.agents?.defaults,
|
||||||
|
models,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
@ -12,6 +12,7 @@ import {
|
||||||
resolveConfiguredModelRef,
|
resolveConfiguredModelRef,
|
||||||
} from "../agents/model-selection.js";
|
} from "../agents/model-selection.js";
|
||||||
import { formatTokenK } from "./models/shared.js";
|
import { formatTokenK } from "./models/shared.js";
|
||||||
|
import { OPENAI_CODEX_DEFAULT_MODEL } from "./openai-codex-model-default.js";
|
||||||
|
|
||||||
const KEEP_VALUE = "__keep__";
|
const KEEP_VALUE = "__keep__";
|
||||||
const MANUAL_VALUE = "__manual__";
|
const MANUAL_VALUE = "__manual__";
|
||||||
|
|
@ -331,7 +332,7 @@ export async function promptModelAllowlist(params: {
|
||||||
params.message ??
|
params.message ??
|
||||||
"Allowlist models (comma-separated provider/model; blank to keep current)",
|
"Allowlist models (comma-separated provider/model; blank to keep current)",
|
||||||
initialValue: existingKeys.join(", "),
|
initialValue: existingKeys.join(", "),
|
||||||
placeholder: "openai-codex/gpt-5.2, anthropic/claude-opus-4-6",
|
placeholder: `${OPENAI_CODEX_DEFAULT_MODEL}, anthropic/claude-opus-4-6`,
|
||||||
});
|
});
|
||||||
const parsed = String(raw ?? "")
|
const parsed = String(raw ?? "")
|
||||||
.split(",")
|
.split(",")
|
||||||
|
|
|
||||||
|
|
@ -117,7 +117,7 @@ export async function setVeniceApiKey(key: string, agentDir?: string) {
|
||||||
export const ZAI_DEFAULT_MODEL_REF = "zai/glm-4.7";
|
export const ZAI_DEFAULT_MODEL_REF = "zai/glm-4.7";
|
||||||
export const XIAOMI_DEFAULT_MODEL_REF = "xiaomi/mimo-v2-flash";
|
export const XIAOMI_DEFAULT_MODEL_REF = "xiaomi/mimo-v2-flash";
|
||||||
export const OPENROUTER_DEFAULT_MODEL_REF = "openrouter/auto";
|
export const OPENROUTER_DEFAULT_MODEL_REF = "openrouter/auto";
|
||||||
export const VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF = "vercel-ai-gateway/anthropic/claude-opus-4.5";
|
export const VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF = "vercel-ai-gateway/anthropic/claude-opus-4.6";
|
||||||
|
|
||||||
export async function setZaiApiKey(key: string, agentDir?: string) {
|
export async function setZaiApiKey(key: string, agentDir?: string) {
|
||||||
// Write to resolved agent dir so gateway finds credentials on startup.
|
// Write to resolved agent dir so gateway finds credentials on startup.
|
||||||
|
|
|
||||||
|
|
@ -393,7 +393,7 @@ describe("applyOpencodeZenProviderConfig", () => {
|
||||||
it("adds allowlist entry for the default model", () => {
|
it("adds allowlist entry for the default model", () => {
|
||||||
const cfg = applyOpencodeZenProviderConfig({});
|
const cfg = applyOpencodeZenProviderConfig({});
|
||||||
const models = cfg.agents?.defaults?.models ?? {};
|
const models = cfg.agents?.defaults?.models ?? {};
|
||||||
expect(Object.keys(models)).toContain("opencode/claude-opus-4-5");
|
expect(Object.keys(models)).toContain("opencode/claude-opus-4-6");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("preserves existing alias for the default model", () => {
|
it("preserves existing alias for the default model", () => {
|
||||||
|
|
@ -401,19 +401,19 @@ describe("applyOpencodeZenProviderConfig", () => {
|
||||||
agents: {
|
agents: {
|
||||||
defaults: {
|
defaults: {
|
||||||
models: {
|
models: {
|
||||||
"opencode/claude-opus-4-5": { alias: "My Opus" },
|
"opencode/claude-opus-4-6": { alias: "My Opus" },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
expect(cfg.agents?.defaults?.models?.["opencode/claude-opus-4-5"]?.alias).toBe("My Opus");
|
expect(cfg.agents?.defaults?.models?.["opencode/claude-opus-4-6"]?.alias).toBe("My Opus");
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("applyOpencodeZenConfig", () => {
|
describe("applyOpencodeZenConfig", () => {
|
||||||
it("sets correct primary model", () => {
|
it("sets correct primary model", () => {
|
||||||
const cfg = applyOpencodeZenConfig({});
|
const cfg = applyOpencodeZenConfig({});
|
||||||
expect(cfg.agents?.defaults?.model?.primary).toBe("opencode/claude-opus-4-5");
|
expect(cfg.agents?.defaults?.model?.primary).toBe("opencode/claude-opus-4-6");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("preserves existing model fallbacks", () => {
|
it("preserves existing model fallbacks", () => {
|
||||||
|
|
|
||||||
|
|
@ -66,7 +66,7 @@ describe("onboard (non-interactive): Vercel AI Gateway", () => {
|
||||||
expect(cfg.auth?.profiles?.["vercel-ai-gateway:default"]?.provider).toBe("vercel-ai-gateway");
|
expect(cfg.auth?.profiles?.["vercel-ai-gateway:default"]?.provider).toBe("vercel-ai-gateway");
|
||||||
expect(cfg.auth?.profiles?.["vercel-ai-gateway:default"]?.mode).toBe("api_key");
|
expect(cfg.auth?.profiles?.["vercel-ai-gateway:default"]?.mode).toBe("api_key");
|
||||||
expect(cfg.agents?.defaults?.model?.primary).toBe(
|
expect(cfg.agents?.defaults?.model?.primary).toBe(
|
||||||
"vercel-ai-gateway/anthropic/claude-opus-4.5",
|
"vercel-ai-gateway/anthropic/claude-opus-4.6",
|
||||||
);
|
);
|
||||||
|
|
||||||
const { ensureAuthProfileStore } = await import("../agents/auth-profiles.js");
|
const { ensureAuthProfileStore } = await import("../agents/auth-profiles.js");
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,77 @@
|
||||||
|
import fs from "node:fs/promises";
|
||||||
|
import os from "node:os";
|
||||||
|
import path from "node:path";
|
||||||
|
import { describe, expect, it, vi } from "vitest";
|
||||||
|
import { OPENAI_DEFAULT_MODEL } from "./openai-model-default.js";
|
||||||
|
|
||||||
|
describe("onboard (non-interactive): OpenAI API key", () => {
|
||||||
|
it("stores OPENAI_API_KEY and configures the OpenAI default model", async () => {
|
||||||
|
const prev = {
|
||||||
|
home: process.env.HOME,
|
||||||
|
stateDir: process.env.OPENCLAW_STATE_DIR,
|
||||||
|
configPath: process.env.OPENCLAW_CONFIG_PATH,
|
||||||
|
skipChannels: process.env.OPENCLAW_SKIP_CHANNELS,
|
||||||
|
skipGmail: process.env.OPENCLAW_SKIP_GMAIL_WATCHER,
|
||||||
|
skipCron: process.env.OPENCLAW_SKIP_CRON,
|
||||||
|
skipCanvas: process.env.OPENCLAW_SKIP_CANVAS_HOST,
|
||||||
|
token: process.env.OPENCLAW_GATEWAY_TOKEN,
|
||||||
|
password: process.env.OPENCLAW_GATEWAY_PASSWORD,
|
||||||
|
};
|
||||||
|
|
||||||
|
process.env.OPENCLAW_SKIP_CHANNELS = "1";
|
||||||
|
process.env.OPENCLAW_SKIP_GMAIL_WATCHER = "1";
|
||||||
|
process.env.OPENCLAW_SKIP_CRON = "1";
|
||||||
|
process.env.OPENCLAW_SKIP_CANVAS_HOST = "1";
|
||||||
|
delete process.env.OPENCLAW_GATEWAY_TOKEN;
|
||||||
|
delete process.env.OPENCLAW_GATEWAY_PASSWORD;
|
||||||
|
|
||||||
|
const tempHome = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-onboard-openai-"));
|
||||||
|
process.env.HOME = tempHome;
|
||||||
|
process.env.OPENCLAW_STATE_DIR = tempHome;
|
||||||
|
process.env.OPENCLAW_CONFIG_PATH = path.join(tempHome, "openclaw.json");
|
||||||
|
vi.resetModules();
|
||||||
|
|
||||||
|
const runtime = {
|
||||||
|
log: () => {},
|
||||||
|
error: (msg: string) => {
|
||||||
|
throw new Error(msg);
|
||||||
|
},
|
||||||
|
exit: (code: number) => {
|
||||||
|
throw new Error(`exit:${code}`);
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { runNonInteractiveOnboarding } = await import("./onboard-non-interactive.js");
|
||||||
|
await runNonInteractiveOnboarding(
|
||||||
|
{
|
||||||
|
nonInteractive: true,
|
||||||
|
authChoice: "openai-api-key",
|
||||||
|
openaiApiKey: "sk-openai-test",
|
||||||
|
skipHealth: true,
|
||||||
|
skipChannels: true,
|
||||||
|
skipSkills: true,
|
||||||
|
json: true,
|
||||||
|
},
|
||||||
|
runtime,
|
||||||
|
);
|
||||||
|
|
||||||
|
const { CONFIG_PATH } = await import("../config/config.js");
|
||||||
|
const cfg = JSON.parse(await fs.readFile(CONFIG_PATH, "utf8")) as {
|
||||||
|
agents?: { defaults?: { model?: { primary?: string } } };
|
||||||
|
};
|
||||||
|
expect(cfg.agents?.defaults?.model?.primary).toBe(OPENAI_DEFAULT_MODEL);
|
||||||
|
} finally {
|
||||||
|
await fs.rm(tempHome, { recursive: true, force: true });
|
||||||
|
process.env.HOME = prev.home;
|
||||||
|
process.env.OPENCLAW_STATE_DIR = prev.stateDir;
|
||||||
|
process.env.OPENCLAW_CONFIG_PATH = prev.configPath;
|
||||||
|
process.env.OPENCLAW_SKIP_CHANNELS = prev.skipChannels;
|
||||||
|
process.env.OPENCLAW_SKIP_GMAIL_WATCHER = prev.skipGmail;
|
||||||
|
process.env.OPENCLAW_SKIP_CRON = prev.skipCron;
|
||||||
|
process.env.OPENCLAW_SKIP_CANVAS_HOST = prev.skipCanvas;
|
||||||
|
process.env.OPENCLAW_GATEWAY_TOKEN = prev.token;
|
||||||
|
process.env.OPENCLAW_GATEWAY_PASSWORD = prev.password;
|
||||||
|
}
|
||||||
|
}, 60_000);
|
||||||
|
});
|
||||||
|
|
@ -37,6 +37,7 @@ import {
|
||||||
setXiaomiApiKey,
|
setXiaomiApiKey,
|
||||||
setZaiApiKey,
|
setZaiApiKey,
|
||||||
} from "../../onboard-auth.js";
|
} from "../../onboard-auth.js";
|
||||||
|
import { applyOpenAIConfig } from "../../openai-model-default.js";
|
||||||
import { resolveNonInteractiveApiKey } from "../api-keys.js";
|
import { resolveNonInteractiveApiKey } from "../api-keys.js";
|
||||||
|
|
||||||
export async function applyNonInteractiveAuthChoice(params: {
|
export async function applyNonInteractiveAuthChoice(params: {
|
||||||
|
|
@ -234,7 +235,7 @@ export async function applyNonInteractiveAuthChoice(params: {
|
||||||
const result = upsertSharedEnvVar({ key: "OPENAI_API_KEY", value: key });
|
const result = upsertSharedEnvVar({ key: "OPENAI_API_KEY", value: key });
|
||||||
process.env.OPENAI_API_KEY = key;
|
process.env.OPENAI_API_KEY = key;
|
||||||
runtime.log(`Saved OPENAI_API_KEY to ${shortenHomePath(result.path)}`);
|
runtime.log(`Saved OPENAI_API_KEY to ${shortenHomePath(result.path)}`);
|
||||||
return nextConfig;
|
return applyOpenAIConfig(nextConfig);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (authChoice === "openrouter-api-key") {
|
if (authChoice === "openrouter-api-key") {
|
||||||
|
|
|
||||||
|
|
@ -4,6 +4,7 @@ import {
|
||||||
applyOpenAICodexModelDefault,
|
applyOpenAICodexModelDefault,
|
||||||
OPENAI_CODEX_DEFAULT_MODEL,
|
OPENAI_CODEX_DEFAULT_MODEL,
|
||||||
} from "./openai-codex-model-default.js";
|
} from "./openai-codex-model-default.js";
|
||||||
|
import { OPENAI_DEFAULT_MODEL } from "./openai-model-default.js";
|
||||||
|
|
||||||
describe("applyOpenAICodexModelDefault", () => {
|
describe("applyOpenAICodexModelDefault", () => {
|
||||||
it("sets openai-codex default when model is unset", () => {
|
it("sets openai-codex default when model is unset", () => {
|
||||||
|
|
@ -17,7 +18,7 @@ describe("applyOpenAICodexModelDefault", () => {
|
||||||
|
|
||||||
it("sets openai-codex default when model is openai/*", () => {
|
it("sets openai-codex default when model is openai/*", () => {
|
||||||
const cfg: OpenClawConfig = {
|
const cfg: OpenClawConfig = {
|
||||||
agents: { defaults: { model: "openai/gpt-5.2" } },
|
agents: { defaults: { model: OPENAI_DEFAULT_MODEL } },
|
||||||
};
|
};
|
||||||
const applied = applyOpenAICodexModelDefault(cfg);
|
const applied = applyOpenAICodexModelDefault(cfg);
|
||||||
expect(applied.changed).toBe(true);
|
expect(applied.changed).toBe(true);
|
||||||
|
|
@ -28,7 +29,7 @@ describe("applyOpenAICodexModelDefault", () => {
|
||||||
|
|
||||||
it("does not override openai-codex/*", () => {
|
it("does not override openai-codex/*", () => {
|
||||||
const cfg: OpenClawConfig = {
|
const cfg: OpenClawConfig = {
|
||||||
agents: { defaults: { model: "openai-codex/gpt-5.2" } },
|
agents: { defaults: { model: OPENAI_CODEX_DEFAULT_MODEL } },
|
||||||
};
|
};
|
||||||
const applied = applyOpenAICodexModelDefault(cfg);
|
const applied = applyOpenAICodexModelDefault(cfg);
|
||||||
expect(applied.changed).toBe(false);
|
expect(applied.changed).toBe(false);
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
import type { OpenClawConfig } from "../config/config.js";
|
import type { OpenClawConfig } from "../config/config.js";
|
||||||
import type { AgentModelListConfig } from "../config/types.js";
|
import type { AgentModelListConfig } from "../config/types.js";
|
||||||
|
|
||||||
export const OPENAI_CODEX_DEFAULT_MODEL = "openai-codex/gpt-5.2";
|
export const OPENAI_CODEX_DEFAULT_MODEL = "openai-codex/gpt-5.3-codex";
|
||||||
|
|
||||||
function shouldSetOpenAICodexModel(model?: string): boolean {
|
function shouldSetOpenAICodexModel(model?: string): boolean {
|
||||||
const trimmed = model?.trim();
|
const trimmed = model?.trim();
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,40 @@
|
||||||
|
import { describe, expect, it } from "vitest";
|
||||||
|
import {
|
||||||
|
applyOpenAIConfig,
|
||||||
|
applyOpenAIProviderConfig,
|
||||||
|
OPENAI_DEFAULT_MODEL,
|
||||||
|
} from "./openai-model-default.js";
|
||||||
|
|
||||||
|
describe("applyOpenAIProviderConfig", () => {
|
||||||
|
it("adds allowlist entry for default model", () => {
|
||||||
|
const next = applyOpenAIProviderConfig({});
|
||||||
|
expect(Object.keys(next.agents?.defaults?.models ?? {})).toContain(OPENAI_DEFAULT_MODEL);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("preserves existing alias for default model", () => {
|
||||||
|
const next = applyOpenAIProviderConfig({
|
||||||
|
agents: {
|
||||||
|
defaults: {
|
||||||
|
models: {
|
||||||
|
[OPENAI_DEFAULT_MODEL]: { alias: "My GPT" },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(next.agents?.defaults?.models?.[OPENAI_DEFAULT_MODEL]?.alias).toBe("My GPT");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("applyOpenAIConfig", () => {
|
||||||
|
it("sets default when model is unset", () => {
|
||||||
|
const next = applyOpenAIConfig({});
|
||||||
|
expect(next.agents?.defaults?.model).toEqual({ primary: OPENAI_DEFAULT_MODEL });
|
||||||
|
});
|
||||||
|
|
||||||
|
it("overrides model.primary when model object already exists", () => {
|
||||||
|
const next = applyOpenAIConfig({
|
||||||
|
agents: { defaults: { model: { primary: "anthropic/claude-opus-4-6", fallback: [] } } },
|
||||||
|
});
|
||||||
|
expect(next.agents?.defaults?.model).toEqual({ primary: OPENAI_DEFAULT_MODEL, fallback: [] });
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
@ -0,0 +1,47 @@
|
||||||
|
import type { OpenClawConfig } from "../config/config.js";
|
||||||
|
import { ensureModelAllowlistEntry } from "./model-allowlist.js";
|
||||||
|
|
||||||
|
export const OPENAI_DEFAULT_MODEL = "openai/gpt-5.1-codex";
|
||||||
|
|
||||||
|
export function applyOpenAIProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
|
||||||
|
const next = ensureModelAllowlistEntry({
|
||||||
|
cfg,
|
||||||
|
modelRef: OPENAI_DEFAULT_MODEL,
|
||||||
|
});
|
||||||
|
const models = { ...next.agents?.defaults?.models };
|
||||||
|
models[OPENAI_DEFAULT_MODEL] = {
|
||||||
|
...models[OPENAI_DEFAULT_MODEL],
|
||||||
|
alias: models[OPENAI_DEFAULT_MODEL]?.alias ?? "GPT",
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
...next,
|
||||||
|
agents: {
|
||||||
|
...next.agents,
|
||||||
|
defaults: {
|
||||||
|
...next.agents?.defaults,
|
||||||
|
models,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export function applyOpenAIConfig(cfg: OpenClawConfig): OpenClawConfig {
|
||||||
|
const next = applyOpenAIProviderConfig(cfg);
|
||||||
|
return {
|
||||||
|
...next,
|
||||||
|
agents: {
|
||||||
|
...next.agents,
|
||||||
|
defaults: {
|
||||||
|
...next.agents?.defaults,
|
||||||
|
model:
|
||||||
|
next.agents?.defaults?.model && typeof next.agents.defaults.model === "object"
|
||||||
|
? {
|
||||||
|
...next.agents.defaults.model,
|
||||||
|
primary: OPENAI_DEFAULT_MODEL,
|
||||||
|
}
|
||||||
|
: { primary: OPENAI_DEFAULT_MODEL },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
@ -1,8 +1,11 @@
|
||||||
import type { OpenClawConfig } from "../config/config.js";
|
import type { OpenClawConfig } from "../config/config.js";
|
||||||
import type { AgentModelListConfig } from "../config/types.js";
|
import type { AgentModelListConfig } from "../config/types.js";
|
||||||
|
|
||||||
export const OPENCODE_ZEN_DEFAULT_MODEL = "opencode/claude-opus-4-5";
|
export const OPENCODE_ZEN_DEFAULT_MODEL = "opencode/claude-opus-4-6";
|
||||||
const LEGACY_OPENCODE_ZEN_DEFAULT_MODEL = "opencode-zen/claude-opus-4-5";
|
const LEGACY_OPENCODE_ZEN_DEFAULT_MODELS = new Set([
|
||||||
|
"opencode/claude-opus-4-5",
|
||||||
|
"opencode-zen/claude-opus-4-5",
|
||||||
|
]);
|
||||||
|
|
||||||
function resolvePrimaryModel(model?: AgentModelListConfig | string): string | undefined {
|
function resolvePrimaryModel(model?: AgentModelListConfig | string): string | undefined {
|
||||||
if (typeof model === "string") {
|
if (typeof model === "string") {
|
||||||
|
|
@ -20,7 +23,9 @@ export function applyOpencodeZenModelDefault(cfg: OpenClawConfig): {
|
||||||
} {
|
} {
|
||||||
const current = resolvePrimaryModel(cfg.agents?.defaults?.model)?.trim();
|
const current = resolvePrimaryModel(cfg.agents?.defaults?.model)?.trim();
|
||||||
const normalizedCurrent =
|
const normalizedCurrent =
|
||||||
current === LEGACY_OPENCODE_ZEN_DEFAULT_MODEL ? OPENCODE_ZEN_DEFAULT_MODEL : current;
|
current && LEGACY_OPENCODE_ZEN_DEFAULT_MODELS.has(current)
|
||||||
|
? OPENCODE_ZEN_DEFAULT_MODEL
|
||||||
|
: current;
|
||||||
if (normalizedCurrent === OPENCODE_ZEN_DEFAULT_MODEL) {
|
if (normalizedCurrent === OPENCODE_ZEN_DEFAULT_MODEL) {
|
||||||
return { next: cfg, changed: false };
|
return { next: cfg, changed: false };
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -26,7 +26,7 @@ describe("applyModelDefaults", () => {
|
||||||
agents: {
|
agents: {
|
||||||
defaults: {
|
defaults: {
|
||||||
models: {
|
models: {
|
||||||
"anthropic/claude-opus-4-6": { alias: "Opus" },
|
"anthropic/claude-opus-4-5": { alias: "Opus" },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -34,7 +34,7 @@ describe("applyModelDefaults", () => {
|
||||||
|
|
||||||
const next = applyModelDefaults(cfg);
|
const next = applyModelDefaults(cfg);
|
||||||
|
|
||||||
expect(next.agents?.defaults?.models?.["anthropic/claude-opus-4-6"]?.alias).toBe("Opus");
|
expect(next.agents?.defaults?.models?.["anthropic/claude-opus-4-5"]?.alias).toBe("Opus");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("respects explicit empty alias disables", () => {
|
it("respects explicit empty alias disables", () => {
|
||||||
|
|
|
||||||
|
|
@ -59,13 +59,13 @@ export type MessagesConfig = {
|
||||||
* - special value: `"auto"` derives `[{agents.list[].identity.name}]` for the routed agent (when set)
|
* - special value: `"auto"` derives `[{agents.list[].identity.name}]` for the routed agent (when set)
|
||||||
*
|
*
|
||||||
* Supported template variables (case-insensitive):
|
* Supported template variables (case-insensitive):
|
||||||
* - `{model}` - short model name (e.g., `claude-opus-4-5`, `gpt-4o`)
|
* - `{model}` - short model name (e.g., `claude-opus-4-6`, `gpt-4o`)
|
||||||
* - `{modelFull}` - full model identifier (e.g., `anthropic/claude-opus-4-5`)
|
* - `{modelFull}` - full model identifier (e.g., `anthropic/claude-opus-4-6`)
|
||||||
* - `{provider}` - provider name (e.g., `anthropic`, `openai`)
|
* - `{provider}` - provider name (e.g., `anthropic`, `openai`)
|
||||||
* - `{thinkingLevel}` or `{think}` - current thinking level (`high`, `low`, `off`)
|
* - `{thinkingLevel}` or `{think}` - current thinking level (`high`, `low`, `off`)
|
||||||
* - `{identity.name}` or `{identityName}` - agent identity name
|
* - `{identity.name}` or `{identityName}` - agent identity name
|
||||||
*
|
*
|
||||||
* Example: `"[{model} | think:{thinkingLevel}]"` → `"[claude-opus-4-5 | think:high]"`
|
* Example: `"[{model} | think:{thinkingLevel}]"` → `"[claude-opus-4-6 | think:high]"`
|
||||||
*
|
*
|
||||||
* Unresolved variables remain as literal text (e.g., `{model}` if context unavailable).
|
* Unresolved variables remain as literal text (e.g., `{model}` if context unavailable).
|
||||||
*
|
*
|
||||||
|
|
|
||||||
|
|
@ -404,7 +404,7 @@ vi.mock("../config/config.js", async () => {
|
||||||
? (fileAgents.defaults as Record<string, unknown>)
|
? (fileAgents.defaults as Record<string, unknown>)
|
||||||
: {};
|
: {};
|
||||||
const defaults = {
|
const defaults = {
|
||||||
model: { primary: "anthropic/claude-opus-4-5" },
|
model: { primary: "anthropic/claude-opus-4-6" },
|
||||||
workspace: path.join(os.tmpdir(), "openclaw-gateway-test"),
|
workspace: path.join(os.tmpdir(), "openclaw-gateway-test"),
|
||||||
...fileDefaults,
|
...fileDefaults,
|
||||||
...testState.agentConfig,
|
...testState.agentConfig,
|
||||||
|
|
|
||||||
|
|
@ -312,10 +312,12 @@ function isClaudeModel(id: string): boolean {
|
||||||
}
|
}
|
||||||
|
|
||||||
function isClaude45OrHigher(id: string): boolean {
|
function isClaude45OrHigher(id: string): boolean {
|
||||||
// Match claude-*-4-5, claude-*-45, claude-*4.5, or opus-4-5/opus-45 variants
|
// Match claude-*-4-5+, claude-*-45+, claude-*4.5+, or future 5.x+ majors.
|
||||||
// Examples that should match:
|
// Examples that should match:
|
||||||
// claude-opus-4-5, claude-opus-45, claude-4.5, venice/claude-opus-45
|
// claude-opus-4-5, claude-opus-4-6, claude-opus-45, claude-4.6, claude-sonnet-5
|
||||||
return /\bclaude-[^\s/]*?(?:-4-?5\b|4\.5\b)/i.test(id);
|
return /\bclaude-[^\s/]*?(?:-4-?(?:[5-9]|[1-9]\d)\b|4\.(?:[5-9]|[1-9]\d)\b|-[5-9](?:\b|[.-]))/i.test(
|
||||||
|
id,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function collectModelHygieneFindings(cfg: OpenClawConfig): SecurityAuditFinding[] {
|
export function collectModelHygieneFindings(cfg: OpenClawConfig): SecurityAuditFinding[] {
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue